X
stringlengths
236
264k
y
stringlengths
5
74
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import 
com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.EvaluationContext; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.MemoizingEvaluator; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import net.starlark.java.eval.StarlarkSemantics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. 
*/ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator evaluator; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); BlazeDirectories directories = new BlazeDirectories( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories); Map<SkyFunctionName, SkyFunction> skyFunctions = new HashMap<>(); skyFunctions.put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache.NO_CACHE, externalFilesHelper)); skyFunctions.put(SkyFunctions.FILE, new FileFunction(pkgLocator, directories)); skyFunctions.put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction()); skyFunctions.put( SkyFunctions.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); skyFunctions.put(SkyFunctions.IGNORED_SUBDIRECTORIES, IgnoredSubdirectoriesFunction.NOOP); skyFunctions.put( SkyFunctions.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); skyFunctions.put(SkyFunctions.WORKSPACE_NAME, 
new TestWorkspaceNameFunction()); skyFunctions.put( SkyFunctions.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); evaluator = new InMemoryMemoizingEvaluator(skyFunctions, differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String relative) { return RootedPath.toRootedPath( artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative(relative)); } private static RootedPath siblingOf(Artifact artifact, String relative) { PathFragment parent = Preconditions.checkNotNull(artifact.getRootRelativePath(). [MASK] ()); return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative(relative)); } private void createFile(Path path, String... contents) throws Exception { if (!path. [MASK] ().exists()) { scratch.dir(path. [MASK] ().getPathString()); } scratch.file(path.getPathString(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact.getPath(), contents); } private RootedPath createFile(RootedPath path, String... 
contents) throws Exception { createFile(path.asPath(), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { EvaluationContext evaluationContext = EvaluationContext.newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return evaluator.evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink(PathFragment.create(from), to.getPath().asFragment()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink(PathFragment.create(from), to.asPath().asFragment()); } private FilesetOutputSymlink symlink(PathFragment from, PathFragment to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory.asFragment()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. 
(input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.targetPath(), input.relativeToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); 
assertThat(result.getError(key).getException()) .hasMessageThat() .contains("foo contains a directory"); } @Test public void testFileTraversalForDanglingSymlink() throws Exception { Artifact linkName = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf(linkName, "target.file"), "blah"); linkName.getPath().createSymbolicLink(PathFragment.create("target.file")); linkTarget.asPath().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ linkName, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different values (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class FingerprintTester { private final Map<String, Domain> domains; FingerprintTester(Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@Nullable String useAlternateFor) { Map<String, Object> values = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { values.put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return values; } public void doTest() throws Exception { Fingerprint fp = new Fingerprint(); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedInFingerprint) { assertWithMessage( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { assertWithMessage( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedInFingerprint; Object valueA; Object valueB; Domain(boolean includedInFingerprint, Object valueA, Object valueB) { this.includedInFingerprint = includedInFingerprint; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOfFingerprint(Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain notPartOfFingerprint(Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void testFingerprintOfFileTraversal() throws Exception { new FingerprintTester( ImmutableMap.<String, Domain>builder() .put("ownerLabel", notPartOfFingerprint("//bar")) .put("fileToTraverse", partOfFingerprint("foo/file.a", 
"bar/file.b")) .put("destPath", partOfFingerprint("out1", "out2")) .put("strictFilesetOutput", partOfFingerprint(true, false)) .put("permitDirectories", partOfFingerprint(true, false)) .buildOrThrow()) { @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), PathFragment.create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void testFingerprintOfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new FingerprintTester( ImmutableMap.of( "ownerLabel", notPartOfFingerprint("//bar"), "nestedArtifact", partOfFingerprint(nested1, nested2), "destDir", partOfFingerprint("out1", "out2"), "excludes", partOfFingerprint(ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), PathFragment.create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements SkyFunction { @Nullable @Override public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
getParentDirectory
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import 
com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs. [MASK] ; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.EvaluationContext; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.MemoizingEvaluator; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import net.starlark.java.eval.StarlarkSemantics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. 
*/ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator evaluator; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); BlazeDirectories directories = new BlazeDirectories( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories); Map<SkyFunctionName, SkyFunction> skyFunctions = new HashMap<>(); skyFunctions.put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache.NO_CACHE, externalFilesHelper)); skyFunctions.put(SkyFunctions.FILE, new FileFunction(pkgLocator, directories)); skyFunctions.put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction()); skyFunctions.put( SkyFunctions.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); skyFunctions.put(SkyFunctions.IGNORED_SUBDIRECTORIES, IgnoredSubdirectoriesFunction.NOOP); skyFunctions.put( SkyFunctions.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); skyFunctions.put(SkyFunctions.WORKSPACE_NAME, 
new TestWorkspaceNameFunction()); skyFunctions.put( SkyFunctions.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); evaluator = new InMemoryMemoizingEvaluator(skyFunctions, differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String relative) { return RootedPath.toRootedPath( artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative(relative)); } private static RootedPath siblingOf(Artifact artifact, String relative) { [MASK] parent = Preconditions.checkNotNull(artifact.getRootRelativePath().getParentDirectory()); return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative(relative)); } private void createFile(Path path, String... contents) throws Exception { if (!path.getParentDirectory().exists()) { scratch.dir(path.getParentDirectory().getPathString()); } scratch.file(path.getPathString(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact.getPath(), contents); } private RootedPath createFile(RootedPath path, String... 
contents) throws Exception { createFile(path.asPath(), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { EvaluationContext evaluationContext = EvaluationContext.newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return evaluator.evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink( [MASK] .create(from), to.getPath().asFragment()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink( [MASK] .create(from), to.asPath().asFragment()); } private FilesetOutputSymlink symlink( [MASK] from, [MASK] to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory.asFragment()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. 
(input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.targetPath(), input.relativeToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, [MASK] .create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, [MASK] .create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, [MASK] .create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); assertThat(result.getError(key).getException()) 
.hasMessageThat() .contains("foo contains a directory"); } @Test public void testFileTraversalForDanglingSymlink() throws Exception { Artifact linkName = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf(linkName, "target.file"), "blah"); linkName.getPath().createSymbolicLink( [MASK] .create("target.file")); linkTarget.asPath().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ linkName, [MASK] .create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, [MASK] .create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different values (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class FingerprintTester { private final Map<String, Domain> domains; FingerprintTester(Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@Nullable String useAlternateFor) { Map<String, Object> values = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { values.put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return values; } public void doTest() throws Exception { Fingerprint fp = new Fingerprint(); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedInFingerprint) { assertWithMessage( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { assertWithMessage( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedInFingerprint; Object valueA; Object valueB; Domain(boolean includedInFingerprint, Object valueA, Object valueB) { this.includedInFingerprint = includedInFingerprint; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOfFingerprint(Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain notPartOfFingerprint(Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void testFingerprintOfFileTraversal() throws Exception { new FingerprintTester( ImmutableMap.<String, Domain>builder() .put("ownerLabel", notPartOfFingerprint("//bar")) .put("fileToTraverse", partOfFingerprint("foo/file.a", 
"bar/file.b")) .put("destPath", partOfFingerprint("out1", "out2")) .put("strictFilesetOutput", partOfFingerprint(true, false)) .put("permitDirectories", partOfFingerprint(true, false)) .buildOrThrow()) { @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), [MASK] .create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void testFingerprintOfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new FingerprintTester( ImmutableMap.of( "ownerLabel", notPartOfFingerprint("//bar"), "nestedArtifact", partOfFingerprint(nested1, nested2), "destDir", partOfFingerprint("out1", "out2"), "excludes", partOfFingerprint(ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), [MASK] .create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements SkyFunction { @Nullable @Override public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
PathFragment
/* * Copyright (c) 2014, 2024, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package stream; import static jaxp.library.JAXPTestUtilities.USER_DIR; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.InputStream; import java.io.OutputStream; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLOutputFactory; import javax.xml.stream.XMLStreamReader; import javax.xml.stream.XMLStreamWriter; import org.testng.Assert; import org.testng.annotations.Test; /* * @test * @bug 6688002 * @library /javax/xml/jaxp/libs /javax/xml/jaxp/unittest * @run testng/othervm stream.Bug6688002Test * @summary Test single instance of XMLOutputFactory/XMLInputFactory create multiple Writer/Readers in parallel. 
*/ public class Bug6688002Test { private static final XMLOutputFactory outputFactory = XMLOutputFactory.newInstance(); private static final XMLInputFactory inputFactory = XMLInputFactory.newInstance(); private static final int NO_THREADS = 3; @Test public void testMulti [MASK] () throws Exception { [MASK] [] threads = new [MASK] [NO_THREADS]; for (int i = 0; i < NO_THREADS; i++) { threads[i] = new [MASK] (new MyRunnable(i)); } for (int i = 0; i < NO_THREADS; i++) { threads[i].start(); } for (int i = 0; i < NO_THREADS; i++) { threads[i].join(); } } public class MyRunnable implements Runnable { final String no; MyRunnable(int no) { this.no = String.valueOf(no); } public void run() { try { FileOutputStream fos = new FileOutputStream(USER_DIR + no); XMLStreamWriter w = getWriter(fos); // System.out.println("Writer="+w+" [MASK] ="+ [MASK] .current [MASK] ()); w.writeStartDocument(); w.writeStartElement("hello"); for (int j = 0; j < 50; j++) { w.writeStartElement("a" + j); w.writeEndElement(); } w.writeEndElement(); w.writeEndDocument(); w.close(); fos.close(); FileInputStream fis = new FileInputStream(USER_DIR + no); XMLStreamReader r = getReader(fis); while (r.hasNext()) { r.next(); } r.close(); fis.close(); } catch (Exception e) { Assert.fail(e.getMessage()); } } } public static/* synchronized */XMLStreamReader getReader(InputStream is) throws Exception { return inputFactory.createXMLStreamReader(is); // return XMLStreamReaderFactory.create(null, is, true); } public static/* synchronized */XMLStreamWriter getWriter(OutputStream os) throws Exception { return outputFactory.createXMLStreamWriter(os); // return XMLStreamWriterFactory.createXMLStreamWriter(os); } }
Thread
/* * Copyright (c) 2014, 2024, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package stream; import static jaxp.library.JAXPTestUtilities.USER_DIR; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.InputStream; import java.io.OutputStream; import javax.xml.stream. [MASK] ; import javax.xml.stream.XMLOutputFactory; import javax.xml.stream.XMLStreamReader; import javax.xml.stream.XMLStreamWriter; import org.testng.Assert; import org.testng.annotations.Test; /* * @test * @bug 6688002 * @library /javax/xml/jaxp/libs /javax/xml/jaxp/unittest * @run testng/othervm stream.Bug6688002Test * @summary Test single instance of XMLOutputFactory/ [MASK] create multiple Writer/Readers in parallel. 
*/ public class Bug6688002Test { private static final XMLOutputFactory outputFactory = XMLOutputFactory.newInstance(); private static final [MASK] inputFactory = [MASK] .newInstance(); private static final int NO_THREADS = 3; @Test public void testMultiThread() throws Exception { Thread[] threads = new Thread[NO_THREADS]; for (int i = 0; i < NO_THREADS; i++) { threads[i] = new Thread(new MyRunnable(i)); } for (int i = 0; i < NO_THREADS; i++) { threads[i].start(); } for (int i = 0; i < NO_THREADS; i++) { threads[i].join(); } } public class MyRunnable implements Runnable { final String no; MyRunnable(int no) { this.no = String.valueOf(no); } public void run() { try { FileOutputStream fos = new FileOutputStream(USER_DIR + no); XMLStreamWriter w = getWriter(fos); // System.out.println("Writer="+w+" Thread="+Thread.currentThread()); w.writeStartDocument(); w.writeStartElement("hello"); for (int j = 0; j < 50; j++) { w.writeStartElement("a" + j); w.writeEndElement(); } w.writeEndElement(); w.writeEndDocument(); w.close(); fos.close(); FileInputStream fis = new FileInputStream(USER_DIR + no); XMLStreamReader r = getReader(fis); while (r.hasNext()) { r.next(); } r.close(); fis.close(); } catch (Exception e) { Assert.fail(e.getMessage()); } } } public static/* synchronized */XMLStreamReader getReader(InputStream is) throws Exception { return inputFactory.createXMLStreamReader(is); // return XMLStreamReaderFactory.create(null, is, true); } public static/* synchronized */XMLStreamWriter getWriter(OutputStream os) throws Exception { return outputFactory.createXMLStreamWriter(os); // return XMLStreamWriterFactory.createXMLStreamWriter(os); } }
XMLInputFactory
/* * Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. * */ // This is a utlitity test class for loading classes-under-test // by means of custom class loader. // See AppCDS/jvmti/transformRelatedClasses/TransformRelatedClasses.java // for an example. // Use this test app in conjunction with other tests // to load and exercise classes using custom class loader(s). // This class is intended to be called by the "main test driver" // inside a child process, normally with sharing enabled. 
// // Arguments: customJarPath, loaderType, testClass // customJarPath - a path to jar file containing classes for // loading via this custom class loader, including the // testClass // loaderType - Currently only "unregistered" // (Fingerprint verification method) is allowed // testClass - the class to be loader; the test method with // signature 'public static void test()' will be called // on this class, so class must contain such method import java.io.File; import java.lang.reflect.Method; import java.net.URL; import java.net. [MASK] ; import java.util.logging.Logger; public class CustomLoaderApp { public static void ping() {}; private static void log(String msg) { System.out.println("CustomLoaderApp: " + msg); } public static void main(String[] args) throws Exception { String path = args[0]; URL url = new File(path).toURI().toURL(); URL[] urls = new URL[] {url}; String loaderType = args[1]; log("loaderType = " + loaderType); for (int i = 2; i < args.length; i++) { String testClass = args[i]; log("testClass = " + testClass); switch(loaderType) { case "unregistered": loadAndUseWithUnregisteredLoader(urls, testClass); break; default: throw new IllegalArgumentException("loader type is wrong: " + loaderType); } } } // Load the test classes using unregistered loader // (i.e. 
loader that is not using AppCDS API) private static void loadAndUseWithUnregisteredLoader(URL[] urls, String testClass) throws Exception { [MASK] urlClassLoader = new [MASK] (urls); callTestMethod(loadAndCheck(urlClassLoader, testClass)); } private static Class loadAndCheck(ClassLoader loader, String className) throws ClassNotFoundException { Class c = loader.loadClass(className); log("class = " + c); log("loader = " + c.getClassLoader()); // Check that c is defined by the correct loader if (c.getClassLoader() != loader) { String msg = String.format("c.getClassLoader() equals to <%s>, expected <%s>", c.getClassLoader(), loader); throw new RuntimeException(msg); } return c; } private static void callTestMethod(Class c) throws Exception { Method[] methods = c.getDeclaredMethods(); for (Method m : methods) { log("method = " + m.getName()); if (m.getName().equals("test")) m.invoke(null); } } }
URLClassLoader
/* * Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. * */ // This is a utlitity test class for loading classes-under-test // by means of custom class [MASK] . // See AppCDS/jvmti/transformRelatedClasses/TransformRelatedClasses.java // for an example. // Use this test app in conjunction with other tests // to load and exercise classes using custom class [MASK] (s). // This class is intended to be called by the "main test driver" // inside a child process, normally with sharing enabled. 
// // Arguments: customJarPath, [MASK] Type, testClass // customJarPath - a path to jar file containing classes for // loading via this custom class [MASK] , including the // testClass // [MASK] Type - Currently only "unregistered" // (Fingerprint verification method) is allowed // testClass - the class to be [MASK] ; the test method with // signature 'public static void test()' will be called // on this class, so class must contain such method import java.io.File; import java.lang.reflect.Method; import java.net.URL; import java.net.URLClassLoader; import java.util.logging.Logger; public class CustomLoaderApp { public static void ping() {}; private static void log(String msg) { System.out.println("CustomLoaderApp: " + msg); } public static void main(String[] args) throws Exception { String path = args[0]; URL url = new File(path).toURI().toURL(); URL[] urls = new URL[] {url}; String [MASK] Type = args[1]; log(" [MASK] Type = " + [MASK] Type); for (int i = 2; i < args.length; i++) { String testClass = args[i]; log("testClass = " + testClass); switch( [MASK] Type) { case "unregistered": loadAndUseWithUnregisteredLoader(urls, testClass); break; default: throw new IllegalArgumentException(" [MASK] type is wrong: " + [MASK] Type); } } } // Load the test classes using unregistered [MASK] // (i.e. 
[MASK] that is not using AppCDS API) private static void loadAndUseWithUnregisteredLoader(URL[] urls, String testClass) throws Exception { URLClassLoader urlClassLoader = new URLClassLoader(urls); callTestMethod(loadAndCheck(urlClassLoader, testClass)); } private static Class loadAndCheck(ClassLoader [MASK] , String className) throws ClassNotFoundException { Class c = [MASK] .loadClass(className); log("class = " + c); log(" [MASK] = " + c.getClassLoader()); // Check that c is defined by the correct [MASK] if (c.getClassLoader() != [MASK] ) { String msg = String.format("c.getClassLoader() equals to <%s>, expected <%s>", c.getClassLoader(), [MASK] ); throw new RuntimeException(msg); } return c; } private static void callTestMethod(Class c) throws Exception { Method[] methods = c.getDeclaredMethods(); for (Method m : methods) { log("method = " + m.getName()); if (m.getName().equals("test")) m.invoke(null); } } }
loader
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.hudi; import com.facebook.presto.common.type. [MASK] ; import com.facebook.presto.hive.HdfsEnvironment; import com.facebook.presto.hive.MetastoreClientConfig; import com.facebook.presto.hive.metastore.ExtendedHiveMetastore; import com.facebook.presto.hive.metastore.InMemoryCachingHiveMetastore; import com.facebook.presto.spi.connector.ConnectorMetadata; import javax.inject.Inject; import static java.util.Objects.requireNonNull; public class HudiMetadataFactory { private final ExtendedHiveMetastore metastore; private final HdfsEnvironment hdfsEnvironment; private final [MASK] typeManager; private final long perTransactionCacheMaximumSize; private final boolean metastoreImpersonationEnabled; private final int metastorePartitionCacheMaxColumnCount; @Inject public HudiMetadataFactory( ExtendedHiveMetastore metastore, HdfsEnvironment hdfsEnvironment, [MASK] typeManager, MetastoreClientConfig metastoreClientConfig) { this.metastore = requireNonNull(metastore, "metastore is null"); this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null"); this.typeManager = requireNonNull(typeManager, "typeManager is null"); this.perTransactionCacheMaximumSize = metastoreClientConfig.getPerTransactionMetastoreCacheMaximumSize(); this.metastoreImpersonationEnabled = metastoreClientConfig.isMetastoreImpersonationEnabled(); this.metastorePartitionCacheMaxColumnCount = 
metastoreClientConfig.getPartitionCacheColumnCountLimit(); } public ConnectorMetadata create() { return new HudiMetadata( InMemoryCachingHiveMetastore.memoizeMetastore(metastore, metastoreImpersonationEnabled, perTransactionCacheMaximumSize, metastorePartitionCacheMaxColumnCount), hdfsEnvironment, typeManager); } }
TypeManager
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import 
com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.EvaluationContext; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.MemoizingEvaluator; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import net.starlark.java.eval.StarlarkSemantics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. 
*/ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator evaluator; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); BlazeDirectories [MASK] = new BlazeDirectories( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, [MASK] ); Map<SkyFunctionName, SkyFunction> skyFunctions = new HashMap<>(); skyFunctions.put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache.NO_CACHE, externalFilesHelper)); skyFunctions.put(SkyFunctions.FILE, new FileFunction(pkgLocator, [MASK] )); skyFunctions.put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction()); skyFunctions.put( SkyFunctions.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); skyFunctions.put(SkyFunctions.IGNORED_SUBDIRECTORIES, IgnoredSub [MASK] Function.NOOP); skyFunctions.put( SkyFunctions.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); skyFunctions.put(SkyFunctions.WORKSPACE_NAME, new 
TestWorkspaceNameFunction()); skyFunctions.put( SkyFunctions.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); evaluator = new InMemoryMemoizingEvaluator(skyFunctions, differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String relative) { return RootedPath.toRootedPath( artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative(relative)); } private static RootedPath siblingOf(Artifact artifact, String relative) { PathFragment parent = Preconditions.checkNotNull(artifact.getRootRelativePath().getParentDirectory()); return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative(relative)); } private void createFile(Path path, String... contents) throws Exception { if (!path.getParentDirectory().exists()) { scratch.dir(path.getParentDirectory().getPathString()); } scratch.file(path.getPathString(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact.getPath(), contents); } private RootedPath createFile(RootedPath path, String... 
contents) throws Exception { createFile(path.asPath(), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { EvaluationContext evaluationContext = EvaluationContext.newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return evaluator.evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink(PathFragment.create(from), to.getPath().asFragment()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink(PathFragment.create(from), to.asPath().asFragment()); } private FilesetOutputSymlink symlink(PathFragment from, PathFragment to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory.asFragment()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. 
(input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.targetPath(), input.relativeToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); 
assertThat(result.getError(key).getException()) .hasMessageThat() .contains("foo contains a directory"); } @Test public void testFileTraversalForDanglingSymlink() throws Exception { Artifact linkName = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf(linkName, "target.file"), "blah"); linkName.getPath().createSymbolicLink(PathFragment.create("target.file")); linkTarget.asPath().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ linkName, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different values (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class FingerprintTester { private final Map<String, Domain> domains; FingerprintTester(Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@Nullable String useAlternateFor) { Map<String, Object> values = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { values.put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return values; } public void doTest() throws Exception { Fingerprint fp = new Fingerprint(); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedInFingerprint) { assertWithMessage( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { assertWithMessage( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedInFingerprint; Object valueA; Object valueB; Domain(boolean includedInFingerprint, Object valueA, Object valueB) { this.includedInFingerprint = includedInFingerprint; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOfFingerprint(Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain notPartOfFingerprint(Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void testFingerprintOfFileTraversal() throws Exception { new FingerprintTester( ImmutableMap.<String, Domain>builder() .put("ownerLabel", notPartOfFingerprint("//bar")) .put("fileToTraverse", partOfFingerprint("foo/file.a", 
"bar/file.b")) .put("destPath", partOfFingerprint("out1", "out2")) .put("strictFilesetOutput", partOfFingerprint(true, false)) .put("permitDirectories", partOfFingerprint(true, false)) .buildOrThrow()) { @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), PathFragment.create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void testFingerprintOfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new FingerprintTester( ImmutableMap.of( "ownerLabel", notPartOfFingerprint("//bar"), "nestedArtifact", partOfFingerprint(nested1, nested2), "destDir", partOfFingerprint("out1", "out2"), "excludes", partOfFingerprint(ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), PathFragment.create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements SkyFunction { @Nullable @Override public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
directories
/* * Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. * */ // This is a utlitity test class for loading classes-under-test // by means of custom class loader. // See AppCDS/jvmti/transformRelatedClasses/TransformRelatedClasses.java // for an example. // Use this test app in conjunction with other tests // to load and exercise classes using custom class loader(s). // This class is intended to be called by the "main test driver" // inside a child process, normally with sharing enabled. 
// Arguments: customJarPath, loaderType, testClass
//   customJarPath - a path to jar file containing classes for
//       loading via this custom class loader, including the
//       testClass
//   loaderType - Currently only "unregistered"
//       (Fingerprint verification method) is allowed
//   testClass - the class to be loaded; the test method with
//       signature 'public static void test()' will be called
//       on this class, so class must contain such method

import java.io.File;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.logging.Logger;

/**
 * Test helper that loads classes through a custom (unregistered) class loader
 * and invokes their {@code public static void test()} method.
 */
public class CustomLoaderApp {

    /** No-op used by callers to confirm this class itself is loadable. */
    public static void ping() {}

    private static void log(String msg) {
        System.out.println("CustomLoaderApp: " + msg);
    }

    /**
     * Entry point.
     *
     * @param args args[0] = jar path, args[1] = loader type ("unregistered"),
     *             args[2..] = test class names to load and exercise
     * @throws Exception if loading or invoking a test class fails
     */
    public static void main(String[] args) throws Exception {
        String path = args[0];
        URL url = new File(path).toURI().toURL();
        URL[] urls = new URL[] {url};

        String loaderType = args[1];
        log("loaderType = " + loaderType);
        for (int i = 2; i < args.length; i++) {
            String testClass = args[i];
            log("testClass = " + testClass);

            switch (loaderType) {
                case "unregistered":
                    loadAndUseWithUnregisteredLoader(urls, testClass);
                    break;
                default:
                    throw new IllegalArgumentException("loader type is wrong: " + loaderType);
            }
        }
    }

    // Load the test classes using unregistered loader
    // (i.e. loader that is not using AppCDS API)
    private static void loadAndUseWithUnregisteredLoader(URL[] urls, String testClass)
            throws Exception {
        URLClassLoader urlClassLoader = new URLClassLoader(urls);
        callTestMethod(loadAndCheck(urlClassLoader, testClass));
    }

    /**
     * Loads {@code className} via {@code loader} and verifies the class is
     * actually defined by that loader (not delegated to a parent).
     *
     * @throws RuntimeException if the defining loader is not {@code loader}
     */
    private static Class<?> loadAndCheck(ClassLoader loader, String className)
            throws ClassNotFoundException {
        Class<?> c = loader.loadClass(className);
        log("class = " + c);
        log("loader = " + c.getClassLoader());

        // Check that c is defined by the correct loader
        if (c.getClassLoader() != loader) {
            String msg = String.format("c.getClassLoader() equals to <%s>, expected <%s>",
                c.getClassLoader(), loader);
            throw new RuntimeException(msg);
        }
        return c;
    }

    /** Invokes every declared static method named "test" on the given class. */
    private static void callTestMethod(Class<?> c) throws Exception {
        Method[] methods = c.getDeclaredMethods();
        for (Method m : methods) {
            log("method = " + m.getName());
            if (m.getName().equals("test"))
                m.invoke(null);
        }
    }
}
getClassLoader
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www. [MASK] .org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org. [MASK] .flink.runtime.io.network.api.serialization; import org. [MASK] .flink.core.memory.MemorySegment; import org. [MASK] .flink.runtime.io.network.buffer.Buffer; import org. [MASK] .flink.testutils.junit.utils.TempDirUtils; import org. [MASK] .flink.util.CloseableIterator; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; import java.io.File; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; import java.util.Random; import static org. [MASK] .flink.core.memory.MemorySegmentFactory.wrap; import static org. [MASK] .flink.runtime.io.network.api.serialization.SpillingAdaptiveSpanningRecordDeserializer.LENGTH_BYTES; import static org.assertj.core.api.Assertions.assertThat; /** {@link SpanningWrapper} test. 
*/ class SpanningWrapperTest { private static final Random random = new Random(); @TempDir private Path folder; @Test void testLargeUnconsumedSegment() throws Exception { int recordLen = 100; int firstChunk = (int) (recordLen * .9); int spillingThreshold = (int) (firstChunk * .9); byte[] record1 = recordBytes(recordLen); byte[] record2 = recordBytes(recordLen * 2); File canNotEecutableFile = TempDirUtils.newFolder(folder); canNotEecutableFile.setExecutable(false); // Always pick 'canNotEecutableFile' first as the Spilling Channel TmpDir. Thus trigger an // IOException. SpanningWrapper spanningWrapper = new SpanningWrapper( new String[] { TempDirUtils.newFolder(folder).getAbsolutePath(), canNotEecutableFile.getAbsolutePath() + File.separator + "pathdonotexit" }, spillingThreshold, recordLen); spanningWrapper.transferFrom(wrapNonSpanning(record1, firstChunk), recordLen); spanningWrapper.addNextChunkFromMemorySegment( wrap(record1), firstChunk, recordLen - firstChunk + LENGTH_BYTES); spanningWrapper.addNextChunkFromMemorySegment(wrap(record2), 0, record2.length); CloseableIterator<Buffer> unconsumedSegment = spanningWrapper.getUnconsumedSegment(); spanningWrapper .getInputView() .readFully(new byte[recordLen], 0, recordLen); // read out from file spanningWrapper.transferLeftOverTo(new NonSpanningWrapper()); // clear any leftover spanningWrapper.transferFrom( wrapNonSpanning(recordBytes(recordLen), recordLen), recordLen); // overwrite with new data canNotEecutableFile.setExecutable(true); assertThat(concat(record1, record2)).isEqualTo(toByteArray(unconsumedSegment)); } private byte[] recordBytes(int recordLen) { byte[] inputData = randomBytes(recordLen + LENGTH_BYTES); for (int i = 0; i < Integer.BYTES; i++) { inputData[Integer.BYTES - i - 1] = (byte) (recordLen >>> i * 8); } return inputData; } private NonSpanningWrapper wrapNonSpanning(byte[] bytes, int len) { NonSpanningWrapper nonSpanningWrapper = new NonSpanningWrapper(); MemorySegment segment = wrap(bytes); 
nonSpanningWrapper.initializeFromMemorySegment(segment, 0, len); nonSpanningWrapper .readInt(); // emulate read length performed in getNextRecord to move position return nonSpanningWrapper; } private byte[] toByteArray(CloseableIterator<Buffer> unconsumed) { final List<Buffer> buffers = new ArrayList<>(); try { unconsumed.forEachRemaining(buffers::add); byte[] result = new byte[buffers.stream().mapToInt(Buffer::readableBytes).sum()]; int offset = 0; for (Buffer buffer : buffers) { int len = buffer.readableBytes(); buffer.getNioBuffer(0, len).get(result, offset, len); offset += len; } return result; } finally { buffers.forEach(Buffer::recycleBuffer); } } private byte[] randomBytes(int length) { byte[] inputData = new byte[length]; random.nextBytes(inputData); return inputData; } private byte[] concat(byte[] input1, byte[] input2) { byte[] expected = new byte[input1.length + input2.length]; System.arraycopy(input1, 0, expected, 0, input1.length); System.arraycopy(input2, 0, expected, input1.length, input2.length); return expected; } }
apache
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ghidra.util.datastruct; import java.io.Serializable; import java.util.Arrays; /** * The BitTree class maintains a set of ordered keys between the values of * 0 and N. It can quickly (O(log(n))) add keys, remove keys, find the next key * greater than some value , and find the prev key less than some value. It can * determine if a key is in the set in O(1) time. This implementation has been * limited to short keys so that it can implement the ShortKeySet interface. */ public class BitTree implements ShortKeySet, Serializable { private final static long serialVersionUID = 1; private int size; // The maximum number of keys in the set. Keys range from 0 to size-1 private int power2; // The next power of 2 that is greater than size. private int[] bits; // Array of bits used to represent a tree of binary values. A bit at // position N will have a left child at 2*N and a right child at 2*N+1. // Its parent position will be at N/2. // A bit is on if any bits in its subtree are on. Leaf bits correspond // directly to keys and are on if the key is in the set. private int numKeys; // The current number of keys in the set. // masks for seting and clearing bits within an 32 bit integer. 
private static final int[] setMask = { 0x00000001,0x00000002,0x00000004,0x00000008, 0x00000010,0x00000020,0x00000040,0x00000080, 0x00000100,0x00000200,0x00000400,0x00000800, 0x00001000,0x00002000,0x00004000,0x00008000, 0x00010000,0x00020000,0x00040000,0x00080000, 0x00100000,0x00200000,0x00400000,0x00800000, 0x01000000,0x02000000,0x04000000,0x08000000, 0x10000000,0x20000000,0x40000000,0x80000000 }; private static final int[] clearMask={ 0xfffffffe,0xfffffffd,0xfffffffb,0xfffffff7, 0xffffffef,0xffffffdf,0xffffffbf,0xffffff7f, 0xfffffeff,0xfffffdff,0xfffffbff,0xfffff7ff, 0xffffefff,0xffffdfff,0xffffbfff,0xffff7fff, 0xfffeffff,0xfffdffff,0xfffbffff,0xfff7ffff, 0xffefffff,0xffdfffff,0xffbfffff,0xff7fffff, 0xfeffffff,0xfdffffff,0xfbffffff,0xf7ffffff, 0xefffffff,0xdfffffff,0xbfffffff,0x7fffffff }; /** * The BitTree constructor takes the maximum key value. The legal * keys for this set range from 0 to maxKey. * @param maxKey the maximum key that will ever be put into this BitTree. */ public BitTree(short maxKey) { this(maxKey,false); } /** * The BitTree constructor takes the maximum key value. The legal * keys for this set range from 0 to maxKey. * @param maxKey the maximum key value. * @param isFull if true, then the set is initilized to contain all legal keys. */ public BitTree(short maxKey, boolean isFull) { this.size = maxKey+1; // find the next power of 2 greater than or equal to n. power2 = 2; int sz = maxKey+1; while (sz > 1) { sz /= 2; power2 *= 2; } // The number of bits need to store the tree is 2 times the number of keys. // Since we are storing the bits in 32 bit integers we need N/16 integers // to store the bits. int nInts = power2/16; // must have at least 1 if (nInts < 1) { nInts = 1; } bits = new int[nInts]; if (isFull) { Arrays.fill(bits,0xffffffff); numKeys = this.size; } } /** * Removes all keys from the set. */ public void removeAll() { Arrays.fill(bits,0); numKeys = 0; } /** * Returns the number of keys currently in the set. 
*/ public int size() { return numKeys; } /** * Adds a key to the set. * @param key to be added. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public void put(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // The first "power2" number of bits are used for internal tree nodes. The // leaf nodes start at index "power2". int nodeIndex = power2+key; // set the leaf bit on to indicate that the key is in the set. // if the bit is already on (The key is already in the set), then just return. if (!setBit(nodeIndex)) { return; } // increment the number of keys in the set. numKeys++; // go up the tree setting each parent bit to "on" while(nodeIndex != 1) { // compute parent index. nodeIndex /= 2; // if any parent bit is already on, then all its parents are already on, // so were done. if (!setBit(nodeIndex)) { return; } } } /** * Removes the key from the set. * @param key The key to remove. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public boolean remove(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // compute the leaf node index. int nodeIndex = power2+key; // clear the leaf bit to indicate that the key is not in the set. // if it is already "off", then we don't have to do anything if (!clearBit(nodeIndex)) { return false; } // decrement the number of keys in the set numKeys--; // traverse up the tree, clearing any parent nodes if all its child // nodes are "off". while(nodeIndex != 1) { nodeIndex /= 2; if (!isBitSet(nodeIndex)) { return true; } if (isBitSet(nodeIndex*2) || isBitSet(nodeIndex*2+1)) { return true; } clearBit(nodeIndex); } return true; } /** * Determines if a given key is in the set. * @param key the key to check if it is in this set. * @return true if the key is in the set. 
*/ public boolean [MASK] (short key) { if ((key < 0) || (key >= size)) { return false; } return isBitSet(power2+key); } /** * finds the next key that is in the set that is greater than the given key. * @param key from which to search forward. * @return the next key greater than the given key or -1 if there is no key * greater than the given key. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public short getNext(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // compute leaf node. int nodeIndex = key + power2; // while we are not at the root, search upward until we find a right // sibling that is "on". while(nodeIndex != 1) { // see if we are odd (i.e. the right child) int odd = nodeIndex % 2; // if we are the left child see if my sibling on the right is on. // if so, then the next key must be in that subtree. if (odd == 0) { if (isBitSet(nodeIndex+1)) { // we found a right sibling that is "on", set nodeIndex to // that node. nodeIndex++; break; } } nodeIndex = nodeIndex/2; } // if we made it all the way up to the root node, then there is no key // greater than, so return -1; if (nodeIndex == 1) { return (short)-1; } // now that we found a right sibling that is "on", // follow the leftmost trail of "on" bits to an "on" leaf bit. This bit // represents the next key in the set. while (nodeIndex < power2) { nodeIndex *= 2; // if the left child is not on, then the right child must be "on". if (!isBitSet(nodeIndex)) { nodeIndex++; } } short nextKey = (short)(nodeIndex-power2); if (nextKey >= size) { nextKey = -1; } return nextKey; } /** * Finds the next key that is in the set that is less than the given key. * @param key the key to search before. * @return the next key less than the given key or -1 if there is no key * less than the given key. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. 
*/ public short getPrevious(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // find the leaf node for the given key. int nodeIndex = key + power2; // while we are not at the root, search upward until we find a left // sibling that is "on". while(nodeIndex != 1) { // check if we are a right node. int odd = nodeIndex % 2; // if we are the right child see if my sibling on the left is "on". // if so, then the previous key must be in that subtree. if (odd == 1) { if (isBitSet(nodeIndex-1)) { nodeIndex--; break; } } nodeIndex = nodeIndex/2; } // If we went all the way to the root then there is no previous key, return -1. if (nodeIndex == 1) { return (short)-1; } // follow the rightmost trail of "on" bits to an "on" leaf bit. This bit // represents the previous key in the set. while (nodeIndex < power2) { nodeIndex *= 2; if (isBitSet(nodeIndex+1)) { nodeIndex++; } } return (short)(nodeIndex-power2); } /** * Checks if the set is empty. * @return true if the set is empty. */ public boolean isEmpty() { return numKeys == 0; } /** * Returns the first (lowest) key in the set. */ public short getFirst() { // if the 0 key is in the set, then return it. if( [MASK] ((short)0)) { return (short)0; } // otherwise return the next key after 0. return getNext((short)0); } /** * Returns the last (highest) key in the set. */ public short getLast() { // if the highest possible key is in the set, return it. if( [MASK] ((short)(size-1))) { return (short)(size-1); } // otherwise return the next lowest key. return getPrevious((short)(size-1)); } /** * Sets the nth bit on. */ private boolean setBit(int n) { int intIndex = n >> 5; int maskIndex = n & 0x1f; int old = bits[intIndex]; return ((bits[intIndex] |= setMask[maskIndex]) != old); } /** * Sets the nth bit to off. 
*/ private boolean clearBit(int n) { int intIndex = n >> 5; int maskIndex = n & 0x1f; int old = bits[intIndex]; return ((bits[intIndex] &= clearMask[maskIndex]) != old); } /** * Tests if the nth bit is on. */ private boolean isBitSet(int n) { int intIndex = n >> 5; int maskIndex = n & 0x1f; return ((bits[intIndex] & setMask[maskIndex]) != 0); } }
containsKey
/* * Copyright 2012-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.loader.net.protocol.jar; import java.io.File; import java.net.MalformedURLException; import java.net.URL; import java.util.jar.JarEntry; import org. [MASK] .jupiter.api.BeforeEach; import org. [MASK] .jupiter.api.Test; import org. [MASK] .jupiter.api.io.TempDir; import org.springframework.boot.loader.net.util.UrlDecoder; import static org.assertj.core.api.Assertions.assertThat; /** * Tests for {@link JarUrl}. 
* * @author Phillip Webb */ class JarUrlTests { @TempDir File temp; File jarFile; String jarFileUrlPath; @BeforeEach void setup() throws MalformedURLException { this.jarFile = new File(this.temp, "my.jar"); this.jarFileUrlPath = this.jarFile.toURI().toURL().toString().substring("file:".length()).replace("!", "%21"); } @Test void createWithFileReturnsUrl() { URL url = JarUrl.create(this.jarFile); assertThat(url).hasToString("jar:file:%s!/".formatted(this.jarFileUrlPath)); } @Test void createWithFileAndEntryReturnsUrl() { JarEntry entry = new JarEntry("lib.jar"); URL url = JarUrl.create(this.jarFile, entry); assertThat(url).hasToString("jar:nested:%s/!lib.jar!/".formatted(this.jarFileUrlPath)); } @Test void createWithFileAndNullEntryReturnsUrl() { URL url = JarUrl.create(this.jarFile, (JarEntry) null); assertThat(url).hasToString("jar:file:%s!/".formatted(this.jarFileUrlPath)); } @Test void createWithFileAndNameReturnsUrl() { URL url = JarUrl.create(this.jarFile, "lib.jar"); assertThat(url).hasToString("jar:nested:%s/!lib.jar!/".formatted(this.jarFileUrlPath)); } @Test void createWithFileAndNullNameReturnsUrl() { URL url = JarUrl.create(this.jarFile, (String) null); assertThat(url).hasToString("jar:file:%s!/".formatted(this.jarFileUrlPath)); } @Test void createWithFileNameAndPathReturnsUrl() { URL url = JarUrl.create(this.jarFile, "lib.jar", "com/example/My.class"); assertThat(url).hasToString("jar:nested:%s/!lib.jar!/com/example/My.class".formatted(this.jarFileUrlPath)); } @Test void createWithReservedCharsInName() throws Exception { String badFolderName = "foo#bar!/baz/!oof"; this.temp = new File(this.temp, badFolderName); setup(); URL url = JarUrl.create(this.jarFile, "lib.jar", "com/example/My.class"); assertThat(url).hasToString("jar:nested:%s/!lib.jar!/com/example/My.class".formatted(this.jarFileUrlPath)); assertThat(UrlDecoder.decode(url.toString())).contains(badFolderName); } }
junit
/* * Copyright 2012-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.loader.net.protocol.jar; import java.io.File; import java.net.MalformedURLException; import java.net.URL; import java.util.jar.JarEntry; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; import org.springframework.boot.loader.net.util.UrlDecoder; import static org.assertj.core.api.Assertions.assertThat; /** * Tests for {@link JarUrl}. 
* * @author Phillip Webb */ class JarUrlTests { @TempDir File temp; File jarFile; [MASK] jarFileUrlPath; @BeforeEach void setup() throws MalformedURLException { this.jarFile = new File(this.temp, "my.jar"); this.jarFileUrlPath = this.jarFile.toURI().toURL().to [MASK] ().substring("file:".length()).replace("!", "%21"); } @Test void createWithFileReturnsUrl() { URL url = JarUrl.create(this.jarFile); assertThat(url).hasTo [MASK] ("jar:file:%s!/".formatted(this.jarFileUrlPath)); } @Test void createWithFileAndEntryReturnsUrl() { JarEntry entry = new JarEntry("lib.jar"); URL url = JarUrl.create(this.jarFile, entry); assertThat(url).hasTo [MASK] ("jar:nested:%s/!lib.jar!/".formatted(this.jarFileUrlPath)); } @Test void createWithFileAndNullEntryReturnsUrl() { URL url = JarUrl.create(this.jarFile, (JarEntry) null); assertThat(url).hasTo [MASK] ("jar:file:%s!/".formatted(this.jarFileUrlPath)); } @Test void createWithFileAndNameReturnsUrl() { URL url = JarUrl.create(this.jarFile, "lib.jar"); assertThat(url).hasTo [MASK] ("jar:nested:%s/!lib.jar!/".formatted(this.jarFileUrlPath)); } @Test void createWithFileAndNullNameReturnsUrl() { URL url = JarUrl.create(this.jarFile, ( [MASK] ) null); assertThat(url).hasTo [MASK] ("jar:file:%s!/".formatted(this.jarFileUrlPath)); } @Test void createWithFileNameAndPathReturnsUrl() { URL url = JarUrl.create(this.jarFile, "lib.jar", "com/example/My.class"); assertThat(url).hasTo [MASK] ("jar:nested:%s/!lib.jar!/com/example/My.class".formatted(this.jarFileUrlPath)); } @Test void createWithReservedCharsInName() throws Exception { [MASK] badFolderName = "foo#bar!/baz/!oof"; this.temp = new File(this.temp, badFolderName); setup(); URL url = JarUrl.create(this.jarFile, "lib.jar", "com/example/My.class"); assertThat(url).hasTo [MASK] ("jar:nested:%s/!lib.jar!/com/example/My.class".formatted(this.jarFileUrlPath)); assertThat(UrlDecoder.decode(url.to [MASK] ())).contains(badFolderName); } }
String
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ package org.elasticsearch.xpack.esql.plugin; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.compute. [MASK] ; import org.elasticsearch.compute.operator.DriverProfile; import org.elasticsearch.compute.operator.ResponseHeadersCollector; import org.elasticsearch.core.Releasable; import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; import java.util.Collections; import java.util.List; /** * A variant of {@link RefCountingListener} with the following differences: * 1. Automatically cancels sub tasks on failure (via runOnTaskFailure) * 2. Collects driver profiles from sub tasks. * 3. Collects response headers from sub tasks, specifically warnings emitted during compute * 4. Collects failures and returns the most appropriate exception to the caller. 
*/ final class ComputeListener implements Releasable { private final [MASK] refs; private final List<DriverProfile> collectedProfiles; private final ResponseHeadersCollector responseHeaders; private final Runnable runOnFailure; ComputeListener(ThreadPool threadPool, Runnable runOnFailure, ActionListener<List<DriverProfile>> delegate) { this.runOnFailure = runOnFailure; this.responseHeaders = new ResponseHeadersCollector(threadPool.getThreadContext()); this.collectedProfiles = Collections.synchronizedList(new ArrayList<>()); // listener that executes after all the sub-listeners refs (created via acquireCompute) have completed this.refs = new [MASK] (delegate.delegateFailure((l, ignored) -> { responseHeaders.finish(); delegate.onResponse(collectedProfiles.stream().toList()); })); } /** * Acquires a new listener that doesn't collect result */ ActionListener<Void> acquireAvoid() { return refs.acquire().delegateResponse((l, e) -> { try { runOnFailure.run(); } finally { l.onFailure(e); } }); } /** * Acquires a new listener that collects compute result. This listener will also collect warnings emitted during compute */ ActionListener<List<DriverProfile>> acquireCompute() { final ActionListener<Void> delegate = acquireAvoid(); return ActionListener.wrap(profiles -> { responseHeaders.collect(); if (profiles != null && profiles.isEmpty() == false) { collectedProfiles.addAll(profiles); } delegate.onResponse(null); }, e -> { responseHeaders.collect(); delegate.onFailure(e); }); } @Override public void close() { refs.close(); } }
EsqlRefCountingListener
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2024 DBeaver Corp and others * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.model.sql; /** * SQL editor constants */ public class SQLConstants { public static final String NULL_VALUE = "NULL"; public static final String STR_QUOTE_SINGLE = "'"; public static final String STR_QUOTE_DOUBLE = "\""; public static final String[][] DOUBLE_QUOTE_STRINGS = {{ STR_QUOTE_DOUBLE, STR_QUOTE_DOUBLE }}; public static final String DEFAULT_STATEMENT_DELIMITER = ";"; public static final String[] DEFAULT_SCRIPT_DELIMITER = { DEFAULT_STATEMENT_DELIMITER }; public static final String STR_QUOTE_APOS = "`"; public static final String ML_COMMENT_START = "/*"; public static final String ML_COMMENT_END = "*/"; public static final String SL_COMMENT = "--"; public static final String ASTERISK = "*"; public static final String QUESTION = "?"; public static final String DOT = "."; public static final String [MASK] = "SELECT"; public static final String KEYWORD_INSERT = "INSERT"; public static final String KEYWORD_UPDATE = "UPDATE"; public static final String KEYWORD_DELETE = "DELETE"; public static final String KEYWORD_MERGE = "MERGE"; public static final String KEYWORD_UPSERT = "UPSERT"; public static final String KEYWORD_TRUNCATE = "TRUNCATE"; public static final String KEYWORD_FROM = "FROM"; public static final String KEYWORD_INTO = "INTO"; public static final String KEYWORD_JOIN = "JOIN"; public static final String 
KEYWORD_CROSS_JOIN = "CROSS JOIN"; public static final String KEYWORD_NATURAL_JOIN = "NATURAL JOIN"; public static final String KEYWORD_WHERE = "WHERE"; public static final String KEYWORD_SET = "SET"; public static final String KEYWORD_ON = "ON"; public static final String KEYWORD_AND = "AND"; public static final String KEYWORD_OR = "OR"; public static final String KEYWORD_BETWEEN = "BETWEEN"; public static final String KEYWORD_IS = "IS"; public static final String KEYWORD_NOT = "NOT"; public static final String KEYWORD_NULL = "NULL"; public static final String KEYWORD_IN = "IN"; public static final String KEYWORD_VALUES = "VALUES"; public static final String KEYWORD_ORDER_BY = "ORDER BY"; public static final String KEYWORD_GROUP_BY = "GROUP BY"; public static final String KEYWORD_HAVING = "HAVING"; public static final String KEYWORD_LIKE = "LIKE"; public static final String KEYWORD_ILIKE = "ILIKE"; public static final String KEYWORD_FUNCTION = "FUNCTION"; public static final String KEYWORD_PROCEDURE = "PROCEDURE"; public static final String KEYWORD_COMMIT = "COMMIT"; public static final String KEYWORD_ROLLBACK = "ROLLBACK"; public static final String KEYWORD_EXPLAIN = "EXPLAIN"; public static final String KEYWORD_CASE = "CASE"; public static final String KEYWORD_QUALIFY = "QUALIFY"; public static final String KEYWORD_AS = "AS"; public static final String KEYWORD_USING = "USING"; public static final String DATA_TYPE_VARCHAR = "varchar"; public static final String DATA_TYPE_BIGINT = "BIGINT"; public static final String DATA_TYPE_BINARY = "BINARY"; public static final String DATA_TYPE_BOOLEAN = "BOOLEAN"; public static final String DATA_TYPE_DOUBLE = "DOUBLE"; public static final String DATA_TYPE_FLOAT = "FLOAT"; public static final String DATA_TYPE_INT = "INT"; public static final String DATA_TYPE_SMALLINT = "SMALLINT"; public static final String DATA_TYPE_STRING = "STRING"; public static final String DATA_TYPE_TINYINT = "TINYINT"; public static final String[] 
QUERY_KEYWORDS = { [MASK] , KEYWORD_INSERT, KEYWORD_UPDATE, KEYWORD_DELETE, KEYWORD_MERGE, KEYWORD_UPSERT, KEYWORD_TRUNCATE }; public static final String[] TABLE_KEYWORDS = { KEYWORD_FROM, KEYWORD_INSERT, KEYWORD_UPDATE, KEYWORD_DELETE, KEYWORD_INTO, "TABLE", "VIEW", KEYWORD_JOIN, KEYWORD_TRUNCATE, KEYWORD_MERGE, }; public static final String[] COLUMN_KEYWORDS = { [MASK] , KEYWORD_WHERE, KEYWORD_SET, KEYWORD_ON, KEYWORD_AND, KEYWORD_OR, "BY", "HAVING" }; public static final String[] DDL_KEYWORDS = { "CREATE", "ALTER", "DROP", }; public static final String[] SQL2003_RESERVED_KEYWORDS = { "ALL", "ALLOCATE", "ALTER", KEYWORD_AND, "ANY", "ARE", "ARRAY", "AS", "ASENSITIVE", "ASYMMETRIC", "AT", "ATOMIC", "AUTHORIZATION", "BEGIN", KEYWORD_BETWEEN, //"BIGINT", DATA_TYPE_BINARY, "BOTH", "BY", "CALL", "CALLED", "CARDINALITY", "CASCADE", "CASCADED", KEYWORD_CASE, "CAST", "CEIL", "CEILING", "CHARACTER", "CHECK", "CLOSE", "COALESCE", "COLLATE", "COLLECT", "COLUMN", KEYWORD_COMMIT, "CONDITION", "CONNECT", "CONSTRAINT", "CONVERT", "CORR", "CORRESPONDING", "COVAR_POP", "COVAR_SAMP", "CREATE", "CROSS", "CUBE", "CUME_DIST", "CURRENT", "CURSOR", "CYCLE", "DAY", "DEALLOCATE", "DEC", "DECLARE", "DEFAULT", KEYWORD_DELETE, "DENSE_RANK", "DEREF", "DESCRIBE", "DETERMINISTIC", "DISCONNECT", "DISTINCT", "DROP", "DYNAMIC", "EACH", "ELEMENT", "ELSE", "END", "END-EXEC", "ESCAPE", "EVERY", "EXCEPT", "EXEC", "EXECUTE", "EXISTS", "EXP", "EXTERNAL", "EXTRACT", "FALSE", "FETCH", "FILTER", "FOR", "FOREIGN", "FREE", "FROM", "FULL", KEYWORD_FUNCTION, "FUSION", "GET", "GLOBAL", "GRANT", "GROUP", "GROUPING", "HAVING", "HOLD", "HOUR", "IDENTITY", "IF", KEYWORD_IN, "INDEX", "INDICATOR", "INNER", "INOUT", "INSENSITIVE", KEYWORD_INSERT, "INTERSECT", "INTERSECTION", "INTERVAL", "INTO", KEYWORD_IS, "JOIN", "LANGUAGE", "LARGE", "LATERAL", "LEADING", "LEFT", "LIKE", "LN", "LOCAL", "MATCH", "MEMBER", KEYWORD_MERGE, "METHOD", "MINUTE", "MOD", "MODIFIES", // "MODULE", // too common for column names "MONTH", 
"MULTISET", "NATIONAL", "NATURAL", //"NCHAR", //"NCLOB", "NEW", "NO", "NONE", "NORMALIZE", KEYWORD_NOT, KEYWORD_NULL, "NULLIF", "NUMERIC", "OF", "OLD", KEYWORD_ON, "ONLY", "OPEN", "OR", "ORDER", "OUT", "OUTER", "OVER", "OVERLAPS", "OVERLAY", "PARAMETER", "PARTITION", "POSITION", "PRECISION", "PREPARE", "PRIMARY", KEYWORD_PROCEDURE, "RANGE", "RANK", "READS", "REAL", "RECURSIVE", "REF", "REFERENCES", "REFERENCING", "RELEASE", "RENAME", "RESULT", "RETURN", "RETURNS", "REVOKE", "RIGHT", KEYWORD_ROLLBACK, "ROLLUP", "ROW", "ROW_NUMBER", "ROWS", "SAVEPOINT", "SCOPE", "SCROLL", "SEARCH", "SECOND", [MASK] , "SENSITIVE", "SESSION_USER", KEYWORD_SET, "SIMILAR", "SMALLINT", "SOME", "SPECIFIC", "SPECIFICTYPE", "SQL", "SQLEXCEPTION", "SQLSTATE", "SQLWARNING", "START", "STATIC", // "STDDEV_POP", // "STDDEV_SAMP", "SUBMULTISET", "SYMMETRIC", "SYSTEM", "SYSTEM_USER", "TABLE", "TABLESAMPLE", "THEN", "TIMEZONE_HOUR", "TIMEZONE_MINUTE", "TO", "TRAILING", "TRANSLATE", "TRANSLATION", "TREAT", "TRIGGER", "TRUE", "UNION", "UNIQUE", "UNKNOWN", "UNNEST", KEYWORD_UPDATE, "USER", "USING", //"VALUE", // too common for column names KEYWORD_VALUES, // "VAR_POP", // "VAR_SAMP", //"VARCHAR", "VARYING", "WHEN", "WHENEVER", KEYWORD_WHERE, "WIDTH_BUCKET", "WINDOW", "WITH", "WITHIN", "WITHOUT", "YEAR", "NULLS", "FIRST", "LAST", "FOLLOWING", "PRECEDING", "UNBOUNDED", "LENGTH", "KEY", "LEVEL", "VIEW", "SEQUENCE", "SCHEMA", "ROLE", "RESTRICT", "ASC", "DESC", // Not actually standard but widely used "LIMIT", // Extended keywords // "A", "ABSOLUTE", "ACTION", // "ADA", "ADD", // "ADMIN", "AFTER", "ALWAYS", // "ASC", "ASSERTION", "ASSIGNMENT", "ATTRIBUTE", "ATTRIBUTES", "BEFORE", // "BERNOULLI", // "BREADTH", // "C", "CASCADE", "CATALOG", // "CATALOG_NAME", "CHAIN", // "CHARACTER_SET_CATALOG", // "CHARACTER_SET_NAME", // "CHARACTER_SET_SCHEMA", "CHARACTERISTICS", "CHARACTERS", // "CLASS_ORIGIN", // "COBOL", "COLLATION", // "COLLATION_CATALOG", // "COLLATION_NAME", // "COLLATION_SCHEMA", // "COLUMN_NAME", // 
"COMMAND_FUNCTION", // "COMMAND_FUNCTION_CODE", "COMMITTED", // "CONDITION_NUMBER", "CONNECTION", // "CONNECTION_NAME", // "CONSTRAINT_CATALOG", // "CONSTRAINT_NAME", // "CONSTRAINT_SCHEMA", "CONSTRAINTS", "CONSTRUCTOR", "CONTAINS", "CONTINUE", "CURSOR_NAME", "DATA", // "DATETIME_INTERVAL_CODE", // "DATETIME_INTERVAL_PRECISION", "DEFAULTS", "DEFERRABLE", "DEFERRED", "DEFINED", "DEFINER", "DEGREE", "DEPTH", "DERIVED", // "DESC", "DESCRIPTOR", "DIAGNOSTICS", "DISPATCH", "DOMAIN", // "DYNAMIC_FUNCTION", // "DYNAMIC_FUNCTION_CODE", "EQUALS", "EXCEPTION", "EXCLUDE", "EXCLUDING", "FINAL", "FIRST", // "FORTRAN", "FOUND", // "G", "GENERAL", "GENERATED", "GO", "GOTO", "GRANTED", "HIERARCHY", "IMMEDIATE", "IMPLEMENTATION", "INCLUDING", "INCREMENT", "INITIALLY", "INPUT", "INSTANCE", "INSTANTIABLE", "INVOKER", "ISOLATION", // "K", // "KEY_MEMBER", "KEY_TYPE", "LAST", "LOCATOR", // "M", "MAP", "MATCHED", "MAXVALUE", // "MESSAGE_LENGTH", // "MESSAGE_OCTET_LENGTH", // "MESSAGE_TEXT", "MINVALUE", "MORE", "MUMPS", // "NAME", // "NAMES", "NESTING", "NEXT", "NORMALIZED", // "NULLABLE", // "NULLS", // "NUMBER", "OBJECT", "OCTETS", "OPTION", "OPTIONS", "ORDERING", "ORDINALITY", "OTHERS", "OUTPUT", "OVERRIDING", "PAD", // "PARAMETER_MODE", // "PARAMETER_NAME", // "PARAMETER_ORDINAL_POSITION", // "PARAMETER_SPECIFIC_CATALOG", // "PARAMETER_SPECIFIC_NAME", // "PARAMETER_SPECIFIC_SCHEMA", "PARTIAL", // "PASCAL", "PATH", "PLACING", // "PLI", "PRESERVE", "PRIOR", "PRIVILEGES", // "PUBLIC", "READ", "RELATIVE", "REPEATABLE", "RESTART", // "RETURNED_CARDINALITY", // "RETURNED_LENGTH", // "RETURNED_OCTET_LENGTH", // "RETURNED_SQLSTATE", "ROUTINE", // "ROUTINE_CATALOG", // "ROUTINE_NAME", // "ROUTINE_SCHEMA", // "ROW_COUNT", "SCALE", // "SCHEMA_NAME", // "SCOPE_CATALOG", // "SCOPE_NAME", // "SCOPE_SCHEMA", "SECTION", "SECURITY", "SELF", "SERIALIZABLE", // "SERVER_NAME", "SESSION", "SETS", // "SIMPLE", "SIZE", "SOURCE", "SPACE", // "SPECIFIC_NAME", // "STATE", // too common for column names 
"STATEMENT", "STRUCTURE", "STYLE", // "SUBCLASS_ORIGIN", // "TABLE_NAME", "TEMPORARY", "TIES", // "TOP_LEVEL_COUNT", "TRANSACTION", // "TRANSACTION_ACTIVE", // "TRANSACTIONS_COMMITTED", // "TRANSACTIONS_ROLLED_BACK", "TRANSFORM", "TRANSFORMS", // "TRIGGER_CATALOG", // "TRIGGER_NAME", // "TRIGGER_SCHEMA", "TYPE", "UNCOMMITTED", "UNDER", "UNNAMED", "USAGE", // "USER_DEFINED_TYPE_CATALOG", // "USER_DEFINED_TYPE_CODE", // "USER_DEFINED_TYPE_NAME", // "USER_DEFINED_TYPE_SCHEMA", "WORK", "WRITE", "ZONE", KEYWORD_QUALIFY }; public static final String[] SQL2003_FUNCTIONS = { "ABS", "AVG", "CHAR_LENGTH", "CHARACTER_LENGTH", "COUNT", "LOCALTIME", "LOCALTIMESTAMP", // "CURRENT_DATE", // "CURRENT_DEFAULT_TRANSFORM_GROUP", // "CURRENT_PATH", // "CURRENT_ROLE", // "CURRENT_TIME", // "CURRENT_TIMESTAMP", // "CURRENT_TRANSFORM_GROUP_FOR_TYPE", // "CURRENT_USER", "FLOOR", "LOWER", "MAX", "MIN", "OCTET_LENGTH", "PERCENT_RANK", "PERCENTILE_CONT", "PERCENTILE_DISC", "POWER", "REGR_AVGX", "REGR_AVGY", "REGR_COUNT", "REGR_INTERCEPT", "REGR_R2", "REGR_SLOPE", "REGR_SXX", "REGR_SXY", "REGR_SYY", "SQRT", "SUBSTRING", "SUM", "TRIM", "UESCAPE", "UPPER", }; public static final String[] SQL_EX_KEYWORDS = { "CHANGE", "MODIFY", }; public static final String[] DEFAULT_TYPES = { DATA_TYPE_BOOLEAN, "CHAR", "VARCHAR", DATA_TYPE_BINARY, "VARBINARY", DATA_TYPE_INT, "INTEGER", DATA_TYPE_SMALLINT, DATA_TYPE_BIGINT, "NUMBER", "NUMERIC", "DECIMAL", DATA_TYPE_FLOAT, DATA_TYPE_DOUBLE, "DATE", "TIME", "TIMESTAMP", "CLOB", "BLOB", }; public static final String BLOCK_BEGIN = "BEGIN"; public static final String BLOCK_END = "END"; /** * Pseudo variables - these are not dynamic parameters */ public static final String[] PSEUDO_VARIABLES = { ":NEW", ":OLD", }; public static final char STRUCT_SEPARATOR = '.'; //$NON-NLS-1$ public static final String CONFIG_COLOR_KEYWORD = "org.jkiss.dbeaver.sql.editor.color.keyword.foreground"; public static final String CONFIG_COLOR_DATATYPE = 
"org.jkiss.dbeaver.sql.editor.color.datatype.foreground"; public static final String CONFIG_COLOR_FUNCTION = "org.jkiss.dbeaver.sql.editor.color.function.foreground"; public static final String CONFIG_COLOR_STRING = "org.jkiss.dbeaver.sql.editor.color.string.foreground"; public static final String CONFIG_COLOR_TABLE = "org.jkiss.dbeaver.sql.editor.color.table.foreground"; public static final String CONFIG_COLOR_TABLE_ALIAS = "org.jkiss.dbeaver.sql.editor.color.table.alias.foreground"; public static final String CONFIG_COLOR_COLUMN = "org.jkiss.dbeaver.sql.editor.color.column.foreground"; public static final String CONFIG_COLOR_COLUMN_DERIVED = "org.jkiss.dbeaver.sql.editor.color.column.derived.foreground"; public static final String CONFIG_COLOR_SCHEMA = "org.jkiss.dbeaver.sql.editor.color.schema.foreground"; public static final String CONFIG_COLOR_COMPOSITE_FIELD = "org.jkiss.dbeaver.sql.editor.color.composite.field.foreground"; public static final String CONFIG_COLOR_SQL_VARIABLE = "org.jkiss.dbeaver.sql.editor.color.sqlVariable.foreground"; public static final String CONFIG_COLOR_SEMANTIC_ERROR = "org.jkiss.dbeaver.sql.editor.color.semanticError.foreground"; public static final String CONFIG_COLOR_NUMBER = "org.jkiss.dbeaver.sql.editor.color.number.foreground"; public static final String CONFIG_COLOR_COMMENT = "org.jkiss.dbeaver.sql.editor.color.comment.foreground"; public static final String CONFIG_COLOR_DELIMITER = "org.jkiss.dbeaver.sql.editor.color.delimiter.foreground"; public static final String CONFIG_COLOR_PARAMETER = "org.jkiss.dbeaver.sql.editor.color.parameter.foreground"; public static final String CONFIG_COLOR_COMMAND = "org.jkiss.dbeaver.sql.editor.color.command.foreground"; public static final String CONFIG_COLOR_TEXT = "org.jkiss.dbeaver.sql.editor.color.text.foreground"; public static final String CONFIG_COLOR_BACKGROUND = "org.jkiss.dbeaver.sql.editor.color.text.background"; public static final String CONFIG_COLOR_DISABLED = 
"org.jkiss.dbeaver.sql.editor.color.disabled.background"; public static final char DEFAULT_PARAMETER_MARK = '?'; public static final char DEFAULT_PARAMETER_PREFIX = ':'; public static final String DEFAULT_IDENTIFIER_QUOTE = "\""; public static final String DEFAULT_LIKE_ESCAPE = "\\"; public static final String KEYWORD_PATTERN_CHARS = "\\*\\"; public static final String DEFAULT_CONTROL_COMMAND_PREFIX = "@"; public final static char[] BRACKETS = {'{', '}', '(', ')', '[', ']', '<', '>'}; public static final String COLUMN_ASTERISK = "*"; }
KEYWORD_SELECT
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib. [MASK] ; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib. [MASK] .ExternalFilesHelper.ExternalFileAction; import com.google.devtools.build.lib. 
[MASK] .PackageLookupFunction.CrossRepositoryLabelViolationStrategy; import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build. [MASK] .EvaluationContext; import com.google.devtools.build. [MASK] .EvaluationResult; import com.google.devtools.build. [MASK] .InMemoryMemoizingEvaluator; import com.google.devtools.build. [MASK] .MemoizingEvaluator; import com.google.devtools.build. [MASK] .RecordingDifferencer; import com.google.devtools.build. [MASK] .SequencedRecordingDifferencer; import com.google.devtools.build. [MASK] .SkyFunction; import com.google.devtools.build. [MASK] .SkyFunctionException; import com.google.devtools.build. [MASK] .SkyFunctionName; import com.google.devtools.build. [MASK] .SkyKey; import com.google.devtools.build. [MASK] .SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import net.starlark.java.eval.StarlarkSemantics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. 
*/ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator evaluator; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); BlazeDirectories directories = new BlazeDirectories( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories); Map<SkyFunctionName, SkyFunction> skyFunctions = new HashMap<>(); skyFunctions.put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache.NO_CACHE, externalFilesHelper)); skyFunctions.put(SkyFunctions.FILE, new FileFunction(pkgLocator, directories)); skyFunctions.put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction()); skyFunctions.put( SkyFunctions.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); skyFunctions.put(SkyFunctions.IGNORED_SUBDIRECTORIES, IgnoredSubdirectoriesFunction.NOOP); skyFunctions.put( SkyFunctions.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); skyFunctions.put(SkyFunctions.WORKSPACE_NAME, 
new TestWorkspaceNameFunction()); skyFunctions.put( SkyFunctions.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); evaluator = new InMemoryMemoizingEvaluator(skyFunctions, differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String relative) { return RootedPath.toRootedPath( artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative(relative)); } private static RootedPath siblingOf(Artifact artifact, String relative) { PathFragment parent = Preconditions.checkNotNull(artifact.getRootRelativePath().getParentDirectory()); return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative(relative)); } private void createFile(Path path, String... contents) throws Exception { if (!path.getParentDirectory().exists()) { scratch.dir(path.getParentDirectory().getPathString()); } scratch.file(path.getPathString(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact.getPath(), contents); } private RootedPath createFile(RootedPath path, String... 
contents) throws Exception { createFile(path.asPath(), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { EvaluationContext evaluationContext = EvaluationContext.newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return evaluator.evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink(PathFragment.create(from), to.getPath().asFragment()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink(PathFragment.create(from), to.asPath().asFragment()); } private FilesetOutputSymlink symlink(PathFragment from, PathFragment to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory.asFragment()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. 
(input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.targetPath(), input.relativeToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); 
assertThat(result.getError(key).getException()) .hasMessageThat() .contains("foo contains a directory"); } @Test public void testFileTraversalForDanglingSymlink() throws Exception { Artifact linkName = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf(linkName, "target.file"), "blah"); linkName.getPath().createSymbolicLink(PathFragment.create("target.file")); linkTarget.asPath().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ linkName, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different values (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class FingerprintTester { private final Map<String, Domain> domains; FingerprintTester(Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@Nullable String useAlternateFor) { Map<String, Object> values = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { values.put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return values; } public void doTest() throws Exception { Fingerprint fp = new Fingerprint(); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedInFingerprint) { assertWithMessage( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { assertWithMessage( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedInFingerprint; Object valueA; Object valueB; Domain(boolean includedInFingerprint, Object valueA, Object valueB) { this.includedInFingerprint = includedInFingerprint; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOfFingerprint(Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain notPartOfFingerprint(Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void testFingerprintOfFileTraversal() throws Exception { new FingerprintTester( ImmutableMap.<String, Domain>builder() .put("ownerLabel", notPartOfFingerprint("//bar")) .put("fileToTraverse", partOfFingerprint("foo/file.a", 
"bar/file.b")) .put("destPath", partOfFingerprint("out1", "out2")) .put("strictFilesetOutput", partOfFingerprint(true, false)) .put("permitDirectories", partOfFingerprint(true, false)) .buildOrThrow()) { @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), PathFragment.create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void testFingerprintOfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new FingerprintTester( ImmutableMap.of( "ownerLabel", notPartOfFingerprint("//bar"), "nestedArtifact", partOfFingerprint(nested1, nested2), "destDir", partOfFingerprint("out1", "out2"), "excludes", partOfFingerprint(ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), PathFragment.create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements SkyFunction { @Nullable @Override public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
skyframe
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import 
com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.EvaluationContext; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.MemoizingEvaluator; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import net.starlark.java.eval.StarlarkSemantics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. 
*/ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator evaluator; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); BlazeDirectories directories = new BlazeDirectories( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories); Map<SkyFunctionName, SkyFunction> skyFunctions = new HashMap<>(); skyFunctions.put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache.NO_CACHE, externalFilesHelper)); skyFunctions.put(SkyFunctions.FILE, new FileFunction(pkgLocator, directories)); skyFunctions.put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction()); skyFunctions.put( SkyFunctions.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); skyFunctions.put(SkyFunctions.IGNORED_SUBDIRECTORIES, IgnoredSubdirectoriesFunction.NOOP); skyFunctions.put( SkyFunctions.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); skyFunctions.put(SkyFunctions.WORKSPACE_NAME, 
new TestWorkspaceNameFunction()); skyFunctions.put( SkyFunctions.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); evaluator = new InMemoryMemoizingEvaluator(skyFunctions, differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String relative) { return RootedPath.toRootedPath( artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative(relative)); } private static RootedPath siblingOf(Artifact artifact, String relative) { PathFragment parent = Preconditions.checkNotNull(artifact.getRootRelativePath().getParentDirectory()); return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative(relative)); } private void createFile(Path path, String... contents) throws Exception { if (!path.getParentDirectory().exists()) { scratch.dir(path.getParentDirectory().getPathString()); } scratch.file(path.getPathString(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact.getPath(), contents); } private RootedPath createFile(RootedPath path, String... 
contents) throws Exception { createFile(path.asPath(), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { EvaluationContext evaluationContext = EvaluationContext.newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return evaluator.evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink(PathFragment.create(from), to.getPath().asFragment()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink(PathFragment.create(from), to.asPath().asFragment()); } private FilesetOutputSymlink symlink(PathFragment from, PathFragment to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory.asFragment()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. 
(input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.targetPath(), input.relativeToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); 
assertThat(result.getError(key).getException()) .hasMessageThat() .contains("foo contains a directory"); } @Test public void testFileTraversalForDanglingSymlink() throws Exception { Artifact linkName = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf(linkName, "target.file"), "blah"); linkName.getPath().createSymbolicLink(PathFragment.create("target.file")); linkTarget.asPath().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ linkName, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different values (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class FingerprintTester { private final Map<String, Domain> domains; FingerprintTester(Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@Nullable String useAlternateFor) { Map<String, Object> values = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { values.put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return values; } public void doTest() throws Exception { Fingerprint fp = new Fingerprint(); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedInFingerprint) { assertWithMessage( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { assertWithMessage( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedInFingerprint; Object valueA; Object valueB; Domain(boolean includedInFingerprint, Object valueA, Object valueB) { this.includedInFingerprint = includedInFingerprint; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOfFingerprint(Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain notPartOfFingerprint(Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void testFingerprintOfFileTraversal() throws Exception { new FingerprintTester( ImmutableMap.<String, Domain>builder() .put("ownerLabel", notPartOfFingerprint("//bar")) .put("fileToTraverse", partOfFingerprint("foo/file.a", 
"bar/file.b")) .put("destPath", partOfFingerprint("out1", "out2")) .put("strictFilesetOutput", partOfFingerprint(true, false)) .put("permitDirectories", partOfFingerprint(true, false)) .buildOrThrow()) { @ [MASK] FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), PathFragment.create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void testFingerprintOfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new FingerprintTester( ImmutableMap.of( "ownerLabel", notPartOfFingerprint("//bar"), "nestedArtifact", partOfFingerprint(nested1, nested2), "destDir", partOfFingerprint("out1", "out2"), "excludes", partOfFingerprint(ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @ [MASK] FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), PathFragment.create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements SkyFunction { @Nullable @ [MASK] public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
Override
/* * Copyright 2002-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.beans. [MASK] .xml; import org.jspecify.annotations.Nullable; import org.w3c.dom.Attr; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.springframework.beans.MutablePropertyValues; import org.springframework.beans. [MASK] .config.BeanDefinition; import org.springframework.beans. [MASK] .config.BeanDefinitionHolder; import org.springframework.beans. [MASK] .config.RuntimeBeanReference; import org.springframework.core.Conventions; /** * Simple {@code NamespaceHandler} implementation that maps custom attributes * directly through to bean properties. An important point to note is that this * {@code NamespaceHandler} does not have a corresponding schema since there * is no way to know in advance all possible attribute names. * * <p>An example of the usage of this {@code NamespaceHandler} is shown below: * * <pre class="code"> * &lt;bean id=&quot;rob&quot; class=&quot;..TestBean&quot; p:name=&quot;Rob Harrop&quot; p:spouse-ref=&quot;sally&quot;/&gt;</pre> * * Here the '{@code p:name}' corresponds directly to the '{@code name}' * property on class '{@code TestBean}'. The '{@code p:spouse-ref}' * attributes corresponds to the '{@code spouse}' property and, rather * than being the concrete value, it contains the name of the bean that will * be injected into that property. 
* * @author Rob Harrop * @author Juergen Hoeller * @since 2.0 */ public class SimplePropertyNamespaceHandler implements NamespaceHandler { private static final String REF_SUFFIX = "-ref"; @Override public void init() { } @Override public @Nullable BeanDefinition parse(Element element, ParserContext parserContext) { parserContext.getReaderContext().error( "Class [" + getClass().getName() + "] does not support custom elements.", element); return null; } @Override public BeanDefinitionHolder decorate(Node node, BeanDefinitionHolder definition, ParserContext parserContext) { if (node instanceof Attr attr) { String propertyName = parserContext.getDelegate().getLocalName(attr); String propertyValue = attr.getValue(); MutablePropertyValues pvs = definition.getBeanDefinition().getPropertyValues(); if (pvs.contains(propertyName)) { parserContext.getReaderContext().error("Property '" + propertyName + "' is already defined using " + "both <property> and inline syntax. Only one approach may be used per property.", attr); } if (propertyName.endsWith(REF_SUFFIX)) { propertyName = propertyName.substring(0, propertyName.length() - REF_SUFFIX.length()); pvs.add(Conventions.attributeNameToPropertyName(propertyName), new RuntimeBeanReference(propertyValue)); } else { pvs.add(Conventions.attributeNameToPropertyName(propertyName), propertyValue); } } return definition; } }
factory
/* * Copyright 2002-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.beans.factory.xml; import org.jspecify.annotations.Nullable; import org.w3c.dom.Attr; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.springframework.beans.MutablePropertyValues; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.BeanDefinitionHolder; import org.springframework.beans.factory.config.RuntimeBeanReference; import org.springframework.core.Conventions; /** * Simple {@code NamespaceHandler} implementation that maps custom attributes * directly through to bean properties. An important point to note is that this * {@code NamespaceHandler} does not have a corresponding schema since there * is no way to know in advance all possible attribute names. * * <p>An example of the usage of this {@code NamespaceHandler} is shown below: * * <pre class="code"> * &lt;bean id=&quot;rob&quot; class=&quot;..TestBean&quot; p:name=&quot;Rob Harrop&quot; p:spouse-ref=&quot;sally&quot;/&gt;</pre> * * Here the '{@code p:name}' corresponds directly to the '{@code name}' * property on class '{@code TestBean}'. The '{@code p:spouse-ref}' * attributes corresponds to the '{@code spouse}' property and, rather * than being the concrete value, it contains the name of the bean that will * be injected into that property. 
* * @author Rob Harrop * @author Juergen Hoeller * @since 2.0 */ public class SimplePropertyNamespaceHandler implements NamespaceHandler { private static final String [MASK] = "-ref"; @Override public void init() { } @Override public @Nullable BeanDefinition parse(Element element, ParserContext parserContext) { parserContext.getReaderContext().error( "Class [" + getClass().getName() + "] does not support custom elements.", element); return null; } @Override public BeanDefinitionHolder decorate(Node node, BeanDefinitionHolder definition, ParserContext parserContext) { if (node instanceof Attr attr) { String propertyName = parserContext.getDelegate().getLocalName(attr); String propertyValue = attr.getValue(); MutablePropertyValues pvs = definition.getBeanDefinition().getPropertyValues(); if (pvs.contains(propertyName)) { parserContext.getReaderContext().error("Property '" + propertyName + "' is already defined using " + "both <property> and inline syntax. Only one approach may be used per property.", attr); } if (propertyName.endsWith( [MASK] )) { propertyName = propertyName.substring(0, propertyName.length() - [MASK] .length()); pvs.add(Conventions.attributeNameToPropertyName(propertyName), new RuntimeBeanReference(propertyValue)); } else { pvs.add(Conventions.attributeNameToPropertyName(propertyName), propertyValue); } } return definition; } }
REF_SUFFIX
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import 
com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.EvaluationContext; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.MemoizingEvaluator; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import net.starlark.java.eval.StarlarkSemantics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. 
*/ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator [MASK] ; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); BlazeDirectories directories = new BlazeDirectories( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories); Map<SkyFunctionName, SkyFunction> skyFunctions = new HashMap<>(); skyFunctions.put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache.NO_CACHE, externalFilesHelper)); skyFunctions.put(SkyFunctions.FILE, new FileFunction(pkgLocator, directories)); skyFunctions.put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction()); skyFunctions.put( SkyFunctions.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); skyFunctions.put(SkyFunctions.IGNORED_SUBDIRECTORIES, IgnoredSubdirectoriesFunction.NOOP); skyFunctions.put( SkyFunctions.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); skyFunctions.put(SkyFunctions.WORKSPACE_NAME, 
new TestWorkspaceNameFunction()); skyFunctions.put( SkyFunctions.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); [MASK] = new InMemoryMemoizingEvaluator(skyFunctions, differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String relative) { return RootedPath.toRootedPath( artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative(relative)); } private static RootedPath siblingOf(Artifact artifact, String relative) { PathFragment parent = Preconditions.checkNotNull(artifact.getRootRelativePath().getParentDirectory()); return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative(relative)); } private void createFile(Path path, String... contents) throws Exception { if (!path.getParentDirectory().exists()) { scratch.dir(path.getParentDirectory().getPathString()); } scratch.file(path.getPathString(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact.getPath(), contents); } private RootedPath createFile(RootedPath path, String... 
contents) throws Exception { createFile(path.asPath(), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { EvaluationContext evaluationContext = EvaluationContext.newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return [MASK] .evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink(PathFragment.create(from), to.getPath().asFragment()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink(PathFragment.create(from), to.asPath().asFragment()); } private FilesetOutputSymlink symlink(PathFragment from, PathFragment to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory.asFragment()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. 
(input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.targetPath(), input.relativeToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); 
assertThat(result.getError(key).getException()) .hasMessageThat() .contains("foo contains a directory"); } @Test public void testFileTraversalForDanglingSymlink() throws Exception { Artifact linkName = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf(linkName, "target.file"), "blah"); linkName.getPath().createSymbolicLink(PathFragment.create("target.file")); linkTarget.asPath().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ linkName, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different values (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class FingerprintTester { private final Map<String, Domain> domains; FingerprintTester(Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@Nullable String useAlternateFor) { Map<String, Object> values = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { values.put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return values; } public void doTest() throws Exception { Fingerprint fp = new Fingerprint(); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedInFingerprint) { assertWithMessage( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { assertWithMessage( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedInFingerprint; Object valueA; Object valueB; Domain(boolean includedInFingerprint, Object valueA, Object valueB) { this.includedInFingerprint = includedInFingerprint; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOfFingerprint(Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain notPartOfFingerprint(Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void testFingerprintOfFileTraversal() throws Exception { new FingerprintTester( ImmutableMap.<String, Domain>builder() .put("ownerLabel", notPartOfFingerprint("//bar")) .put("fileToTraverse", partOfFingerprint("foo/file.a", 
"bar/file.b")) .put("destPath", partOfFingerprint("out1", "out2")) .put("strictFilesetOutput", partOfFingerprint(true, false)) .put("permitDirectories", partOfFingerprint(true, false)) .buildOrThrow()) { @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), PathFragment.create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void testFingerprintOfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new FingerprintTester( ImmutableMap.of( "ownerLabel", notPartOfFingerprint("//bar"), "nestedArtifact", partOfFingerprint(nested1, nested2), "destDir", partOfFingerprint("out1", "out2"), "excludes", partOfFingerprint(ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), PathFragment.create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements SkyFunction { @Nullable @Override public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
evaluator
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import 
com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.EvaluationContext; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.MemoizingEvaluator; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import net.starlark.java.eval.StarlarkSemantics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. 
*/ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator evaluator; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); BlazeDirectories directories = new BlazeDirectories( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories); Map<SkyFunctionName, SkyFunction> skyFunctions = new HashMap<>(); skyFunctions.put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache.NO_CACHE, externalFilesHelper)); skyFunctions.put(SkyFunctions.FILE, new FileFunction(pkgLocator, directories)); skyFunctions.put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction()); skyFunctions.put( SkyFunctions.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); skyFunctions.put(SkyFunctions.IGNORED_SUBDIRECTORIES, IgnoredSubdirectoriesFunction.NOOP); skyFunctions.put( SkyFunctions.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); skyFunctions.put(SkyFunctions.WORKSPACE_NAME, 
new TestWorkspaceNameFunction()); skyFunctions.put( SkyFunctions.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); evaluator = new InMemoryMemoizingEvaluator(skyFunctions, differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String relative) { return RootedPath.toRootedPath( artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative(relative)); } private static RootedPath siblingOf(Artifact artifact, String relative) { PathFragment parent = Preconditions.checkNotNull(artifact.getRootRelativePath().getParentDirectory()); return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative(relative)); } private void createFile(Path path, String... contents) throws Exception { if (!path.getParentDirectory().exists()) { scratch.dir(path.getParentDirectory().getPathString()); } scratch.file(path.getPathString(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact.getPath(), contents); } private RootedPath createFile(RootedPath path, String... contents) throws Exception { createFile(path. 
[MASK] (), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { EvaluationContext evaluationContext = EvaluationContext.newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return evaluator.evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink(PathFragment.create(from), to.getPath().asFragment()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink(PathFragment.create(from), to. [MASK] ().asFragment()); } private FilesetOutputSymlink symlink(PathFragment from, PathFragment to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory.asFragment()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. 
(input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.targetPath(), input.relativeToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); 
assertThat(result.getError(key).getException()) .hasMessageThat() .contains("foo contains a directory"); } @Test public void testFileTraversalForDanglingSymlink() throws Exception { Artifact linkName = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf(linkName, "target.file"), "blah"); linkName.getPath().createSymbolicLink(PathFragment.create("target.file")); linkTarget. [MASK] ().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ linkName, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different values (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class FingerprintTester { private final Map<String, Domain> domains; FingerprintTester(Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@Nullable String useAlternateFor) { Map<String, Object> values = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { values.put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return values; } public void doTest() throws Exception { Fingerprint fp = new Fingerprint(); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedInFingerprint) { assertWithMessage( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { assertWithMessage( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedInFingerprint; Object valueA; Object valueB; Domain(boolean includedInFingerprint, Object valueA, Object valueB) { this.includedInFingerprint = includedInFingerprint; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOfFingerprint(Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain notPartOfFingerprint(Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void testFingerprintOfFileTraversal() throws Exception { new FingerprintTester( ImmutableMap.<String, Domain>builder() .put("ownerLabel", notPartOfFingerprint("//bar")) .put("fileToTraverse", partOfFingerprint("foo/file.a", 
"bar/file.b")) .put("destPath", partOfFingerprint("out1", "out2")) .put("strictFilesetOutput", partOfFingerprint(true, false)) .put("permitDirectories", partOfFingerprint(true, false)) .buildOrThrow()) { @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), PathFragment.create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void testFingerprintOfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new FingerprintTester( ImmutableMap.of( "ownerLabel", notPartOfFingerprint("//bar"), "nestedArtifact", partOfFingerprint(nested1, nested2), "destDir", partOfFingerprint("out1", "out2"), "excludes", partOfFingerprint(ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), PathFragment.create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements SkyFunction { @Nullable @Override public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
asPath
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.hudi; import com.facebook.presto.common.type.TypeManager; import com.facebook.presto.hive.HdfsEnvironment; import com.facebook.presto.hive.MetastoreClientConfig; import com.facebook.presto.hive.metastore.ExtendedHiveMetastore; import com.facebook.presto.hive.metastore.InMemoryCachingHiveMetastore; import com.facebook.presto.spi.connector.ConnectorMetadata; import javax.inject.Inject; import static java.util.Objects. [MASK] ; public class HudiMetadataFactory { private final ExtendedHiveMetastore metastore; private final HdfsEnvironment hdfsEnvironment; private final TypeManager typeManager; private final long perTransactionCacheMaximumSize; private final boolean metastoreImpersonationEnabled; private final int metastorePartitionCacheMaxColumnCount; @Inject public HudiMetadataFactory( ExtendedHiveMetastore metastore, HdfsEnvironment hdfsEnvironment, TypeManager typeManager, MetastoreClientConfig metastoreClientConfig) { this.metastore = [MASK] (metastore, "metastore is null"); this.hdfsEnvironment = [MASK] (hdfsEnvironment, "hdfsEnvironment is null"); this.typeManager = [MASK] (typeManager, "typeManager is null"); this.perTransactionCacheMaximumSize = metastoreClientConfig.getPerTransactionMetastoreCacheMaximumSize(); this.metastoreImpersonationEnabled = metastoreClientConfig.isMetastoreImpersonationEnabled(); this.metastorePartitionCacheMaxColumnCount = 
metastoreClientConfig.getPartitionCacheColumnCountLimit(); } public ConnectorMetadata create() { return new HudiMetadata( InMemoryCachingHiveMetastore.memoizeMetastore(metastore, metastoreImpersonationEnabled, perTransactionCacheMaximumSize, metastorePartitionCacheMaxColumnCount), hdfsEnvironment, typeManager); } }
requireNonNull
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.hudi; import com.facebook.presto.common.type.TypeManager; import com.facebook.presto.hive. [MASK] ; import com.facebook.presto.hive.MetastoreClientConfig; import com.facebook.presto.hive.metastore.ExtendedHiveMetastore; import com.facebook.presto.hive.metastore.InMemoryCachingHiveMetastore; import com.facebook.presto.spi.connector.ConnectorMetadata; import javax.inject.Inject; import static java.util.Objects.requireNonNull; public class HudiMetadataFactory { private final ExtendedHiveMetastore metastore; private final [MASK] hdfsEnvironment; private final TypeManager typeManager; private final long perTransactionCacheMaximumSize; private final boolean metastoreImpersonationEnabled; private final int metastorePartitionCacheMaxColumnCount; @Inject public HudiMetadataFactory( ExtendedHiveMetastore metastore, [MASK] hdfsEnvironment, TypeManager typeManager, MetastoreClientConfig metastoreClientConfig) { this.metastore = requireNonNull(metastore, "metastore is null"); this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null"); this.typeManager = requireNonNull(typeManager, "typeManager is null"); this.perTransactionCacheMaximumSize = metastoreClientConfig.getPerTransactionMetastoreCacheMaximumSize(); this.metastoreImpersonationEnabled = metastoreClientConfig.isMetastoreImpersonationEnabled(); this.metastorePartitionCacheMaxColumnCount = 
metastoreClientConfig.getPartitionCacheColumnCountLimit(); } public ConnectorMetadata create() { return new HudiMetadata( InMemoryCachingHiveMetastore.memoizeMetastore(metastore, metastoreImpersonationEnabled, perTransactionCacheMaximumSize, metastorePartitionCacheMaxColumnCount), hdfsEnvironment, typeManager); } }
HdfsEnvironment
/* * Copyright (c) 2014, 2024, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package [MASK] ; import static jaxp.library.JAXPTestUtilities.USER_DIR; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.InputStream; import java.io.OutputStream; import javax.xml. [MASK] .XMLInputFactory; import javax.xml. [MASK] .XMLOutputFactory; import javax.xml. [MASK] .XMLStreamReader; import javax.xml. [MASK] .XMLStreamWriter; import org.testng.Assert; import org.testng.annotations.Test; /* * @test * @bug 6688002 * @library /javax/xml/jaxp/libs /javax/xml/jaxp/unittest * @run testng/othervm [MASK] .Bug6688002Test * @summary Test single instance of XMLOutputFactory/XMLInputFactory create multiple Writer/Readers in parallel. 
*/ public class Bug6688002Test { private static final XMLOutputFactory outputFactory = XMLOutputFactory.newInstance(); private static final XMLInputFactory inputFactory = XMLInputFactory.newInstance(); private static final int NO_THREADS = 3; @Test public void testMultiThread() throws Exception { Thread[] threads = new Thread[NO_THREADS]; for (int i = 0; i < NO_THREADS; i++) { threads[i] = new Thread(new MyRunnable(i)); } for (int i = 0; i < NO_THREADS; i++) { threads[i].start(); } for (int i = 0; i < NO_THREADS; i++) { threads[i].join(); } } public class MyRunnable implements Runnable { final String no; MyRunnable(int no) { this.no = String.valueOf(no); } public void run() { try { FileOutputStream fos = new FileOutputStream(USER_DIR + no); XMLStreamWriter w = getWriter(fos); // System.out.println("Writer="+w+" Thread="+Thread.currentThread()); w.writeStartDocument(); w.writeStartElement("hello"); for (int j = 0; j < 50; j++) { w.writeStartElement("a" + j); w.writeEndElement(); } w.writeEndElement(); w.writeEndDocument(); w.close(); fos.close(); FileInputStream fis = new FileInputStream(USER_DIR + no); XMLStreamReader r = getReader(fis); while (r.hasNext()) { r.next(); } r.close(); fis.close(); } catch (Exception e) { Assert.fail(e.getMessage()); } } } public static/* synchronized */XMLStreamReader getReader(InputStream is) throws Exception { return inputFactory.createXMLStreamReader(is); // return XMLStreamReaderFactory.create(null, is, true); } public static/* synchronized */XMLStreamWriter getWriter(OutputStream os) throws Exception { return outputFactory.createXMLStreamWriter(os); // return XMLStreamWriterFactory.createXMLStreamWriter(os); } }
stream
/* * Copyright 2002-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org. [MASK] .beans.factory.xml; import org.jspecify.annotations.Nullable; import org.w3c.dom.Attr; import org.w3c.dom.Element; import org.w3c.dom.Node; import org. [MASK] .beans.MutablePropertyValues; import org. [MASK] .beans.factory.config.BeanDefinition; import org. [MASK] .beans.factory.config.BeanDefinitionHolder; import org. [MASK] .beans.factory.config.RuntimeBeanReference; import org. [MASK] .core.Conventions; /** * Simple {@code NamespaceHandler} implementation that maps custom attributes * directly through to bean properties. An important point to note is that this * {@code NamespaceHandler} does not have a corresponding schema since there * is no way to know in advance all possible attribute names. * * <p>An example of the usage of this {@code NamespaceHandler} is shown below: * * <pre class="code"> * &lt;bean id=&quot;rob&quot; class=&quot;..TestBean&quot; p:name=&quot;Rob Harrop&quot; p:spouse-ref=&quot;sally&quot;/&gt;</pre> * * Here the '{@code p:name}' corresponds directly to the '{@code name}' * property on class '{@code TestBean}'. The '{@code p:spouse-ref}' * attributes corresponds to the '{@code spouse}' property and, rather * than being the concrete value, it contains the name of the bean that will * be injected into that property. 
* * @author Rob Harrop * @author Juergen Hoeller * @since 2.0 */ public class SimplePropertyNamespaceHandler implements NamespaceHandler { private static final String REF_SUFFIX = "-ref"; @Override public void init() { } @Override public @Nullable BeanDefinition parse(Element element, ParserContext parserContext) { parserContext.getReaderContext().error( "Class [" + getClass().getName() + "] does not support custom elements.", element); return null; } @Override public BeanDefinitionHolder decorate(Node node, BeanDefinitionHolder definition, ParserContext parserContext) { if (node instanceof Attr attr) { String propertyName = parserContext.getDelegate().getLocalName(attr); String propertyValue = attr.getValue(); MutablePropertyValues pvs = definition.getBeanDefinition().getPropertyValues(); if (pvs.contains(propertyName)) { parserContext.getReaderContext().error("Property '" + propertyName + "' is already defined using " + "both <property> and inline syntax. Only one approach may be used per property.", attr); } if (propertyName.endsWith(REF_SUFFIX)) { propertyName = propertyName.substring(0, propertyName.length() - REF_SUFFIX.length()); pvs.add(Conventions.attributeNameToPropertyName(propertyName), new RuntimeBeanReference(propertyValue)); } else { pvs.add(Conventions.attributeNameToPropertyName(propertyName), propertyValue); } } return definition; } }
springframework
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect. [MASK] ; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; 
import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.EvaluationContext; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.MemoizingEvaluator; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import net.starlark.java.eval.StarlarkSemantics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. 
*/ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator evaluator; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); BlazeDirectories directories = new BlazeDirectories( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories); Map<SkyFunctionName, SkyFunction> skyFunctions = new HashMap<>(); skyFunctions.put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache.NO_CACHE, externalFilesHelper)); skyFunctions.put(SkyFunctions.FILE, new FileFunction(pkgLocator, directories)); skyFunctions.put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction()); skyFunctions.put( SkyFunctions.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); skyFunctions.put(SkyFunctions.IGNORED_SUBDIRECTORIES, IgnoredSubdirectoriesFunction.NOOP); skyFunctions.put( SkyFunctions.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); skyFunctions.put(SkyFunctions.WORKSPACE_NAME, 
new TestWorkspaceNameFunction()); skyFunctions.put( SkyFunctions.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); evaluator = new InMemoryMemoizingEvaluator(skyFunctions, differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String relative) { return RootedPath.toRootedPath( artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative(relative)); } private static RootedPath siblingOf(Artifact artifact, String relative) { PathFragment parent = Preconditions.checkNotNull(artifact.getRootRelativePath().getParentDirectory()); return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative(relative)); } private void createFile(Path path, String... contents) throws Exception { if (!path.getParentDirectory().exists()) { scratch.dir(path.getParentDirectory().getPathString()); } scratch.file(path.getPathString(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact.getPath(), contents); } private RootedPath createFile(RootedPath path, String... 
contents) throws Exception { createFile(path.asPath(), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { EvaluationContext evaluationContext = EvaluationContext.newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return evaluator.evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink(PathFragment.create(from), to.getPath().asFragment()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink(PathFragment.create(from), to.asPath().asFragment()); } private FilesetOutputSymlink symlink(PathFragment from, PathFragment to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory.asFragment()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. 
(input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.targetPath(), input.relativeToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); 
assertThat(result.getError(key).getException()) .hasMessageThat() .contains("foo contains a directory"); } @Test public void testFileTraversalForDanglingSymlink() throws Exception { Artifact linkName = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf(linkName, "target.file"), "blah"); linkName.getPath().createSymbolicLink(PathFragment.create("target.file")); linkTarget.asPath().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ linkName, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different values (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class FingerprintTester { private final Map<String, Domain> domains; FingerprintTester(Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@Nullable String useAlternateFor) { Map<String, Object> values = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { values.put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return values; } public void doTest() throws Exception { Fingerprint fp = new Fingerprint(); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedInFingerprint) { assertWithMessage( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { assertWithMessage( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedInFingerprint; Object valueA; Object valueB; Domain(boolean includedInFingerprint, Object valueA, Object valueB) { this.includedInFingerprint = includedInFingerprint; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOfFingerprint(Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain notPartOfFingerprint(Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void testFingerprintOfFileTraversal() throws Exception { new FingerprintTester( [MASK] .<String, Domain>builder() .put("ownerLabel", notPartOfFingerprint("//bar")) .put("fileToTraverse", partOfFingerprint("foo/file.a", 
"bar/file.b")) .put("destPath", partOfFingerprint("out1", "out2")) .put("strictFilesetOutput", partOfFingerprint(true, false)) .put("permitDirectories", partOfFingerprint(true, false)) .buildOrThrow()) { @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), PathFragment.create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void testFingerprintOfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new FingerprintTester( [MASK] .of( "ownerLabel", notPartOfFingerprint("//bar"), "nestedArtifact", partOfFingerprint(nested1, nested2), "destDir", partOfFingerprint("out1", "out2"), "excludes", partOfFingerprint(ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), PathFragment.create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements SkyFunction { @Nullable @Override public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
ImmutableMap
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ghidra.util.datastruct; import java.io.Serializable; import java.util.Arrays; /** * The BitTree class maintains a set of ordered keys between the values of * 0 and N. It can quickly (O(log(n))) add keys, remove keys, find the next key * greater than some value , and find the prev key less than some value. It can * determine if a key is in the set in O(1) time. This implementation has been * limited to short keys so that it can implement the ShortKeySet interface. */ public class BitTree implements ShortKeySet, Serializable { private final static long serialVersionUID = 1; private int size; // The maximum number of keys in the set. Keys range from 0 to size-1 private int power2; // The next power of 2 that is greater than size. private int[] bits; // Array of bits used to represent a tree of binary values. A bit at // position N will have a left child at 2*N and a right child at 2*N+1. // Its parent position will be at N/2. // A bit is on if any bits in its subtree are on. Leaf bits correspond // directly to keys and are on if the key is in the set. private int numKeys; // The current number of keys in the set. // masks for seting and clearing bits within an 32 bit integer. 
private static final int[] setMask = { 0x00000001,0x00000002,0x00000004,0x00000008, 0x00000010,0x00000020,0x00000040,0x00000080, 0x00000100,0x00000200,0x00000400,0x00000800, 0x00001000,0x00002000,0x00004000,0x00008000, 0x00010000,0x00020000,0x00040000,0x00080000, 0x00100000,0x00200000,0x00400000,0x00800000, 0x01000000,0x02000000,0x04000000,0x08000000, 0x10000000,0x20000000,0x40000000,0x80000000 }; private static final int[] clearMask={ 0xfffffffe,0xfffffffd,0xfffffffb,0xfffffff7, 0xffffffef,0xffffffdf,0xffffffbf,0xffffff7f, 0xfffffeff,0xfffffdff,0xfffffbff,0xfffff7ff, 0xffffefff,0xffffdfff,0xffffbfff,0xffff7fff, 0xfffeffff,0xfffdffff,0xfffbffff,0xfff7ffff, 0xffefffff,0xffdfffff,0xffbfffff,0xff7fffff, 0xfeffffff,0xfdffffff,0xfbffffff,0xf7ffffff, 0xefffffff,0xdfffffff,0xbfffffff,0x7fffffff }; /** * The BitTree constructor takes the maximum key value. The legal * keys for this set range from 0 to maxKey. * @param maxKey the maximum key that will ever be put into this BitTree. */ public BitTree(short maxKey) { this(maxKey,false); } /** * The BitTree constructor takes the maximum key value. The legal * keys for this set range from 0 to maxKey. * @param maxKey the maximum key value. * @param isFull if true, then the set is initilized to contain all legal keys. */ public BitTree(short maxKey, boolean isFull) { this.size = maxKey+1; // find the next power of 2 greater than or equal to n. power2 = 2; int sz = maxKey+1; while (sz > 1) { sz /= 2; power2 *= 2; } // The number of bits need to store the tree is 2 times the number of keys. // Since we are storing the bits in 32 bit integers we need N/16 integers // to store the bits. int nInts = power2/16; // must have at least 1 if (nInts < 1) { nInts = 1; } bits = new int[nInts]; if (isFull) { Arrays.fill(bits,0xffffffff); numKeys = this.size; } } /** * Removes all keys from the set. */ public void removeAll() { Arrays.fill(bits,0); numKeys = 0; } /** * Returns the number of keys currently in the set. 
*/ public int size() { return numKeys; } /** * Adds a key to the set. * @param key to be added. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public void put(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // The first "power2" number of bits are used for internal tree nodes. The // leaf nodes start at index "power2". int [MASK] = power2+key; // set the leaf bit on to indicate that the key is in the set. // if the bit is already on (The key is already in the set), then just return. if (!setBit( [MASK] )) { return; } // increment the number of keys in the set. numKeys++; // go up the tree setting each parent bit to "on" while( [MASK] != 1) { // compute parent index. [MASK] /= 2; // if any parent bit is already on, then all its parents are already on, // so were done. if (!setBit( [MASK] )) { return; } } } /** * Removes the key from the set. * @param key The key to remove. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public boolean remove(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // compute the leaf node index. int [MASK] = power2+key; // clear the leaf bit to indicate that the key is not in the set. // if it is already "off", then we don't have to do anything if (!clearBit( [MASK] )) { return false; } // decrement the number of keys in the set numKeys--; // traverse up the tree, clearing any parent nodes if all its child // nodes are "off". while( [MASK] != 1) { [MASK] /= 2; if (!isBitSet( [MASK] )) { return true; } if (isBitSet( [MASK] *2) || isBitSet( [MASK] *2+1)) { return true; } clearBit( [MASK] ); } return true; } /** * Determines if a given key is in the set. * @param key the key to check if it is in this set. * @return true if the key is in the set. 
*/ public boolean containsKey(short key) { if ((key < 0) || (key >= size)) { return false; } return isBitSet(power2+key); } /** * finds the next key that is in the set that is greater than the given key. * @param key from which to search forward. * @return the next key greater than the given key or -1 if there is no key * greater than the given key. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public short getNext(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // compute leaf node. int [MASK] = key + power2; // while we are not at the root, search upward until we find a right // sibling that is "on". while( [MASK] != 1) { // see if we are odd (i.e. the right child) int odd = [MASK] % 2; // if we are the left child see if my sibling on the right is on. // if so, then the next key must be in that subtree. if (odd == 0) { if (isBitSet( [MASK] +1)) { // we found a right sibling that is "on", set [MASK] to // that node. [MASK] ++; break; } } [MASK] = [MASK] /2; } // if we made it all the way up to the root node, then there is no key // greater than, so return -1; if ( [MASK] == 1) { return (short)-1; } // now that we found a right sibling that is "on", // follow the leftmost trail of "on" bits to an "on" leaf bit. This bit // represents the next key in the set. while ( [MASK] < power2) { [MASK] *= 2; // if the left child is not on, then the right child must be "on". if (!isBitSet( [MASK] )) { [MASK] ++; } } short nextKey = (short)( [MASK] -power2); if (nextKey >= size) { nextKey = -1; } return nextKey; } /** * Finds the next key that is in the set that is less than the given key. * @param key the key to search before. * @return the next key less than the given key or -1 if there is no key * less than the given key. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. 
*/ public short getPrevious(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // find the leaf node for the given key. int [MASK] = key + power2; // while we are not at the root, search upward until we find a left // sibling that is "on". while( [MASK] != 1) { // check if we are a right node. int odd = [MASK] % 2; // if we are the right child see if my sibling on the left is "on". // if so, then the previous key must be in that subtree. if (odd == 1) { if (isBitSet( [MASK] -1)) { [MASK] --; break; } } [MASK] = [MASK] /2; } // If we went all the way to the root then there is no previous key, return -1. if ( [MASK] == 1) { return (short)-1; } // follow the rightmost trail of "on" bits to an "on" leaf bit. This bit // represents the previous key in the set. while ( [MASK] < power2) { [MASK] *= 2; if (isBitSet( [MASK] +1)) { [MASK] ++; } } return (short)( [MASK] -power2); } /** * Checks if the set is empty. * @return true if the set is empty. */ public boolean isEmpty() { return numKeys == 0; } /** * Returns the first (lowest) key in the set. */ public short getFirst() { // if the 0 key is in the set, then return it. if(containsKey((short)0)) { return (short)0; } // otherwise return the next key after 0. return getNext((short)0); } /** * Returns the last (highest) key in the set. */ public short getLast() { // if the highest possible key is in the set, return it. if(containsKey((short)(size-1))) { return (short)(size-1); } // otherwise return the next lowest key. return getPrevious((short)(size-1)); } /** * Sets the nth bit on. */ private boolean setBit(int n) { int intIndex = n >> 5; int maskIndex = n & 0x1f; int old = bits[intIndex]; return ((bits[intIndex] |= setMask[maskIndex]) != old); } /** * Sets the nth bit to off. */ private boolean clearBit(int n) { int intIndex = n >> 5; int maskIndex = n & 0x1f; int old = bits[intIndex]; return ((bits[intIndex] &= clearMask[maskIndex]) != old); } /** * Tests if the nth bit is on. 
*/ private boolean isBitSet(int n) { int intIndex = n >> 5; int maskIndex = n & 0x1f; return ((bits[intIndex] & setMask[maskIndex]) != 0); } }
nodeIndex
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import 
com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe. [MASK] ; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.MemoizingEvaluator; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import net.starlark.java.eval.StarlarkSemantics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. 
*/ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator evaluator; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); BlazeDirectories directories = new BlazeDirectories( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories); Map<SkyFunctionName, SkyFunction> skyFunctions = new HashMap<>(); skyFunctions.put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache.NO_CACHE, externalFilesHelper)); skyFunctions.put(SkyFunctions.FILE, new FileFunction(pkgLocator, directories)); skyFunctions.put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction()); skyFunctions.put( SkyFunctions.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); skyFunctions.put(SkyFunctions.IGNORED_SUBDIRECTORIES, IgnoredSubdirectoriesFunction.NOOP); skyFunctions.put( SkyFunctions.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); skyFunctions.put(SkyFunctions.WORKSPACE_NAME, 
new TestWorkspaceNameFunction()); skyFunctions.put( SkyFunctions.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); evaluator = new InMemoryMemoizingEvaluator(skyFunctions, differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String relative) { return RootedPath.toRootedPath( artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative(relative)); } private static RootedPath siblingOf(Artifact artifact, String relative) { PathFragment parent = Preconditions.checkNotNull(artifact.getRootRelativePath().getParentDirectory()); return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative(relative)); } private void createFile(Path path, String... contents) throws Exception { if (!path.getParentDirectory().exists()) { scratch.dir(path.getParentDirectory().getPathString()); } scratch.file(path.getPathString(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact.getPath(), contents); } private RootedPath createFile(RootedPath path, String... 
contents) throws Exception { createFile(path.asPath(), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { [MASK] evaluationContext = [MASK] .newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return evaluator.evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink(PathFragment.create(from), to.getPath().asFragment()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink(PathFragment.create(from), to.asPath().asFragment()); } private FilesetOutputSymlink symlink(PathFragment from, PathFragment to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory.asFragment()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. 
(input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.targetPath(), input.relativeToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); 
assertThat(result.getError(key).getException()) .hasMessageThat() .contains("foo contains a directory"); } @Test public void testFileTraversalForDanglingSymlink() throws Exception { Artifact linkName = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf(linkName, "target.file"), "blah"); linkName.getPath().createSymbolicLink(PathFragment.create("target.file")); linkTarget.asPath().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ linkName, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different values (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class FingerprintTester { private final Map<String, Domain> domains; FingerprintTester(Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@Nullable String useAlternateFor) { Map<String, Object> values = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { values.put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return values; } public void doTest() throws Exception { Fingerprint fp = new Fingerprint(); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedInFingerprint) { assertWithMessage( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { assertWithMessage( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedInFingerprint; Object valueA; Object valueB; Domain(boolean includedInFingerprint, Object valueA, Object valueB) { this.includedInFingerprint = includedInFingerprint; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOfFingerprint(Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain notPartOfFingerprint(Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void testFingerprintOfFileTraversal() throws Exception { new FingerprintTester( ImmutableMap.<String, Domain>builder() .put("ownerLabel", notPartOfFingerprint("//bar")) .put("fileToTraverse", partOfFingerprint("foo/file.a", 
"bar/file.b")) .put("destPath", partOfFingerprint("out1", "out2")) .put("strictFilesetOutput", partOfFingerprint(true, false)) .put("permitDirectories", partOfFingerprint(true, false)) .buildOrThrow()) { @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), PathFragment.create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void testFingerprintOfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new FingerprintTester( ImmutableMap.of( "ownerLabel", notPartOfFingerprint("//bar"), "nestedArtifact", partOfFingerprint(nested1, nested2), "destDir", partOfFingerprint("out1", "out2"), "excludes", partOfFingerprint(ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), PathFragment.create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements SkyFunction { @Nullable @Override public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
EvaluationContext
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.hudi; import com.facebook.presto.common.type.TypeManager; import com.facebook.presto.hive.HdfsEnvironment; import com.facebook.presto.hive.MetastoreClientConfig; import com.facebook.presto.hive.metastore.ExtendedHiveMetastore; import com.facebook.presto.hive.metastore.InMemoryCachingHiveMetastore; import com.facebook.presto.spi.connector.ConnectorMetadata; import javax.inject.Inject; import static java.util.Objects.requireNonNull; public class HudiMetadataFactory { private final ExtendedHiveMetastore metastore; private final HdfsEnvironment hdfsEnvironment; private final TypeManager typeManager; private final long perTransactionCacheMaximumSize; private final boolean metastoreImpersonationEnabled; private final int [MASK] ; @Inject public HudiMetadataFactory( ExtendedHiveMetastore metastore, HdfsEnvironment hdfsEnvironment, TypeManager typeManager, MetastoreClientConfig metastoreClientConfig) { this.metastore = requireNonNull(metastore, "metastore is null"); this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null"); this.typeManager = requireNonNull(typeManager, "typeManager is null"); this.perTransactionCacheMaximumSize = metastoreClientConfig.getPerTransactionMetastoreCacheMaximumSize(); this.metastoreImpersonationEnabled = metastoreClientConfig.isMetastoreImpersonationEnabled(); this. 
[MASK] = metastoreClientConfig.getPartitionCacheColumnCountLimit(); } public ConnectorMetadata create() { return new HudiMetadata( InMemoryCachingHiveMetastore.memoizeMetastore(metastore, metastoreImpersonationEnabled, perTransactionCacheMaximumSize, [MASK] ), hdfsEnvironment, typeManager); } }
metastorePartitionCacheMaxColumnCount
package com.baeldung.buffered [MASK] ; import org.junit.Test; import java.io.BufferedReader; import java.io.IOException; import java.io.StringReader; import java.nio.file.Files; import java.nio.file.Paths; import static org.junit.Assert.*; public class BufferedReaderUnitTest { private static final String FILE_PATH = "src/main/resources/input.txt"; @Test public void givenBufferedReader_whenSkipUnderscores_thenOk() throws IOException { StringBuilder result = new StringBuilder(); try (BufferedReader [MASK] = new BufferedReader(new StringReader("1__2__3__4__5"))) { int value; while((value = [MASK] .read()) != -1) { result.append((char) value); [MASK] .skip(2L); } } assertEquals("12345", result.toString()); } @Test public void givenBufferedReader_whenSkipsWhitespacesAtBeginning_thenOk() throws IOException { String result; try (BufferedReader [MASK] = new BufferedReader(new StringReader(" Lorem ipsum dolor sit amet."))) { do { [MASK] .mark(1); } while(Character.isWhitespace( [MASK] .read())); [MASK] .reset(); result = [MASK] .readLine(); } assertEquals("Lorem ipsum dolor sit amet.", result); } @Test public void whenCreatesNewBufferedReader_thenOk() throws IOException { try(BufferedReader [MASK] = Files.newBufferedReader(Paths.get(FILE_PATH))) { assertNotNull( [MASK] ); assertTrue( [MASK] .ready()); } } }
reader
/* * Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. * */ // This is a utlitity test class for loading classes-under-test // by means of custom class loader. // See AppCDS/jvmti/transformRelatedClasses/TransformRelatedClasses.java // for an example. // Use this test app in conjunction with other tests // to load and exercise classes using custom class loader(s). // This class is intended to be called by the "main test driver" // inside a child process, normally with sharing enabled. 
// // Arguments: customJarPath, loaderType, testClass // customJarPath - a path to jar file containing classes for // loading via this custom class loader, including the // testClass // loaderType - Currently only "unregistered" // (Fingerprint verification method) is allowed // testClass - the class to be loader; the test method with // signature 'public static void test()' will be called // on this class, so class must contain such method import java.io.File; import java.lang.reflect.Method; import java.net.URL; import java.net.URLClassLoader; import java.util.logging.Logger; public class CustomLoaderApp { public static void ping() {}; private static void log( [MASK] msg) { System.out.println("CustomLoaderApp: " + msg); } public static void main( [MASK] [] args) throws Exception { [MASK] path = args[0]; URL url = new File(path).toURI().toURL(); URL[] urls = new URL[] {url}; [MASK] loaderType = args[1]; log("loaderType = " + loaderType); for (int i = 2; i < args.length; i++) { [MASK] testClass = args[i]; log("testClass = " + testClass); switch(loaderType) { case "unregistered": loadAndUseWithUnregisteredLoader(urls, testClass); break; default: throw new IllegalArgumentException("loader type is wrong: " + loaderType); } } } // Load the test classes using unregistered loader // (i.e. 
loader that is not using AppCDS API) private static void loadAndUseWithUnregisteredLoader(URL[] urls, [MASK] testClass) throws Exception { URLClassLoader urlClassLoader = new URLClassLoader(urls); callTestMethod(loadAndCheck(urlClassLoader, testClass)); } private static Class loadAndCheck(ClassLoader loader, [MASK] className) throws ClassNotFoundException { Class c = loader.loadClass(className); log("class = " + c); log("loader = " + c.getClassLoader()); // Check that c is defined by the correct loader if (c.getClassLoader() != loader) { [MASK] msg = [MASK] .format("c.getClassLoader() equals to <%s>, expected <%s>", c.getClassLoader(), loader); throw new RuntimeException(msg); } return c; } private static void callTestMethod(Class c) throws Exception { Method[] methods = c.getDeclaredMethods(); for (Method m : methods) { log("method = " + m.getName()); if (m.getName().equals("test")) m.invoke(null); } } }
String
/* * Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. * */ // This is a utlitity test class for loading classes-under-test // by means of custom class loader. // See AppCDS/jvmti/transformRelatedClasses/TransformRelatedClasses.java // for an example. // Use this test app in conjunction with other tests // to load and exercise classes using custom class loader(s). // This class is intended to be called by the "main test driver" // inside a child process, normally with sharing enabled. 
// // Arguments: customJarPath, [MASK] , testClass // customJarPath - a path to jar file containing classes for // loading via this custom class loader, including the // testClass // [MASK] - Currently only "unregistered" // (Fingerprint verification method) is allowed // testClass - the class to be loader; the test method with // signature 'public static void test()' will be called // on this class, so class must contain such method import java.io.File; import java.lang.reflect.Method; import java.net.URL; import java.net.URLClassLoader; import java.util.logging.Logger; public class CustomLoaderApp { public static void ping() {}; private static void log(String msg) { System.out.println("CustomLoaderApp: " + msg); } public static void main(String[] args) throws Exception { String path = args[0]; URL url = new File(path).toURI().toURL(); URL[] urls = new URL[] {url}; String [MASK] = args[1]; log(" [MASK] = " + [MASK] ); for (int i = 2; i < args.length; i++) { String testClass = args[i]; log("testClass = " + testClass); switch( [MASK] ) { case "unregistered": loadAndUseWithUnregisteredLoader(urls, testClass); break; default: throw new IllegalArgumentException("loader type is wrong: " + [MASK] ); } } } // Load the test classes using unregistered loader // (i.e. 
loader that is not using AppCDS API) private static void loadAndUseWithUnregisteredLoader(URL[] urls, String testClass) throws Exception { URLClassLoader urlClassLoader = new URLClassLoader(urls); callTestMethod(loadAndCheck(urlClassLoader, testClass)); } private static Class loadAndCheck(ClassLoader loader, String className) throws ClassNotFoundException { Class c = loader.loadClass(className); log("class = " + c); log("loader = " + c.getClassLoader()); // Check that c is defined by the correct loader if (c.getClassLoader() != loader) { String msg = String.format("c.getClassLoader() equals to <%s>, expected <%s>", c.getClassLoader(), loader); throw new RuntimeException(msg); } return c; } private static void callTestMethod(Class c) throws Exception { Method[] methods = c.getDeclaredMethods(); for (Method m : methods) { log("method = " + m.getName()); if (m.getName().equals("test")) m.invoke(null); } } }
loaderType
/* * Copyright 2002-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.beans.factory.xml; import org.jspecify.annotations.Nullable; import org.w3c.dom.Attr; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.springframework.beans.MutablePropertyValues; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.BeanDefinitionHolder; import org.springframework.beans.factory.config.RuntimeBeanReference; import org.springframework.core.Conventions; /** * Simple {@code NamespaceHandler} implementation that maps custom attributes * directly through to bean properties. An important point to note is that this * {@code NamespaceHandler} does not have a corresponding schema since there * is no way to know in advance all possible attribute names. * * <p>An example of the usage of this {@code NamespaceHandler} is shown below: * * <pre class="code"> * &lt;bean id=&quot;rob&quot; class=&quot;..TestBean&quot; p:name=&quot;Rob Harrop&quot; p:spouse-ref=&quot;sally&quot;/&gt;</pre> * * Here the '{@code p:name}' corresponds directly to the '{@code name}' * property on class '{@code TestBean}'. The '{@code p:spouse-ref}' * attributes corresponds to the '{@code spouse}' property and, rather * than being the concrete value, it contains the name of the bean that will * be injected into that property. 
* * @author Rob Harrop * @author Juergen Hoeller * @since 2.0 */ public class SimplePropertyNamespaceHandler implements NamespaceHandler { private static final [MASK] REF_SUFFIX = "-ref"; @Override public void init() { } @Override public @Nullable BeanDefinition parse(Element element, ParserContext parserContext) { parserContext.getReaderContext().error( "Class [" + getClass().getName() + "] does not support custom elements.", element); return null; } @Override public BeanDefinitionHolder decorate(Node node, BeanDefinitionHolder definition, ParserContext parserContext) { if (node instanceof Attr attr) { [MASK] propertyName = parserContext.getDelegate().getLocalName(attr); [MASK] propertyValue = attr.getValue(); MutablePropertyValues pvs = definition.getBeanDefinition().getPropertyValues(); if (pvs.contains(propertyName)) { parserContext.getReaderContext().error("Property '" + propertyName + "' is already defined using " + "both <property> and inline syntax. Only one approach may be used per property.", attr); } if (propertyName.endsWith(REF_SUFFIX)) { propertyName = propertyName.substring(0, propertyName.length() - REF_SUFFIX.length()); pvs.add(Conventions.attributeNameToPropertyName(propertyName), new RuntimeBeanReference(propertyValue)); } else { pvs.add(Conventions.attributeNameToPropertyName(propertyName), propertyValue); } } return definition; } }
String
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ghidra.util.datastruct; import java.io.Serializable; import java.util.Arrays; /** * The BitTree class maintains a set of ordered keys between the values of * 0 and N. It can quickly (O(log(n))) add keys, remove keys, find the next key * greater than some value , and find the prev key less than some value. It can * determine if a key is in the set in O(1) time. This implementation has been * limited to short keys so that it can implement the ShortKeySet interface. */ public class BitTree implements ShortKeySet, Serializable { private final static long serialVersionUID = 1; private int size; // The maximum number of keys in the set. Keys range from 0 to size-1 private int power2; // The next power of 2 that is greater than size. private int[] bits; // Array of bits used to represent a tree of binary values. A bit at // position N will have a left child at 2*N and a right child at 2*N+1. // Its parent position will be at N/2. // A bit is on if any bits in its subtree are on. Leaf bits correspond // directly to keys and are on if the key is in the set. private int numKeys; // The current number of keys in the set. // masks for seting and clearing bits within an 32 bit integer. 
private static final int[] setMask = { 0x00000001,0x00000002,0x00000004,0x00000008, 0x00000010,0x00000020,0x00000040,0x00000080, 0x00000100,0x00000200,0x00000400,0x00000800, 0x00001000,0x00002000,0x00004000,0x00008000, 0x00010000,0x00020000,0x00040000,0x00080000, 0x00100000,0x00200000,0x00400000,0x00800000, 0x01000000,0x02000000,0x04000000,0x08000000, 0x10000000,0x20000000,0x40000000,0x80000000 }; private static final int[] clearMask={ 0xfffffffe,0xfffffffd,0xfffffffb,0xfffffff7, 0xffffffef,0xffffffdf,0xffffffbf,0xffffff7f, 0xfffffeff,0xfffffdff,0xfffffbff,0xfffff7ff, 0xffffefff,0xffffdfff,0xffffbfff,0xffff7fff, 0xfffeffff,0xfffdffff,0xfffbffff,0xfff7ffff, 0xffefffff,0xffdfffff,0xffbfffff,0xff7fffff, 0xfeffffff,0xfdffffff,0xfbffffff,0xf7ffffff, 0xefffffff,0xdfffffff,0xbfffffff,0x7fffffff }; /** * The BitTree constructor takes the maximum key value. The legal * keys for this set range from 0 to maxKey. * @param maxKey the maximum key that will ever be put into this BitTree. */ public BitTree(short maxKey) { this(maxKey,false); } /** * The BitTree constructor takes the maximum key value. The legal * keys for this set range from 0 to maxKey. * @param maxKey the maximum key value. * @param isFull if true, then the set is initilized to contain all legal keys. */ public BitTree(short maxKey, boolean isFull) { this.size = maxKey+1; // find the next power of 2 greater than or equal to n. power2 = 2; int sz = maxKey+1; while (sz > 1) { sz /= 2; power2 *= 2; } // The number of bits need to store the tree is 2 times the number of keys. // Since we are storing the bits in 32 bit integers we need N/16 integers // to store the bits. int nInts = power2/16; // must have at least 1 if (nInts < 1) { nInts = 1; } bits = new int[nInts]; if (isFull) { Arrays.fill(bits,0xffffffff); numKeys = this.size; } } /** * Removes all keys from the set. */ public void removeAll() { Arrays.fill(bits,0); numKeys = 0; } /** * Returns the number of keys currently in the set. 
*/ public int size() { return numKeys; } /** * Adds a key to the set. * @param key to be added. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public void put(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // The first "power2" number of bits are used for internal tree nodes. The // leaf nodes start at index "power2". int nodeIndex = power2+key; // set the leaf bit on to indicate that the key is in the set. // if the bit is already on (The key is already in the set), then just return. if (!setBit(nodeIndex)) { return; } // increment the number of keys in the set. numKeys++; // go up the tree setting each parent bit to "on" while(nodeIndex != 1) { // compute parent index. nodeIndex /= 2; // if any parent bit is already on, then all its parents are already on, // so were done. if (!setBit(nodeIndex)) { return; } } } /** * Removes the key from the set. * @param key The key to remove. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public boolean remove(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // compute the leaf node index. int nodeIndex = power2+key; // clear the leaf bit to indicate that the key is not in the set. // if it is already "off", then we don't have to do anything if (!clearBit(nodeIndex)) { return false; } // decrement the number of keys in the set numKeys--; // traverse up the tree, clearing any parent nodes if all its child // nodes are "off". while(nodeIndex != 1) { nodeIndex /= 2; if (! [MASK] (nodeIndex)) { return true; } if ( [MASK] (nodeIndex*2) || [MASK] (nodeIndex*2+1)) { return true; } clearBit(nodeIndex); } return true; } /** * Determines if a given key is in the set. * @param key the key to check if it is in this set. * @return true if the key is in the set. 
*/ public boolean containsKey(short key) { if ((key < 0) || (key >= size)) { return false; } return [MASK] (power2+key); } /** * finds the next key that is in the set that is greater than the given key. * @param key from which to search forward. * @return the next key greater than the given key or -1 if there is no key * greater than the given key. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public short getNext(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // compute leaf node. int nodeIndex = key + power2; // while we are not at the root, search upward until we find a right // sibling that is "on". while(nodeIndex != 1) { // see if we are odd (i.e. the right child) int odd = nodeIndex % 2; // if we are the left child see if my sibling on the right is on. // if so, then the next key must be in that subtree. if (odd == 0) { if ( [MASK] (nodeIndex+1)) { // we found a right sibling that is "on", set nodeIndex to // that node. nodeIndex++; break; } } nodeIndex = nodeIndex/2; } // if we made it all the way up to the root node, then there is no key // greater than, so return -1; if (nodeIndex == 1) { return (short)-1; } // now that we found a right sibling that is "on", // follow the leftmost trail of "on" bits to an "on" leaf bit. This bit // represents the next key in the set. while (nodeIndex < power2) { nodeIndex *= 2; // if the left child is not on, then the right child must be "on". if (! [MASK] (nodeIndex)) { nodeIndex++; } } short nextKey = (short)(nodeIndex-power2); if (nextKey >= size) { nextKey = -1; } return nextKey; } /** * Finds the next key that is in the set that is less than the given key. * @param key the key to search before. * @return the next key less than the given key or -1 if there is no key * less than the given key. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. 
*/ public short getPrevious(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // find the leaf node for the given key. int nodeIndex = key + power2; // while we are not at the root, search upward until we find a left // sibling that is "on". while(nodeIndex != 1) { // check if we are a right node. int odd = nodeIndex % 2; // if we are the right child see if my sibling on the left is "on". // if so, then the previous key must be in that subtree. if (odd == 1) { if ( [MASK] (nodeIndex-1)) { nodeIndex--; break; } } nodeIndex = nodeIndex/2; } // If we went all the way to the root then there is no previous key, return -1. if (nodeIndex == 1) { return (short)-1; } // follow the rightmost trail of "on" bits to an "on" leaf bit. This bit // represents the previous key in the set. while (nodeIndex < power2) { nodeIndex *= 2; if ( [MASK] (nodeIndex+1)) { nodeIndex++; } } return (short)(nodeIndex-power2); } /** * Checks if the set is empty. * @return true if the set is empty. */ public boolean isEmpty() { return numKeys == 0; } /** * Returns the first (lowest) key in the set. */ public short getFirst() { // if the 0 key is in the set, then return it. if(containsKey((short)0)) { return (short)0; } // otherwise return the next key after 0. return getNext((short)0); } /** * Returns the last (highest) key in the set. */ public short getLast() { // if the highest possible key is in the set, return it. if(containsKey((short)(size-1))) { return (short)(size-1); } // otherwise return the next lowest key. return getPrevious((short)(size-1)); } /** * Sets the nth bit on. */ private boolean setBit(int n) { int intIndex = n >> 5; int maskIndex = n & 0x1f; int old = bits[intIndex]; return ((bits[intIndex] |= setMask[maskIndex]) != old); } /** * Sets the nth bit to off. 
*/ private boolean clearBit(int n) { int intIndex = n >> 5; int maskIndex = n & 0x1f; int old = bits[intIndex]; return ((bits[intIndex] &= clearMask[maskIndex]) != old); } /** * Tests if the nth bit is on. */ private boolean [MASK] (int n) { int intIndex = n >> 5; int maskIndex = n & 0x1f; return ((bits[intIndex] & setMask[maskIndex]) != 0); } }
isBitSet
/*
 * DBeaver - Universal Database Manager
 * Copyright (C) 2010-2024 DBeaver Corp and others
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jkiss.dbeaver.model.sql;

/**
 * SQL editor constants
 */
public class SQLConstants {

    public static final String NULL_VALUE = "NULL";

    // Quoting characters
    public static final String STR_QUOTE_SINGLE = "'";
    public static final String STR_QUOTE_DOUBLE = "\"";
    public static final String[][] DOUBLE_QUOTE_STRINGS = {{ STR_QUOTE_DOUBLE, STR_QUOTE_DOUBLE }};

    public static final String DEFAULT_STATEMENT_DELIMITER = ";";
    public static final String[] DEFAULT_SCRIPT_DELIMITER = { DEFAULT_STATEMENT_DELIMITER };
    public static final String STR_QUOTE_APOS = "`";
    public static final String ML_COMMENT_START = "/*";
    public static final String ML_COMMENT_END = "*/";
    public static final String SL_COMMENT = "--";
    public static final String ASTERISK = "*";
    public static final String QUESTION = "?";
    public static final String DOT = ".";

    // Individual SQL keywords
    public static final String KEYWORD_SELECT = "SELECT";
    public static final String KEYWORD_INSERT = "INSERT";
    public static final String KEYWORD_UPDATE = "UPDATE";
    public static final String KEYWORD_DELETE = "DELETE";
    public static final String KEYWORD_MERGE = "MERGE";
    public static final String KEYWORD_UPSERT = "UPSERT";
    public static final String KEYWORD_TRUNCATE = "TRUNCATE";
    public static final String KEYWORD_FROM = "FROM";
    public static final String KEYWORD_INTO = "INTO";
    public static final String KEYWORD_JOIN = "JOIN";
    public static final String KEYWORD_CROSS_JOIN = "CROSS JOIN";
    public static final String KEYWORD_NATURAL_JOIN = "NATURAL JOIN";
    public static final String KEYWORD_WHERE = "WHERE";
    public static final String KEYWORD_SET = "SET";
    public static final String KEYWORD_ON = "ON";
    public static final String KEYWORD_AND = "AND";
    public static final String KEYWORD_OR = "OR";
    public static final String KEYWORD_BETWEEN = "BETWEEN";
    public static final String KEYWORD_IS = "IS";
    public static final String KEYWORD_NOT = "NOT";
    public static final String KEYWORD_NULL = "NULL";
    public static final String KEYWORD_IN = "IN";
    public static final String KEYWORD_VALUES = "VALUES";
    public static final String KEYWORD_ORDER_BY = "ORDER BY";
    public static final String KEYWORD_GROUP_BY = "GROUP BY";
    public static final String KEYWORD_HAVING = "HAVING";
    public static final String KEYWORD_LIKE = "LIKE";
    public static final String KEYWORD_ILIKE = "ILIKE";
    public static final String KEYWORD_FUNCTION = "FUNCTION";
    public static final String KEYWORD_PROCEDURE = "PROCEDURE";
    public static final String KEYWORD_COMMIT = "COMMIT";
    public static final String KEYWORD_ROLLBACK = "ROLLBACK";
    public static final String KEYWORD_EXPLAIN = "EXPLAIN";
    public static final String KEYWORD_CASE = "CASE";
    public static final String KEYWORD_QUALIFY = "QUALIFY";
    public static final String KEYWORD_AS = "AS";
    public static final String KEYWORD_USING = "USING";

    // Data type names
    public static final String DATA_TYPE_VARCHAR = "varchar";
    public static final String DATA_TYPE_BIGINT = "BIGINT";
    // Restored masked identifier: value "BINARY" and its use in DEFAULT_TYPES fix the name
    public static final String DATA_TYPE_BINARY = "BINARY";
    public static final String DATA_TYPE_BOOLEAN = "BOOLEAN";
    public static final String DATA_TYPE_DOUBLE = "DOUBLE";
    public static final String DATA_TYPE_FLOAT = "FLOAT";
    public static final String DATA_TYPE_INT = "INT";
    public static final String DATA_TYPE_SMALLINT = "SMALLINT";
    public static final String DATA_TYPE_STRING = "STRING";
    public static final String DATA_TYPE_TINYINT = "TINYINT";

    // Keywords that introduce a whole query
    public static final String[] QUERY_KEYWORDS = {
        KEYWORD_SELECT,
        KEYWORD_INSERT,
        KEYWORD_UPDATE,
        KEYWORD_DELETE,
        KEYWORD_MERGE,
        KEYWORD_UPSERT,
        KEYWORD_TRUNCATE
    };

    // Keywords after which a table name is expected
    public static final String[] TABLE_KEYWORDS = {
        KEYWORD_FROM,
        KEYWORD_INSERT,
        KEYWORD_UPDATE,
        KEYWORD_DELETE,
        KEYWORD_INTO,
        "TABLE",
        "VIEW",
        KEYWORD_JOIN,
        KEYWORD_TRUNCATE,
        KEYWORD_MERGE,
    };

    // Keywords after which a column name is expected
    public static final String[] COLUMN_KEYWORDS = {
        KEYWORD_SELECT,
        KEYWORD_WHERE,
        KEYWORD_SET,
        KEYWORD_ON,
        KEYWORD_AND,
        KEYWORD_OR,
        "BY",
        "HAVING"
    };

    // Data definition keywords
    public static final String[] DDL_KEYWORDS = {
        "CREATE",
        "ALTER",
        "DROP",
    };

    // SQL:2003 reserved words; commented-out entries are deliberately excluded
    // (mostly because they are too common as identifiers)
    public static final String[] SQL2003_RESERVED_KEYWORDS = {
        "ALL", "ALLOCATE", "ALTER", KEYWORD_AND, "ANY", "ARE", "ARRAY", "AS", "ASENSITIVE",
        "ASYMMETRIC", "AT", "ATOMIC", "AUTHORIZATION", "BEGIN", KEYWORD_BETWEEN,
        //"BIGINT",
        DATA_TYPE_BINARY,
        "BOTH", "BY", "CALL", "CALLED", "CARDINALITY", "CASCADE", "CASCADED", KEYWORD_CASE, "CAST",
        "CEIL", "CEILING", "CHARACTER", "CHECK", "CLOSE", "COALESCE", "COLLATE", "COLLECT",
        "COLUMN", KEYWORD_COMMIT, "CONDITION", "CONNECT", "CONSTRAINT", "CONVERT", "CORR",
        "CORRESPONDING", "COVAR_POP", "COVAR_SAMP", "CREATE", "CROSS", "CUBE", "CUME_DIST",
        "CURRENT", "CURSOR", "CYCLE", "DAY", "DEALLOCATE", "DEC", "DECLARE", "DEFAULT",
        KEYWORD_DELETE, "DENSE_RANK", "DEREF", "DESCRIBE", "DETERMINISTIC", "DISCONNECT",
        "DISTINCT", "DROP", "DYNAMIC", "EACH", "ELEMENT", "ELSE", "END", "END-EXEC", "ESCAPE",
        "EVERY", "EXCEPT", "EXEC", "EXECUTE", "EXISTS", "EXP", "EXTERNAL", "EXTRACT", "FALSE",
        "FETCH", "FILTER", "FOR", "FOREIGN", "FREE", "FROM", "FULL", KEYWORD_FUNCTION, "FUSION",
        "GET", "GLOBAL", "GRANT", "GROUP", "GROUPING", "HAVING", "HOLD", "HOUR", "IDENTITY", "IF",
        KEYWORD_IN, "INDEX", "INDICATOR", "INNER", "INOUT", "INSENSITIVE", KEYWORD_INSERT,
        "INTERSECT", "INTERSECTION", "INTERVAL", "INTO", KEYWORD_IS, "JOIN", "LANGUAGE", "LARGE",
        "LATERAL", "LEADING", "LEFT", "LIKE", "LN", "LOCAL", "MATCH", "MEMBER", KEYWORD_MERGE,
        "METHOD", "MINUTE", "MOD", "MODIFIES",
        // "MODULE",  // too common for column names
        "MONTH", "MULTISET", "NATIONAL", "NATURAL",
        //"NCHAR",
        //"NCLOB",
        "NEW", "NO", "NONE", "NORMALIZE", KEYWORD_NOT, KEYWORD_NULL, "NULLIF", "NUMERIC", "OF",
        "OLD", KEYWORD_ON, "ONLY", "OPEN", "OR", "ORDER", "OUT", "OUTER", "OVER", "OVERLAPS",
        "OVERLAY", "PARAMETER", "PARTITION", "POSITION", "PRECISION", "PREPARE", "PRIMARY",
        KEYWORD_PROCEDURE, "RANGE", "RANK", "READS", "REAL", "RECURSIVE", "REF", "REFERENCES",
        "REFERENCING", "RELEASE", "RENAME", "RESULT", "RETURN", "RETURNS", "REVOKE", "RIGHT",
        KEYWORD_ROLLBACK, "ROLLUP", "ROW", "ROW_NUMBER", "ROWS", "SAVEPOINT", "SCOPE", "SCROLL",
        "SEARCH", "SECOND", KEYWORD_SELECT, "SENSITIVE", "SESSION_USER", KEYWORD_SET, "SIMILAR",
        "SMALLINT", "SOME", "SPECIFIC", "SPECIFICTYPE", "SQL", "SQLEXCEPTION", "SQLSTATE",
        "SQLWARNING", "START", "STATIC",
        // "STDDEV_POP",
        // "STDDEV_SAMP",
        "SUBMULTISET", "SYMMETRIC", "SYSTEM", "SYSTEM_USER", "TABLE", "TABLESAMPLE", "THEN",
        "TIMEZONE_HOUR", "TIMEZONE_MINUTE", "TO", "TRAILING", "TRANSLATE", "TRANSLATION", "TREAT",
        "TRIGGER", "TRUE", "UNION", "UNIQUE", "UNKNOWN", "UNNEST", KEYWORD_UPDATE, "USER", "USING",
        //"VALUE", // too common for column names
        KEYWORD_VALUES,
        // "VAR_POP",
        // "VAR_SAMP",
        //"VARCHAR",
        "VARYING", "WHEN", "WHENEVER", KEYWORD_WHERE, "WIDTH_BUCKET", "WINDOW", "WITH", "WITHIN",
        "WITHOUT", "YEAR",

        "NULLS", "FIRST", "LAST", "FOLLOWING", "PRECEDING", "UNBOUNDED", "LENGTH", "KEY", "LEVEL",
        "VIEW", "SEQUENCE", "SCHEMA", "ROLE", "RESTRICT", "ASC", "DESC",

        // Not actually standard but widely used
        "LIMIT",

        // Extended keywords
        // "A",
        "ABSOLUTE",
        "ACTION",
        // "ADA",
        "ADD",
        // "ADMIN",
        "AFTER",
        "ALWAYS",
        // "ASC",
        "ASSERTION",
        "ASSIGNMENT",
        "ATTRIBUTE",
        "ATTRIBUTES",
        "BEFORE",
        // "BERNOULLI",
        // "BREADTH",
        // "C",
        "CASCADE",
        "CATALOG",
        // "CATALOG_NAME",
        "CHAIN",
        // "CHARACTER_SET_CATALOG",
        // "CHARACTER_SET_NAME",
        // "CHARACTER_SET_SCHEMA",
        "CHARACTERISTICS",
        "CHARACTERS",
        // "CLASS_ORIGIN",
        // "COBOL",
        "COLLATION",
        // "COLLATION_CATALOG",
        // "COLLATION_NAME",
        // "COLLATION_SCHEMA",
        // "COLUMN_NAME",
        // "COMMAND_FUNCTION",
        // "COMMAND_FUNCTION_CODE",
        "COMMITTED",
        // "CONDITION_NUMBER",
        "CONNECTION",
        // "CONNECTION_NAME",
        // "CONSTRAINT_CATALOG",
        // "CONSTRAINT_NAME",
        // "CONSTRAINT_SCHEMA",
        "CONSTRAINTS",
        "CONSTRUCTOR",
        "CONTAINS",
        "CONTINUE",
        "CURSOR_NAME",
        "DATA",
        // "DATETIME_INTERVAL_CODE",
        // "DATETIME_INTERVAL_PRECISION",
        "DEFAULTS",
        "DEFERRABLE",
        "DEFERRED",
        "DEFINED",
        "DEFINER",
        "DEGREE",
        "DEPTH",
        "DERIVED",
        // "DESC",
        "DESCRIPTOR",
        "DIAGNOSTICS",
        "DISPATCH",
        "DOMAIN",
        // "DYNAMIC_FUNCTION",
        // "DYNAMIC_FUNCTION_CODE",
        "EQUALS",
        "EXCEPTION",
        "EXCLUDE",
        "EXCLUDING",
        "FINAL",
        "FIRST",
        // "FORTRAN",
        "FOUND",
        // "G",
        "GENERAL",
        "GENERATED",
        "GO",
        "GOTO",
        "GRANTED",
        "HIERARCHY",
        "IMMEDIATE",
        "IMPLEMENTATION",
        "INCLUDING",
        "INCREMENT",
        "INITIALLY",
        "INPUT",
        "INSTANCE",
        "INSTANTIABLE",
        "INVOKER",
        "ISOLATION",
        // "K",
        // "KEY_MEMBER",
        "KEY_TYPE",
        "LAST",
        "LOCATOR",
        // "M",
        "MAP",
        "MATCHED",
        "MAXVALUE",
        // "MESSAGE_LENGTH",
        // "MESSAGE_OCTET_LENGTH",
        // "MESSAGE_TEXT",
        "MINVALUE",
        "MORE",
        "MUMPS",
        // "NAME",
        // "NAMES",
        "NESTING",
        "NEXT",
        "NORMALIZED",
        // "NULLABLE",
        // "NULLS",
        // "NUMBER",
        "OBJECT",
        "OCTETS",
        "OPTION",
        "OPTIONS",
        "ORDERING",
        "ORDINALITY",
        "OTHERS",
        "OUTPUT",
        "OVERRIDING",
        "PAD",
        // "PARAMETER_MODE",
        // "PARAMETER_NAME",
        // "PARAMETER_ORDINAL_POSITION",
        // "PARAMETER_SPECIFIC_CATALOG",
        // "PARAMETER_SPECIFIC_NAME",
        // "PARAMETER_SPECIFIC_SCHEMA",
        "PARTIAL",
        // "PASCAL",
        "PATH",
        "PLACING",
        // "PLI",
        "PRESERVE",
        "PRIOR",
        "PRIVILEGES",
        // "PUBLIC",
        "READ",
        "RELATIVE",
        "REPEATABLE",
        "RESTART",
        // "RETURNED_CARDINALITY",
        // "RETURNED_LENGTH",
        // "RETURNED_OCTET_LENGTH",
        // "RETURNED_SQLSTATE",
        "ROUTINE",
        // "ROUTINE_CATALOG",
        // "ROUTINE_NAME",
        // "ROUTINE_SCHEMA",
        // "ROW_COUNT",
        "SCALE",
        // "SCHEMA_NAME",
        // "SCOPE_CATALOG",
        // "SCOPE_NAME",
        // "SCOPE_SCHEMA",
        "SECTION",
        "SECURITY",
        "SELF",
        "SERIALIZABLE",
        // "SERVER_NAME",
        "SESSION",
        "SETS",
        // "SIMPLE",
        "SIZE",
        "SOURCE",
        "SPACE",
        // "SPECIFIC_NAME",
        // "STATE", // too common for column names
        "STATEMENT",
        "STRUCTURE",
        "STYLE",
        // "SUBCLASS_ORIGIN",
        // "TABLE_NAME",
        "TEMPORARY",
        "TIES",
        // "TOP_LEVEL_COUNT",
        "TRANSACTION",
        // "TRANSACTION_ACTIVE",
        // "TRANSACTIONS_COMMITTED",
        // "TRANSACTIONS_ROLLED_BACK",
        "TRANSFORM",
        "TRANSFORMS",
        // "TRIGGER_CATALOG",
        // "TRIGGER_NAME",
        // "TRIGGER_SCHEMA",
        "TYPE",
        "UNCOMMITTED",
        "UNDER",
        "UNNAMED",
        "USAGE",
        // "USER_DEFINED_TYPE_CATALOG",
        // "USER_DEFINED_TYPE_CODE",
        // "USER_DEFINED_TYPE_NAME",
        // "USER_DEFINED_TYPE_SCHEMA",
        "WORK",
        "WRITE",
        "ZONE",
        KEYWORD_QUALIFY
    };

    // SQL:2003 standard function names
    public static final String[] SQL2003_FUNCTIONS = {
        "ABS", "AVG", "CHAR_LENGTH", "CHARACTER_LENGTH", "COUNT", "LOCALTIME", "LOCALTIMESTAMP",
        // "CURRENT_DATE",
        // "CURRENT_DEFAULT_TRANSFORM_GROUP",
        // "CURRENT_PATH",
        // "CURRENT_ROLE",
        // "CURRENT_TIME",
        // "CURRENT_TIMESTAMP",
        // "CURRENT_TRANSFORM_GROUP_FOR_TYPE",
        // "CURRENT_USER",
        "FLOOR", "LOWER", "MAX", "MIN", "OCTET_LENGTH", "PERCENT_RANK", "PERCENTILE_CONT",
        "PERCENTILE_DISC", "POWER", "REGR_AVGX", "REGR_AVGY", "REGR_COUNT", "REGR_INTERCEPT",
        "REGR_R2", "REGR_SLOPE", "REGR_SXX", "REGR_SXY", "REGR_SYY", "SQRT", "SUBSTRING", "SUM",
        "TRIM", "UESCAPE", "UPPER",
    };

    // Non-standard but common DDL keywords
    public static final String[] SQL_EX_KEYWORDS = {
        "CHANGE",
        "MODIFY",
    };

    // Default set of data type names offered by the editor
    public static final String[] DEFAULT_TYPES = {
        DATA_TYPE_BOOLEAN,
        "CHAR",
        "VARCHAR",
        DATA_TYPE_BINARY,
        "VARBINARY",
        DATA_TYPE_INT,
        "INTEGER",
        DATA_TYPE_SMALLINT,
        DATA_TYPE_BIGINT,
        "NUMBER",
        "NUMERIC",
        "DECIMAL",
        DATA_TYPE_FLOAT,
        DATA_TYPE_DOUBLE,
        "DATE",
        "TIME",
        "TIMESTAMP",
        "CLOB",
        "BLOB",
    };

    public static final String BLOCK_BEGIN = "BEGIN";
    public static final String BLOCK_END = "END";

    /**
     * Pseudo variables - these are not dynamic parameters
     */
    public static final String[] PSEUDO_VARIABLES = {
        ":NEW",
        ":OLD",
    };

    public static final char STRUCT_SEPARATOR = '.'; //$NON-NLS-1$

    // Preference keys for SQL editor syntax colors
    public static final String CONFIG_COLOR_KEYWORD = "org.jkiss.dbeaver.sql.editor.color.keyword.foreground";
    public static final String CONFIG_COLOR_DATATYPE = "org.jkiss.dbeaver.sql.editor.color.datatype.foreground";
    public static final String CONFIG_COLOR_FUNCTION = "org.jkiss.dbeaver.sql.editor.color.function.foreground";
    public static final String CONFIG_COLOR_STRING = "org.jkiss.dbeaver.sql.editor.color.string.foreground";
    public static final String CONFIG_COLOR_TABLE = "org.jkiss.dbeaver.sql.editor.color.table.foreground";
    public static final String CONFIG_COLOR_TABLE_ALIAS = "org.jkiss.dbeaver.sql.editor.color.table.alias.foreground";
    public static final String CONFIG_COLOR_COLUMN = "org.jkiss.dbeaver.sql.editor.color.column.foreground";
    public static final String CONFIG_COLOR_COLUMN_DERIVED = "org.jkiss.dbeaver.sql.editor.color.column.derived.foreground";
    public static final String CONFIG_COLOR_SCHEMA = "org.jkiss.dbeaver.sql.editor.color.schema.foreground";
    public static final String CONFIG_COLOR_COMPOSITE_FIELD = "org.jkiss.dbeaver.sql.editor.color.composite.field.foreground";
    public static final String CONFIG_COLOR_SQL_VARIABLE = "org.jkiss.dbeaver.sql.editor.color.sqlVariable.foreground";
    public static final String CONFIG_COLOR_SEMANTIC_ERROR = "org.jkiss.dbeaver.sql.editor.color.semanticError.foreground";
    public static final String CONFIG_COLOR_NUMBER = "org.jkiss.dbeaver.sql.editor.color.number.foreground";
    public static final String CONFIG_COLOR_COMMENT = "org.jkiss.dbeaver.sql.editor.color.comment.foreground";
    public static final String CONFIG_COLOR_DELIMITER = "org.jkiss.dbeaver.sql.editor.color.delimiter.foreground";
    public static final String CONFIG_COLOR_PARAMETER = "org.jkiss.dbeaver.sql.editor.color.parameter.foreground";
    public static final String CONFIG_COLOR_COMMAND = "org.jkiss.dbeaver.sql.editor.color.command.foreground";
    public static final String CONFIG_COLOR_TEXT = "org.jkiss.dbeaver.sql.editor.color.text.foreground";
    public static final String CONFIG_COLOR_BACKGROUND = "org.jkiss.dbeaver.sql.editor.color.text.background";
    public static final String CONFIG_COLOR_DISABLED = "org.jkiss.dbeaver.sql.editor.color.disabled.background";

    public static final char DEFAULT_PARAMETER_MARK = '?';
    public static final char DEFAULT_PARAMETER_PREFIX = ':';
    public static final String DEFAULT_IDENTIFIER_QUOTE = "\"";
    public static final String DEFAULT_LIKE_ESCAPE = "\\";
    public static final String KEYWORD_PATTERN_CHARS = "\\*\\";
    public static final String DEFAULT_CONTROL_COMMAND_PREFIX = "@";

    public static final char[] BRACKETS = {'{', '}', '(', ')', '[', ']', '<', '>'};

    public static final String COLUMN_ASTERISK = "*";
}
DATA_TYPE_BINARY
package com.taobao.arthas.grpcweb.grpc.server.httpServer; import io.netty.channel.ChannelInitializer; import io.netty.channel.ChannelPipeline; import io.netty.channel.socket.SocketChannel; import io.netty. [MASK] .codec.http.HttpObjectAggregator; import io.netty. [MASK] .codec.http.HttpServerCodec; import io.netty. [MASK] .stream.ChunkedWriteHandler; public class NettyHttpInitializer extends ChannelInitializer<SocketChannel> { private final String STATIC_LOCATION; public NettyHttpInitializer(String staticLocation) { this.STATIC_LOCATION = staticLocation; } @Override public void initChannel(SocketChannel ch) throws Exception { ChannelPipeline pipeline = ch.pipeline(); //将请求和应答消息编码或解码为HTTP消息 pipeline.addLast(new HttpServerCodec()); //将HTTP消息的多个部分组合成一条完整的HTTP消息 pipeline.addLast(new HttpObjectAggregator(64 * 1024)); pipeline.addLast(new ChunkedWriteHandler()); pipeline.addLast(new NettyHttpStaticFileHandler(this.STATIC_LOCATION)); } }
handler
package com.alibaba. [MASK] .bvt.sql.mysql.createTable; import com.alibaba. [MASK] .sql.MysqlTest; import com.alibaba. [MASK] .sql.ast.SQLStatement; import com.alibaba. [MASK] .sql.dialect.mysql.ast.statement.MySqlCreateTableStatement; import com.alibaba. [MASK] .sql.dialect.mysql.parser.MySqlStatementParser; import java.util.List; public class MySqlCreateTableTest139 extends MysqlTest { public void test_0() throws Exception { String sql = " CREATE TABLE IF NOT EXISTS simiao_alter_partition2 (id int, id2 int, name varchar(30), time timestamp NOT NULL, PRIMARY KEY (id, time), KEY idx_id_time USING BTREE (id, time)) PARTITION BY RANGE (UNIX_TIMESTAMP(time))( PARTITION p0 VALUES LESS THAN (UNIX_TIMESTAMP('2013-01-01 00:00:00')), PARTITION p1 VALUES LESS THAN (UNIX_TIMESTAMP('2013-02-01 00:00:00')), PARTITION p2 VALUES LESS THAN (UNIX_TIMESTAMP('2013-03-01 00:00:00')), PARTITION p3 VALUES LESS THAN (UNIX_TIMESTAMP('2013-04-01 00:00:00')), PARTITION p4 VALUES LESS THAN (UNIX_TIMESTAMP('2013-05-01 00:00:00')), PARTITION p5 VALUES LESS THAN (UNIX_TIMESTAMP('2013-06-01 00:00:00')), PARTITION p6 VALUES LESS THAN (UNIX_TIMESTAMP('2013-07-01 00:00:00')), PARTITION p7 VALUES LESS THAN (UNIX_TIMESTAMP('2013-08-01 00:00:00')), PARTITION p8 VALUES LESS THAN (UNIX_TIMESTAMP('2013-09-01 00:00:00')), PARTITION p10 VALUES LESS THAN (UNIX_TIMESTAMP('2013-10-01 00:00:00')), PARTITION p11 VALUES LESS THAN (UNIX_TIMESTAMP('2013-11-01 00:00:00')), PARTITION p12 VALUES LESS THAN (UNIX_TIMESTAMP('2013-12-01 00:00:00')), PARTITION p13 VALUES LESS THAN (MAXVALUE) ) dbpartition by hash(id) dbpartitions 4;"; MySqlStatementParser parser = new MySqlStatementParser(sql); List<SQLStatement> statementList = parser.parseStatementList(); MySqlCreateTableStatement stmt = (MySqlCreateTableStatement) statementList.get(0); assertEquals(1, statementList.size()); assertEquals("CREATE TABLE IF NOT EXISTS simiao_alter_partition2 (\n" + "\tid int,\n" + "\tid2 int,\n" + "\tname varchar(30),\n" + "\ttime 
timestamp NOT NULL,\n" + "\tPRIMARY KEY (id, time),\n" + "\tKEY idx_id_time USING BTREE (id, time)\n" + ")\n" + "PARTITION BY RANGE (UNIX_TIMESTAMP(time)) (\n" + "\tPARTITION p0 VALUES LESS THAN (UNIX_TIMESTAMP('2013-01-01 00:00:00')),\n" + "\tPARTITION p1 VALUES LESS THAN (UNIX_TIMESTAMP('2013-02-01 00:00:00')),\n" + "\tPARTITION p2 VALUES LESS THAN (UNIX_TIMESTAMP('2013-03-01 00:00:00')),\n" + "\tPARTITION p3 VALUES LESS THAN (UNIX_TIMESTAMP('2013-04-01 00:00:00')),\n" + "\tPARTITION p4 VALUES LESS THAN (UNIX_TIMESTAMP('2013-05-01 00:00:00')),\n" + "\tPARTITION p5 VALUES LESS THAN (UNIX_TIMESTAMP('2013-06-01 00:00:00')),\n" + "\tPARTITION p6 VALUES LESS THAN (UNIX_TIMESTAMP('2013-07-01 00:00:00')),\n" + "\tPARTITION p7 VALUES LESS THAN (UNIX_TIMESTAMP('2013-08-01 00:00:00')),\n" + "\tPARTITION p8 VALUES LESS THAN (UNIX_TIMESTAMP('2013-09-01 00:00:00')),\n" + "\tPARTITION p10 VALUES LESS THAN (UNIX_TIMESTAMP('2013-10-01 00:00:00')),\n" + "\tPARTITION p11 VALUES LESS THAN (UNIX_TIMESTAMP('2013-11-01 00:00:00')),\n" + "\tPARTITION p12 VALUES LESS THAN (UNIX_TIMESTAMP('2013-12-01 00:00:00')),\n" + "\tPARTITION p13 VALUES LESS THAN MAXVALUE\n" + ")\n" + "DBPARTITION BY hash(id) DBPARTITIONS 4;", stmt.toString()); assertEquals("create table if not exists simiao_alter_partition2 (\n" + "\tid int,\n" + "\tid2 int,\n" + "\tname varchar(30),\n" + "\ttime timestamp not null,\n" + "\tprimary key (id, time),\n" + "\tkey idx_id_time using BTREE (id, time)\n" + ")\n" + "partition by range (UNIX_TIMESTAMP(time)) (\n" + "\tpartition p0 values less than (UNIX_TIMESTAMP('2013-01-01 00:00:00')),\n" + "\tpartition p1 values less than (UNIX_TIMESTAMP('2013-02-01 00:00:00')),\n" + "\tpartition p2 values less than (UNIX_TIMESTAMP('2013-03-01 00:00:00')),\n" + "\tpartition p3 values less than (UNIX_TIMESTAMP('2013-04-01 00:00:00')),\n" + "\tpartition p4 values less than (UNIX_TIMESTAMP('2013-05-01 00:00:00')),\n" + "\tpartition p5 values less than (UNIX_TIMESTAMP('2013-06-01 
00:00:00')),\n" + "\tpartition p6 values less than (UNIX_TIMESTAMP('2013-07-01 00:00:00')),\n" + "\tpartition p7 values less than (UNIX_TIMESTAMP('2013-08-01 00:00:00')),\n" + "\tpartition p8 values less than (UNIX_TIMESTAMP('2013-09-01 00:00:00')),\n" + "\tpartition p10 values less than (UNIX_TIMESTAMP('2013-10-01 00:00:00')),\n" + "\tpartition p11 values less than (UNIX_TIMESTAMP('2013-11-01 00:00:00')),\n" + "\tpartition p12 values less than (UNIX_TIMESTAMP('2013-12-01 00:00:00')),\n" + "\tpartition p13 values less than maxvalue\n" + ")\n" + "dbpartition by hash(id) dbpartitions 4;", stmt.toLowerCaseString()); } }
druid
/* * Copyright 2012-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.loader.net.protocol.jar; import java.io.File; import java.net.MalformedURLException; import java.net.URL; import java.util.jar.JarEntry; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; import org.springframework.boot.loader.net.util.UrlDecoder; import static org.assertj.core.api.Assertions.assertThat; /** * Tests for {@link JarUrl}. * * @author Phillip Webb */ class JarUrlTests { @TempDir File temp; File [MASK] ; String [MASK] UrlPath; @BeforeEach void setup() throws MalformedURLException { this. [MASK] = new File(this.temp, "my.jar"); this. [MASK] UrlPath = this. [MASK] .toURI().toURL().toString().substring("file:".length()).replace("!", "%21"); } @Test void createWithFileReturnsUrl() { URL url = JarUrl.create(this. [MASK] ); assertThat(url).hasToString("jar:file:%s!/".formatted(this. [MASK] UrlPath)); } @Test void createWithFileAndEntryReturnsUrl() { JarEntry entry = new JarEntry("lib.jar"); URL url = JarUrl.create(this. [MASK] , entry); assertThat(url).hasToString("jar:nested:%s/!lib.jar!/".formatted(this. [MASK] UrlPath)); } @Test void createWithFileAndNullEntryReturnsUrl() { URL url = JarUrl.create(this. [MASK] , (JarEntry) null); assertThat(url).hasToString("jar:file:%s!/".formatted(this. 
[MASK] UrlPath)); } @Test void createWithFileAndNameReturnsUrl() { URL url = JarUrl.create(this. [MASK] , "lib.jar"); assertThat(url).hasToString("jar:nested:%s/!lib.jar!/".formatted(this. [MASK] UrlPath)); } @Test void createWithFileAndNullNameReturnsUrl() { URL url = JarUrl.create(this. [MASK] , (String) null); assertThat(url).hasToString("jar:file:%s!/".formatted(this. [MASK] UrlPath)); } @Test void createWithFileNameAndPathReturnsUrl() { URL url = JarUrl.create(this. [MASK] , "lib.jar", "com/example/My.class"); assertThat(url).hasToString("jar:nested:%s/!lib.jar!/com/example/My.class".formatted(this. [MASK] UrlPath)); } @Test void createWithReservedCharsInName() throws Exception { String badFolderName = "foo#bar!/baz/!oof"; this.temp = new File(this.temp, badFolderName); setup(); URL url = JarUrl.create(this. [MASK] , "lib.jar", "com/example/My.class"); assertThat(url).hasToString("jar:nested:%s/!lib.jar!/com/example/My.class".formatted(this. [MASK] UrlPath)); assertThat(UrlDecoder.decode(url.toString())).contains(badFolderName); } }
jarFile
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import 
com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.EvaluationContext; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.MemoizingEvaluator; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation. [MASK] ; import net.starlark.java.eval.StarlarkSemantics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. 
*/ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator evaluator; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); BlazeDirectories directories = new BlazeDirectories( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories); Map<SkyFunctionName, SkyFunction> skyFunctions = new HashMap<>(); skyFunctions.put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache.NO_CACHE, externalFilesHelper)); skyFunctions.put(SkyFunctions.FILE, new FileFunction(pkgLocator, directories)); skyFunctions.put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction()); skyFunctions.put( SkyFunctions.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); skyFunctions.put(SkyFunctions.IGNORED_SUBDIRECTORIES, IgnoredSubdirectoriesFunction.NOOP); skyFunctions.put( SkyFunctions.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); skyFunctions.put(SkyFunctions.WORKSPACE_NAME, 
new TestWorkspaceNameFunction()); skyFunctions.put( SkyFunctions.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); evaluator = new InMemoryMemoizingEvaluator(skyFunctions, differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String relative) { return RootedPath.toRootedPath( artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative(relative)); } private static RootedPath siblingOf(Artifact artifact, String relative) { PathFragment parent = Preconditions.checkNotNull(artifact.getRootRelativePath().getParentDirectory()); return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative(relative)); } private void createFile(Path path, String... contents) throws Exception { if (!path.getParentDirectory().exists()) { scratch.dir(path.getParentDirectory().getPathString()); } scratch.file(path.getPathString(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact.getPath(), contents); } private RootedPath createFile(RootedPath path, String... 
contents) throws Exception { createFile(path.asPath(), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { EvaluationContext evaluationContext = EvaluationContext.newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return evaluator.evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink(PathFragment.create(from), to.getPath().asFragment()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink(PathFragment.create(from), to.asPath().asFragment()); } private FilesetOutputSymlink symlink(PathFragment from, PathFragment to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory.asFragment()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. 
(input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.targetPath(), input.relativeToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); 
assertThat(result.getError(key).getException()) .hasMessageThat() .contains("foo contains a directory"); } @Test public void testFileTraversalForDanglingSymlink() throws Exception { Artifact linkName = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf(linkName, "target.file"), "blah"); linkName.getPath().createSymbolicLink(PathFragment.create("target.file")); linkTarget.asPath().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ linkName, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different values (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class FingerprintTester { private final Map<String, Domain> domains; FingerprintTester(Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@ [MASK] String useAlternateFor) { Map<String, Object> values = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { values.put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return values; } public void doTest() throws Exception { Fingerprint fp = new Fingerprint(); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedInFingerprint) { assertWithMessage( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { assertWithMessage( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedInFingerprint; Object valueA; Object valueB; Domain(boolean includedInFingerprint, Object valueA, Object valueB) { this.includedInFingerprint = includedInFingerprint; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOfFingerprint(Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain notPartOfFingerprint(Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void testFingerprintOfFileTraversal() throws Exception { new FingerprintTester( ImmutableMap.<String, Domain>builder() .put("ownerLabel", notPartOfFingerprint("//bar")) .put("fileToTraverse", partOfFingerprint("foo/file.a", 
"bar/file.b")) .put("destPath", partOfFingerprint("out1", "out2")) .put("strictFilesetOutput", partOfFingerprint(true, false)) .put("permitDirectories", partOfFingerprint(true, false)) .buildOrThrow()) { @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), PathFragment.create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void testFingerprintOfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new FingerprintTester( ImmutableMap.of( "ownerLabel", notPartOfFingerprint("//bar"), "nestedArtifact", partOfFingerprint(nested1, nested2), "destDir", partOfFingerprint("out1", "out2"), "excludes", partOfFingerprint(ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), PathFragment.create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements SkyFunction { @ [MASK] @Override public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
Nullable
/* * Copyright (c) 2019, 2023, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * The Universal Permissive License (UPL), Version 1.0 * * Subject to the condition set forth below, permission is hereby granted to any * person obtaining a copy of this software, associated documentation and/or * data (collectively the "Software"), free of charge and under any and all * copyright rights in the Software, and any and all patent rights owned or * freely licensable by each licensor hereunder covering either (i) the * unmodified Software as contributed to or provided by such licensor, or (ii) * the Larger Works (as defined below), to deal in both * * (a) the Software, and * * (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if * one is included with the Software each a "Larger Work" to which the Software * is contributed by such licensors), * * without restriction, including without limitation the rights to copy, create * derivative works of, display, perform, and distribute the Software and make, * use, sell, offer for sale, import, export, have made, and have sold the * Software and the Larger Work(s), and to sublicense the foregoing rights on * either these or other terms. * * This license is subject to the following condition: * * The above copyright notice and either this complete permission notice or at a * minimum a reference to the UPL must be included in all copies or substantial * portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.oracle.truffle.runtime; import com.oracle.truffle.api.impl.Accessor; final class OptimizedRuntimeAccessor extends Accessor { static final OptimizedRuntimeAccessor [MASK] = new OptimizedRuntimeAccessor(); static final NodeSupport NODES = [MASK] .nodeSupport(); static final SourceSupport SOURCE = [MASK] .sourceSupport(); static final InstrumentSupport INSTRUMENT = [MASK] .instrumentSupport(); static final LanguageSupport LANGUAGE = [MASK] .languageSupport(); static final EngineSupport ENGINE = [MASK] .engineSupport(); static final InteropSupport INTEROP = [MASK] .interopSupport(); static final ExceptionSupport EXCEPTION = [MASK] .exceptionSupport(); static final FrameSupport FRAME = [MASK] .framesSupport(); private OptimizedRuntimeAccessor() { } }
ACCESSOR
package com.alibaba.druid.bvt.sql.mysql.createTable;

import com.alibaba.druid.sql.MysqlTest;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlCreateTableStatement;
import com.alibaba.druid.sql.dialect.mysql.parser.MySqlStatementParser;

import java.util.List;

/**
 * Round-trip test: parses a MySQL CREATE TABLE statement that combines RANGE partitioning with a
 * DRDS {@code dbpartition by} clause, then verifies both the upper-case and the lower-case
 * renderings of the parsed AST.
 */
public class MySqlCreateTableTest139 extends MysqlTest {
    public void test_0() throws Exception {
        String sql = " CREATE TABLE IF NOT EXISTS simiao_alter_partition2 (id int, id2 int, name varchar(30), time timestamp NOT NULL, PRIMARY KEY (id, time), KEY idx_id_time USING BTREE (id, time)) PARTITION BY RANGE (UNIX_TIMESTAMP(time))( PARTITION p0 VALUES LESS THAN (UNIX_TIMESTAMP('2013-01-01 00:00:00')), PARTITION p1 VALUES LESS THAN (UNIX_TIMESTAMP('2013-02-01 00:00:00')), PARTITION p2 VALUES LESS THAN (UNIX_TIMESTAMP('2013-03-01 00:00:00')), PARTITION p3 VALUES LESS THAN (UNIX_TIMESTAMP('2013-04-01 00:00:00')), PARTITION p4 VALUES LESS THAN (UNIX_TIMESTAMP('2013-05-01 00:00:00')), PARTITION p5 VALUES LESS THAN (UNIX_TIMESTAMP('2013-06-01 00:00:00')), PARTITION p6 VALUES LESS THAN (UNIX_TIMESTAMP('2013-07-01 00:00:00')), PARTITION p7 VALUES LESS THAN (UNIX_TIMESTAMP('2013-08-01 00:00:00')), PARTITION p8 VALUES LESS THAN (UNIX_TIMESTAMP('2013-09-01 00:00:00')), PARTITION p10 VALUES LESS THAN (UNIX_TIMESTAMP('2013-10-01 00:00:00')), PARTITION p11 VALUES LESS THAN (UNIX_TIMESTAMP('2013-11-01 00:00:00')), PARTITION p12 VALUES LESS THAN (UNIX_TIMESTAMP('2013-12-01 00:00:00')), PARTITION p13 VALUES LESS THAN (MAXVALUE) ) dbpartition by hash(id) dbpartitions 4;";

        MySqlStatementParser parser = new MySqlStatementParser(sql);
        List<SQLStatement> statementList = parser.parseStatementList();
        MySqlCreateTableStatement stmt = (MySqlCreateTableStatement) statementList.get(0);

        assertEquals(1, statementList.size());

        // Upper-case (default) rendering.
        assertEquals("CREATE TABLE IF NOT EXISTS simiao_alter_partition2 (\n"
                + "\tid int,\n"
                + "\tid2 int,\n"
                + "\tname varchar(30),\n"
                + "\ttime timestamp NOT NULL,\n"
                + "\tPRIMARY KEY (id, time),\n"
                + "\tKEY idx_id_time USING BTREE (id, time)\n"
                + ")\n"
                + "PARTITION BY RANGE (UNIX_TIMESTAMP(time)) (\n"
                + "\tPARTITION p0 VALUES LESS THAN (UNIX_TIMESTAMP('2013-01-01 00:00:00')),\n"
                + "\tPARTITION p1 VALUES LESS THAN (UNIX_TIMESTAMP('2013-02-01 00:00:00')),\n"
                + "\tPARTITION p2 VALUES LESS THAN (UNIX_TIMESTAMP('2013-03-01 00:00:00')),\n"
                + "\tPARTITION p3 VALUES LESS THAN (UNIX_TIMESTAMP('2013-04-01 00:00:00')),\n"
                + "\tPARTITION p4 VALUES LESS THAN (UNIX_TIMESTAMP('2013-05-01 00:00:00')),\n"
                + "\tPARTITION p5 VALUES LESS THAN (UNIX_TIMESTAMP('2013-06-01 00:00:00')),\n"
                + "\tPARTITION p6 VALUES LESS THAN (UNIX_TIMESTAMP('2013-07-01 00:00:00')),\n"
                + "\tPARTITION p7 VALUES LESS THAN (UNIX_TIMESTAMP('2013-08-01 00:00:00')),\n"
                + "\tPARTITION p8 VALUES LESS THAN (UNIX_TIMESTAMP('2013-09-01 00:00:00')),\n"
                + "\tPARTITION p10 VALUES LESS THAN (UNIX_TIMESTAMP('2013-10-01 00:00:00')),\n"
                + "\tPARTITION p11 VALUES LESS THAN (UNIX_TIMESTAMP('2013-11-01 00:00:00')),\n"
                + "\tPARTITION p12 VALUES LESS THAN (UNIX_TIMESTAMP('2013-12-01 00:00:00')),\n"
                + "\tPARTITION p13 VALUES LESS THAN MAXVALUE\n"
                + ")\n"
                + "DBPARTITION BY hash(id) DBPARTITIONS 4;", stmt.toString());

        // Lower-case rendering.
        assertEquals("create table if not exists simiao_alter_partition2 (\n"
                + "\tid int,\n"
                + "\tid2 int,\n"
                + "\tname varchar(30),\n"
                + "\ttime timestamp not null,\n"
                + "\tprimary key (id, time),\n"
                + "\tkey idx_id_time using BTREE (id, time)\n"
                + ")\n"
                + "partition by range (UNIX_TIMESTAMP(time)) (\n"
                + "\tpartition p0 values less than (UNIX_TIMESTAMP('2013-01-01 00:00:00')),\n"
                + "\tpartition p1 values less than (UNIX_TIMESTAMP('2013-02-01 00:00:00')),\n"
                + "\tpartition p2 values less than (UNIX_TIMESTAMP('2013-03-01 00:00:00')),\n"
                + "\tpartition p3 values less than (UNIX_TIMESTAMP('2013-04-01 00:00:00')),\n"
                + "\tpartition p4 values less than (UNIX_TIMESTAMP('2013-05-01 00:00:00')),\n"
                + "\tpartition p5 values less than (UNIX_TIMESTAMP('2013-06-01 00:00:00')),\n"
                + "\tpartition p6 values less than (UNIX_TIMESTAMP('2013-07-01 00:00:00')),\n"
                + "\tpartition p7 values less than (UNIX_TIMESTAMP('2013-08-01 00:00:00')),\n"
                + "\tpartition p8 values less than (UNIX_TIMESTAMP('2013-09-01 00:00:00')),\n"
                + "\tpartition p10 values less than (UNIX_TIMESTAMP('2013-10-01 00:00:00')),\n"
                + "\tpartition p11 values less than (UNIX_TIMESTAMP('2013-11-01 00:00:00')),\n"
                + "\tpartition p12 values less than (UNIX_TIMESTAMP('2013-12-01 00:00:00')),\n"
                + "\tpartition p13 values less than maxvalue\n"
                + ")\n"
                + "dbpartition by hash(id) dbpartitions 4;", stmt.toLowerCaseString());
    }
}
mysql
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.hudi;

import com.facebook.presto.common.type.TypeManager;
import com.facebook.presto.hive.HdfsEnvironment;
import com.facebook.presto.hive.MetastoreClientConfig;
import com.facebook.presto.hive.metastore.ExtendedHiveMetastore;
import com.facebook.presto.hive.metastore.InMemoryCachingHiveMetastore;
import com.facebook.presto.spi.connector.ConnectorMetadata;

import javax.inject.Inject;

import static java.util.Objects.requireNonNull;

/**
 * Factory for {@link HudiMetadata} instances. Captures the metastore, HDFS environment and type
 * manager once at injection time, plus the metastore-client caching/impersonation settings, and
 * wraps the metastore in a per-transaction memoizing cache on every {@link #create()}.
 */
public class HudiMetadataFactory
{
    private final ExtendedHiveMetastore metastore;
    private final HdfsEnvironment hdfsEnvironment;
    private final TypeManager typeManager;
    // Upper bound for the per-transaction memoizing metastore cache.
    private final long perTransactionCacheMaximumSize;
    // Mirrors MetastoreClientConfig.isMetastoreImpersonationEnabled().
    private final boolean metastoreImpersonationEnabled;
    private final int metastorePartitionCacheMaxColumnCount;

    @Inject
    public HudiMetadataFactory(
            ExtendedHiveMetastore metastore,
            HdfsEnvironment hdfsEnvironment,
            TypeManager typeManager,
            MetastoreClientConfig metastoreClientConfig)
    {
        this.metastore = requireNonNull(metastore, "metastore is null");
        this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null");
        this.typeManager = requireNonNull(typeManager, "typeManager is null");
        this.perTransactionCacheMaximumSize = metastoreClientConfig.getPerTransactionMetastoreCacheMaximumSize();
        this.metastoreImpersonationEnabled = metastoreClientConfig.isMetastoreImpersonationEnabled();
        this.metastorePartitionCacheMaxColumnCount = metastoreClientConfig.getPartitionCacheColumnCountLimit();
    }

    /** Builds a new metadata instance backed by a memoizing (per-transaction) metastore cache. */
    public ConnectorMetadata create()
    {
        return new HudiMetadata(
                InMemoryCachingHiveMetastore.memoizeMetastore(
                        metastore,
                        metastoreImpersonationEnabled,
                        perTransactionCacheMaximumSize,
                        metastorePartitionCacheMaxColumnCount),
                hdfsEnvironment,
                typeManager);
    }
}
metastoreImpersonationEnabled
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.qbao.cat.plugin.common; import java.util.StringTokenizer; import org.aspectj.lang.ProceedingJoinPoint; import org.aspectj.lang.annotation.Around; import org.aspectj.lang.annotation.Aspect; import org.aspectj.lang.annotation.Pointcut; import com.dianping.cat.Cat; import com.dianping.cat.message.Transaction; import com.qbao.cat.plugin.DefaultPluginTemplate; @Aspect public abstract class CommonPluginTemplate extends DefaultPluginTemplate { @Override @Pointcut public void scope() {} @Override @Around(POINTCUT_NAME) public Object doAround(ProceedingJoinPoint pjp) throws Throwable { return super.doAround(pjp); } @Override protected Transaction beginLog(ProceedingJoinPoint pjp) { StringBuilder type = new StringBuilder(); String packageStr = pjp.getSignature().getDeclaringType().getPackage().getName(); StringTokenizer st = new StringTokenizer(packageStr, "."); for(int i=0;i<2;i++){ type.append(st.nextToken()); type.append("."); } type.append("Method"); Transaction [MASK] = Cat.newTransaction(type.toString(),pjp.getSignature().toString()); return [MASK] ; } @Override protected void endLog(Transaction [MASK] , Object retVal, Object... params) {} }
transaction
/*
 * DBeaver - Universal Database Manager
 * Copyright (C) 2010-2024 DBeaver Corp and others
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jkiss.dbeaver.model.sql;

/**
 * SQL editor constants
 */
public class SQLConstants {

    public static final String NULL_VALUE = "NULL";

    public static final String STR_QUOTE_SINGLE = "'";
    public static final String STR_QUOTE_DOUBLE = "\"";
    public static final String[][] DOUBLE_QUOTE_STRINGS = {{ STR_QUOTE_DOUBLE, STR_QUOTE_DOUBLE }};

    public static final String DEFAULT_STATEMENT_DELIMITER = ";";
    public static final String[] DEFAULT_SCRIPT_DELIMITER = { DEFAULT_STATEMENT_DELIMITER };
    public static final String STR_QUOTE_APOS = "`";
    public static final String ML_COMMENT_START = "/*";
    public static final String ML_COMMENT_END = "*/";
    public static final String SL_COMMENT = "--";

    public static final String ASTERISK = "*";
    public static final String QUESTION = "?";
    public static final String DOT = ".";

    public static final String KEYWORD_SELECT = "SELECT";
    public static final String KEYWORD_INSERT = "INSERT";
    public static final String KEYWORD_UPDATE = "UPDATE";
    public static final String KEYWORD_DELETE = "DELETE";
    public static final String KEYWORD_MERGE = "MERGE";
    public static final String KEYWORD_UPSERT = "UPSERT";
    public static final String KEYWORD_TRUNCATE = "TRUNCATE";

    public static final String KEYWORD_FROM = "FROM";
    public static final String KEYWORD_INTO = "INTO";
    public static final String KEYWORD_JOIN = "JOIN";
    public static final String KEYWORD_CROSS_JOIN = "CROSS JOIN";
    public static final String KEYWORD_NATURAL_JOIN = "NATURAL JOIN";
    public static final String KEYWORD_WHERE = "WHERE";
    public static final String KEYWORD_SET = "SET";
    public static final String KEYWORD_ON = "ON";
    public static final String KEYWORD_AND = "AND";
    public static final String KEYWORD_OR = "OR";
    public static final String KEYWORD_BETWEEN = "BETWEEN";
    public static final String KEYWORD_IS = "IS";
    public static final String KEYWORD_NOT = "NOT";
    public static final String KEYWORD_NULL = "NULL";
    public static final String KEYWORD_IN = "IN";
    public static final String KEYWORD_VALUES = "VALUES";
    public static final String KEYWORD_ORDER_BY = "ORDER BY";
    public static final String KEYWORD_GROUP_BY = "GROUP BY";
    public static final String KEYWORD_HAVING = "HAVING";
    public static final String KEYWORD_LIKE = "LIKE";
    public static final String KEYWORD_ILIKE = "ILIKE";
    public static final String KEYWORD_FUNCTION = "FUNCTION";
    public static final String KEYWORD_PROCEDURE = "PROCEDURE";
    public static final String KEYWORD_COMMIT = "COMMIT";
    public static final String KEYWORD_ROLLBACK = "ROLLBACK";
    public static final String KEYWORD_EXPLAIN = "EXPLAIN";
    public static final String KEYWORD_CASE = "CASE";
    public static final String KEYWORD_QUALIFY = "QUALIFY";
    public static final String KEYWORD_AS = "AS";
    public static final String KEYWORD_USING = "USING";

    public static final String DATA_TYPE_VARCHAR = "varchar";
    public static final String DATA_TYPE_BIGINT = "BIGINT";
    public static final String DATA_TYPE_BINARY = "BINARY";
    public static final String DATA_TYPE_BOOLEAN = "BOOLEAN";
    public static final String DATA_TYPE_DOUBLE = "DOUBLE";
    public static final String DATA_TYPE_FLOAT = "FLOAT";
    public static final String DATA_TYPE_INT = "INT";
    public static final String DATA_TYPE_SMALLINT = "SMALLINT";
    public static final String DATA_TYPE_STRING = "STRING";
    public static final String DATA_TYPE_TINYINT = "TINYINT";

    public static final String[] QUERY_KEYWORDS = {
        KEYWORD_SELECT,
        KEYWORD_INSERT,
        KEYWORD_UPDATE,
        KEYWORD_DELETE,
        KEYWORD_MERGE,
        KEYWORD_UPSERT,
        KEYWORD_TRUNCATE
    };

    public static final String[] TABLE_KEYWORDS = {
        KEYWORD_FROM,
        KEYWORD_INSERT,
        KEYWORD_UPDATE,
        KEYWORD_DELETE,
        KEYWORD_INTO,
        "TABLE",
        "VIEW",
        KEYWORD_JOIN,
        KEYWORD_TRUNCATE,
        KEYWORD_MERGE,
    };

    public static final String[] COLUMN_KEYWORDS = {
        KEYWORD_SELECT,
        KEYWORD_WHERE,
        KEYWORD_SET,
        KEYWORD_ON,
        KEYWORD_AND,
        KEYWORD_OR,
        "BY",
        "HAVING"
    };

    public static final String[] DDL_KEYWORDS = {
        "CREATE",
        "ALTER",
        "DROP",
    };

    public static final String[] SQL2003_RESERVED_KEYWORDS = {
        "ALL",
        "ALLOCATE",
        "ALTER",
        KEYWORD_AND,
        "ANY",
        "ARE",
        "ARRAY",
        "AS",
        "ASENSITIVE",
        "ASYMMETRIC",
        "AT",
        "ATOMIC",
        "AUTHORIZATION",
        "BEGIN",
        KEYWORD_BETWEEN,
        //"BIGINT",
        DATA_TYPE_BINARY,
        "BOTH",
        "BY",
        "CALL",
        "CALLED",
        "CARDINALITY",
        "CASCADE",
        "CASCADED",
        KEYWORD_CASE,
        "CAST",
        "CEIL",
        "CEILING",
        "CHARACTER",
        "CHECK",
        "CLOSE",
        "COALESCE",
        "COLLATE",
        "COLLECT",
        "COLUMN",
        KEYWORD_COMMIT,
        "CONDITION",
        "CONNECT",
        "CONSTRAINT",
        "CONVERT",
        "CORR",
        "CORRESPONDING",
        "COVAR_POP",
        "COVAR_SAMP",
        "CREATE",
        "CROSS",
        "CUBE",
        "CUME_DIST",
        "CURRENT",
        "CURSOR",
        "CYCLE",
        "DAY",
        "DEALLOCATE",
        "DEC",
        "DECLARE",
        "DEFAULT",
        KEYWORD_DELETE,
        "DENSE_RANK",
        "DEREF",
        "DESCRIBE",
        "DETERMINISTIC",
        "DISCONNECT",
        "DISTINCT",
        "DROP",
        "DYNAMIC",
        "EACH",
        "ELEMENT",
        "ELSE",
        "END",
        "END-EXEC",
        "ESCAPE",
        "EVERY",
        "EXCEPT",
        "EXEC",
        "EXECUTE",
        "EXISTS",
        "EXP",
        "EXTERNAL",
        "EXTRACT",
        "FALSE",
        "FETCH",
        "FILTER",
        "FOR",
        "FOREIGN",
        "FREE",
        "FROM",
        "FULL",
        KEYWORD_FUNCTION,
        "FUSION",
        "GET",
        "GLOBAL",
        "GRANT",
        "GROUP",
        "GROUPING",
        "HAVING",
        "HOLD",
        "HOUR",
        "IDENTITY",
        "IF",
        KEYWORD_IN,
        "INDEX",
        "INDICATOR",
        "INNER",
        "INOUT",
        "INSENSITIVE",
        KEYWORD_INSERT,
        "INTERSECT",
        "INTERSECTION",
        "INTERVAL",
        "INTO",
        KEYWORD_IS,
        "JOIN",
        "LANGUAGE",
        "LARGE",
        "LATERAL",
        "LEADING",
        "LEFT",
        "LIKE",
        "LN",
        "LOCAL",
        "MATCH",
        "MEMBER",
        KEYWORD_MERGE,
        "METHOD",
        "MINUTE",
        "MOD",
        "MODIFIES",
//            "MODULE",  // too common for column names
        "MONTH",
        "MULTISET",
        "NATIONAL",
        "NATURAL",
        //"NCHAR",
        //"NCLOB",
        "NEW",
        "NO",
        "NONE",
        "NORMALIZE",
        KEYWORD_NOT,
        KEYWORD_NULL,
        "NULLIF",
        "NUMERIC",
        "OF",
        "OLD",
        KEYWORD_ON,
        "ONLY",
        "OPEN",
        "OR",
        "ORDER",
        "OUT",
        "OUTER",
        "OVER",
        "OVERLAPS",
        "OVERLAY",
        "PARAMETER",
        "PARTITION",
        "POSITION",
        "PRECISION",
        "PREPARE",
        "PRIMARY",
        KEYWORD_PROCEDURE,
        "RANGE",
        "RANK",
        "READS",
        "REAL",
        "RECURSIVE",
        "REF",
        "REFERENCES",
        "REFERENCING",
        "RELEASE",
        "RENAME",
        "RESULT",
        "RETURN",
        "RETURNS",
        "REVOKE",
        "RIGHT",
        KEYWORD_ROLLBACK,
        "ROLLUP",
        "ROW",
        "ROW_NUMBER",
        "ROWS",
        "SAVEPOINT",
        "SCOPE",
        "SCROLL",
        "SEARCH",
        "SECOND",
        KEYWORD_SELECT,
        "SENSITIVE",
        "SESSION_USER",
        KEYWORD_SET,
        "SIMILAR",
        "SMALLINT",
        "SOME",
        "SPECIFIC",
        "SPECIFICTYPE",
        "SQL",
        "SQLEXCEPTION",
        "SQLSTATE",
        "SQLWARNING",
        "START",
        "STATIC",
//            "STDDEV_POP",
//            "STDDEV_SAMP",
        "SUBMULTISET",
        "SYMMETRIC",
        "SYSTEM",
        "SYSTEM_USER",
        "TABLE",
        "TABLESAMPLE",
        "THEN",
        "TIMEZONE_HOUR",
        "TIMEZONE_MINUTE",
        "TO",
        "TRAILING",
        "TRANSLATE",
        "TRANSLATION",
        "TREAT",
        "TRIGGER",
        "TRUE",
        "UNION",
        "UNIQUE",
        "UNKNOWN",
        "UNNEST",
        KEYWORD_UPDATE,
        "USER",
        "USING",
        //"VALUE", // too common for column names
        KEYWORD_VALUES,
//            "VAR_POP",
//            "VAR_SAMP",
        //"VARCHAR",
        "VARYING",
        "WHEN",
        "WHENEVER",
        KEYWORD_WHERE,
        "WIDTH_BUCKET",
        "WINDOW",
        "WITH",
        "WITHIN",
        "WITHOUT",
        "YEAR",

        "NULLS",
        "FIRST",
        "LAST",
        "FOLLOWING",
        "PRECEDING",
        "UNBOUNDED",
        "LENGTH",
        "KEY",
        "LEVEL",
        "VIEW",
        "SEQUENCE",
        "SCHEMA",
        "ROLE",
        "RESTRICT",
        "ASC",
        "DESC",
        // Not actually standard but widely used
        "LIMIT",

        // Extended keywords
//            "A",
        "ABSOLUTE",
        "ACTION",
//            "ADA",
        "ADD",
//            "ADMIN",
        "AFTER",
        "ALWAYS",
//            "ASC",
        "ASSERTION",
        "ASSIGNMENT",
        "ATTRIBUTE",
        "ATTRIBUTES",
        "BEFORE",
//            "BERNOULLI",
//            "BREADTH",
//            "C",
        "CASCADE",
        "CATALOG",
//            "CATALOG_NAME",
        "CHAIN",
//            "CHARACTER_SET_CATALOG",
//            "CHARACTER_SET_NAME",
//            "CHARACTER_SET_SCHEMA",
        "CHARACTERISTICS",
        "CHARACTERS",
//            "CLASS_ORIGIN",
//            "COBOL",
        "COLLATION",
//            "COLLATION_CATALOG",
//            "COLLATION_NAME",
//            "COLLATION_SCHEMA",
//            "COLUMN_NAME",
//            "COMMAND_FUNCTION",
//            "COMMAND_FUNCTION_CODE",
        "COMMITTED",
//            "CONDITION_NUMBER",
        "CONNECTION",
//            "CONNECTION_NAME",
//            "CONSTRAINT_CATALOG",
//            "CONSTRAINT_NAME",
//            "CONSTRAINT_SCHEMA",
        "CONSTRAINTS",
        "CONSTRUCTOR",
        "CONTAINS",
        "CONTINUE",
        "CURSOR_NAME",
        "DATA",
//            "DATETIME_INTERVAL_CODE",
//            "DATETIME_INTERVAL_PRECISION",
        "DEFAULTS",
        "DEFERRABLE",
        "DEFERRED",
        "DEFINED",
        "DEFINER",
        "DEGREE",
        "DEPTH",
        "DERIVED",
//            "DESC",
        "DESCRIPTOR",
        "DIAGNOSTICS",
        "DISPATCH",
        "DOMAIN",
//            "DYNAMIC_FUNCTION",
//            "DYNAMIC_FUNCTION_CODE",
        "EQUALS",
        "EXCEPTION",
        "EXCLUDE",
        "EXCLUDING",
        "FINAL",
        "FIRST",
//            "FORTRAN",
        "FOUND",
//            "G",
        "GENERAL",
        "GENERATED",
        "GO",
        "GOTO",
        "GRANTED",
        "HIERARCHY",
        "IMMEDIATE",
        "IMPLEMENTATION",
        "INCLUDING",
        "INCREMENT",
        "INITIALLY",
        "INPUT",
        "INSTANCE",
        "INSTANTIABLE",
        "INVOKER",
        "ISOLATION",
//            "K",
//            "KEY_MEMBER",
        "KEY_TYPE",
        "LAST",
        "LOCATOR",
//            "M",
        "MAP",
        "MATCHED",
        "MAXVALUE",
//            "MESSAGE_LENGTH",
//            "MESSAGE_OCTET_LENGTH",
//            "MESSAGE_TEXT",
        "MINVALUE",
        "MORE",
        "MUMPS",
//            "NAME",
//            "NAMES",
        "NESTING",
        "NEXT",
        "NORMALIZED",
//            "NULLABLE",
//            "NULLS",
//            "NUMBER",
        "OBJECT",
        "OCTETS",
        "OPTION",
        "OPTIONS",
        "ORDERING",
        "ORDINALITY",
        "OTHERS",
        "OUTPUT",
        "OVERRIDING",
        "PAD",
//            "PARAMETER_MODE",
//            "PARAMETER_NAME",
//            "PARAMETER_ORDINAL_POSITION",
//            "PARAMETER_SPECIFIC_CATALOG",
//            "PARAMETER_SPECIFIC_NAME",
//            "PARAMETER_SPECIFIC_SCHEMA",
        "PARTIAL",
//            "PASCAL",
        "PATH",
        "PLACING",
//            "PLI",
        "PRESERVE",
        "PRIOR",
        "PRIVILEGES",
//            "PUBLIC",
        "READ",
        "RELATIVE",
        "REPEATABLE",
        "RESTART",
//            "RETURNED_CARDINALITY",
//            "RETURNED_LENGTH",
//            "RETURNED_OCTET_LENGTH",
//            "RETURNED_SQLSTATE",
        "ROUTINE",
//            "ROUTINE_CATALOG",
//            "ROUTINE_NAME",
//            "ROUTINE_SCHEMA",
//            "ROW_COUNT",
        "SCALE",
//            "SCHEMA_NAME",
//            "SCOPE_CATALOG",
//            "SCOPE_NAME",
//            "SCOPE_SCHEMA",
        "SECTION",
        "SECURITY",
        "SELF",
        "SERIALIZABLE",
//            "SERVER_NAME",
        "SESSION",
        "SETS",
//            "SIMPLE",
        "SIZE",
        "SOURCE",
        "SPACE",
//            "SPECIFIC_NAME",
//            "STATE", // too common for column names
        "STATEMENT",
        "STRUCTURE",
        "STYLE",
//            "SUBCLASS_ORIGIN",
//            "TABLE_NAME",
        "TEMPORARY",
        "TIES",
//            "TOP_LEVEL_COUNT",
        "TRANSACTION",
//            "TRANSACTION_ACTIVE",
//            "TRANSACTIONS_COMMITTED",
//            "TRANSACTIONS_ROLLED_BACK",
        "TRANSFORM",
        "TRANSFORMS",
//            "TRIGGER_CATALOG",
//            "TRIGGER_NAME",
//            "TRIGGER_SCHEMA",
        "TYPE",
        "UNCOMMITTED",
        "UNDER",
        "UNNAMED",
        "USAGE",
//            "USER_DEFINED_TYPE_CATALOG",
//            "USER_DEFINED_TYPE_CODE",
//            "USER_DEFINED_TYPE_NAME",
//            "USER_DEFINED_TYPE_SCHEMA",
        "WORK",
        "WRITE",
        "ZONE",

        KEYWORD_QUALIFY
    };

    public static final String[] SQL2003_FUNCTIONS = {
        "ABS",
        "AVG",
        "CHAR_LENGTH",
        "CHARACTER_LENGTH",
        "COUNT",
        "LOCALTIME",
        "LOCALTIMESTAMP",
//            "CURRENT_DATE",
//            "CURRENT_DEFAULT_TRANSFORM_GROUP",
//            "CURRENT_PATH",
//            "CURRENT_ROLE",
//            "CURRENT_TIME",
//            "CURRENT_TIMESTAMP",
//            "CURRENT_TRANSFORM_GROUP_FOR_TYPE",
//            "CURRENT_USER",
        "FLOOR",
        "LOWER",
        "MAX",
        "MIN",
        "OCTET_LENGTH",
        "PERCENT_RANK",
        "PERCENTILE_CONT",
        "PERCENTILE_DISC",
        "POWER",
        "REGR_AVGX",
        "REGR_AVGY",
        "REGR_COUNT",
        "REGR_INTERCEPT",
        "REGR_R2",
        "REGR_SLOPE",
        "REGR_SXX",
        "REGR_SXY",
        "REGR_SYY",
        "SQRT",
        "SUBSTRING",
        "SUM",
        "TRIM",
        "UESCAPE",
        "UPPER",
    };

    public static final String[] SQL_EX_KEYWORDS = {
        "CHANGE",
        "MODIFY",
    };

    public static final String[] DEFAULT_TYPES = {
        DATA_TYPE_BOOLEAN,
        "CHAR",
        "VARCHAR",
        DATA_TYPE_BINARY,
        "VARBINARY",
        DATA_TYPE_INT,
        "INTEGER",
        DATA_TYPE_SMALLINT,
        DATA_TYPE_BIGINT,
        "NUMBER",
        "NUMERIC",
        "DECIMAL",
        DATA_TYPE_FLOAT,
        DATA_TYPE_DOUBLE,
        "DATE",
        "TIME",
        "TIMESTAMP",
        "CLOB",
        "BLOB",
    };

    public static final String BLOCK_BEGIN = "BEGIN";
    public static final String BLOCK_END = "END";

    /**
     * Pseudo variables - these are not dynamic parameters
     */
    public static final String[] PSEUDO_VARIABLES = {
        ":NEW",
        ":OLD",
    };

    public static final char STRUCT_SEPARATOR = '.'; //$NON-NLS-1$

    public static final String CONFIG_COLOR_KEYWORD = "org.jkiss.dbeaver.sql.editor.color.keyword.foreground";
    public static final String CONFIG_COLOR_DATATYPE = "org.jkiss.dbeaver.sql.editor.color.datatype.foreground";
    public static final String CONFIG_COLOR_FUNCTION = "org.jkiss.dbeaver.sql.editor.color.function.foreground";
    public static final String CONFIG_COLOR_STRING = "org.jkiss.dbeaver.sql.editor.color.string.foreground";
    public static final String CONFIG_COLOR_TABLE = "org.jkiss.dbeaver.sql.editor.color.table.foreground";
    public static final String CONFIG_COLOR_TABLE_ALIAS = "org.jkiss.dbeaver.sql.editor.color.table.alias.foreground";
    public static final String CONFIG_COLOR_COLUMN = "org.jkiss.dbeaver.sql.editor.color.column.foreground";
    public static final String CONFIG_COLOR_COLUMN_DERIVED = "org.jkiss.dbeaver.sql.editor.color.column.derived.foreground";
    public static final String CONFIG_COLOR_SCHEMA = "org.jkiss.dbeaver.sql.editor.color.schema.foreground";
    public static final String CONFIG_COLOR_COMPOSITE_FIELD = "org.jkiss.dbeaver.sql.editor.color.composite.field.foreground";
    public static final String CONFIG_COLOR_SQL_VARIABLE = "org.jkiss.dbeaver.sql.editor.color.sqlVariable.foreground";
    public static final String CONFIG_COLOR_SEMANTIC_ERROR = "org.jkiss.dbeaver.sql.editor.color.semanticError.foreground";
    public static final String CONFIG_COLOR_NUMBER = "org.jkiss.dbeaver.sql.editor.color.number.foreground";
    public static final String CONFIG_COLOR_COMMENT = "org.jkiss.dbeaver.sql.editor.color.comment.foreground";
    public static final String CONFIG_COLOR_DELIMITER = "org.jkiss.dbeaver.sql.editor.color.delimiter.foreground";
    public static final String CONFIG_COLOR_PARAMETER = "org.jkiss.dbeaver.sql.editor.color.parameter.foreground";
    public static final String CONFIG_COLOR_COMMAND = "org.jkiss.dbeaver.sql.editor.color.command.foreground";
    public static final String CONFIG_COLOR_TEXT = "org.jkiss.dbeaver.sql.editor.color.text.foreground";
    public static final String CONFIG_COLOR_BACKGROUND = "org.jkiss.dbeaver.sql.editor.color.text.background";
    public static final String CONFIG_COLOR_DISABLED = "org.jkiss.dbeaver.sql.editor.color.disabled.background";

    public static final char DEFAULT_PARAMETER_MARK = '?';
    public static final char DEFAULT_PARAMETER_PREFIX = ':';
    public static final String DEFAULT_IDENTIFIER_QUOTE = "\"";
    public static final String DEFAULT_LIKE_ESCAPE = "\\";
    public static final String KEYWORD_PATTERN_CHARS = "\\*\\";
    public static final String DEFAULT_CONTROL_COMMAND_PREFIX = "@";

    public final static char[] BRACKETS = {'{', '}', '(', ')', '[', ']', '<', '>'};

    public static final String COLUMN_ASTERISK = "*";

}
KEYWORD_DELETE
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ghidra.util.datastruct; import java.io.Serializable; import java.util.Arrays; /** * The BitTree class maintains a set of ordered keys between the values of * 0 and N. It can quickly (O(log(n))) add keys, remove keys, find the next key * greater than some value , and find the prev key less than some value. It can * determine if a key is in the set in O(1) time. This implementation has been * limited to short keys so that it can implement the ShortKeySet interface. */ public class BitTree implements ShortKeySet, Serializable { private final static long serialVersionUID = 1; private int size; // The maximum number of keys in the set. Keys range from 0 to size-1 private int power2; // The next power of 2 that is greater than size. private int[] bits; // Array of bits used to represent a tree of binary values. A bit at // position N will have a left child at 2*N and a right child at 2*N+1. // Its parent position will be at N/2. // A bit is on if any bits in its subtree are on. Leaf bits correspond // directly to keys and are on if the key is in the set. private int numKeys; // The current number of keys in the set. // masks for seting and clearing bits within an 32 bit integer. 
private static final int[] setMask = { 0x00000001,0x00000002,0x00000004,0x00000008, 0x00000010,0x00000020,0x00000040,0x00000080, 0x00000100,0x00000200,0x00000400,0x00000800, 0x00001000,0x00002000,0x00004000,0x00008000, 0x00010000,0x00020000,0x00040000,0x00080000, 0x00100000,0x00200000,0x00400000,0x00800000, 0x01000000,0x02000000,0x04000000,0x08000000, 0x10000000,0x20000000,0x40000000,0x80000000 }; private static final int[] clearMask={ 0xfffffffe,0xfffffffd,0xfffffffb,0xfffffff7, 0xffffffef,0xffffffdf,0xffffffbf,0xffffff7f, 0xfffffeff,0xfffffdff,0xfffffbff,0xfffff7ff, 0xffffefff,0xffffdfff,0xffffbfff,0xffff7fff, 0xfffeffff,0xfffdffff,0xfffbffff,0xfff7ffff, 0xffefffff,0xffdfffff,0xffbfffff,0xff7fffff, 0xfeffffff,0xfdffffff,0xfbffffff,0xf7ffffff, 0xefffffff,0xdfffffff,0xbfffffff,0x7fffffff }; /** * The BitTree constructor takes the maximum key value. The legal * keys for this set range from 0 to maxKey. * @param maxKey the maximum key that will ever be put into this BitTree. */ public BitTree(short maxKey) { this(maxKey,false); } /** * The BitTree constructor takes the maximum key value. The legal * keys for this set range from 0 to maxKey. * @param maxKey the maximum key value. * @param isFull if true, then the set is initilized to contain all legal keys. */ public BitTree(short maxKey, boolean isFull) { this.size = maxKey+1; // find the next power of 2 greater than or equal to n. power2 = 2; int sz = maxKey+1; while (sz > 1) { sz /= 2; power2 *= 2; } // The number of bits need to store the tree is 2 times the number of keys. // Since we are storing the bits in 32 bit integers we need N/16 integers // to store the bits. int nInts = power2/16; // must have at least 1 if (nInts < 1) { nInts = 1; } bits = new int[nInts]; if (isFull) { Arrays.fill(bits,0xffffffff); numKeys = this.size; } } /** * Removes all keys from the set. */ public void removeAll() { Arrays.fill(bits,0); numKeys = 0; } /** * Returns the number of keys currently in the set. 
*/ public int size() { return numKeys; } /** * Adds a key to the set. * @param key to be added. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public void put(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // The first "power2" number of bits are used for internal tree nodes. The // leaf nodes start at index "power2". int nodeIndex = power2+key; // set the leaf bit on to indicate that the key is in the set. // if the bit is already on (The key is already in the set), then just return. if (!setBit(nodeIndex)) { return; } // increment the number of keys in the set. numKeys++; // go up the tree setting each parent bit to "on" while(nodeIndex != 1) { // compute parent index. nodeIndex /= 2; // if any parent bit is already on, then all its parents are already on, // so were done. if (!setBit(nodeIndex)) { return; } } } /** * Removes the key from the set. * @param key The key to remove. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public boolean remove(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // compute the leaf node index. int nodeIndex = power2+key; // clear the leaf bit to indicate that the key is not in the set. // if it is already "off", then we don't have to do anything if (!clearBit(nodeIndex)) { return false; } // decrement the number of keys in the set numKeys--; // traverse up the tree, clearing any parent nodes if all its child // nodes are "off". while(nodeIndex != 1) { nodeIndex /= 2; if (!isBitSet(nodeIndex)) { return true; } if (isBitSet(nodeIndex*2) || isBitSet(nodeIndex*2+1)) { return true; } clearBit(nodeIndex); } return true; } /** * Determines if a given key is in the set. * @param key the key to check if it is in this set. * @return true if the key is in the set. 
*/ public boolean containsKey(short key) { if ((key < 0) || (key >= size)) { return false; } return isBitSet(power2+key); } /** * finds the next key that is in the set that is greater than the given key. * @param key from which to search forward. * @return the next key greater than the given key or -1 if there is no key * greater than the given key. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public short getNext(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // compute leaf node. int nodeIndex = key + power2; // while we are not at the root, search upward until we find a right // sibling that is "on". while(nodeIndex != 1) { // see if we are odd (i.e. the right child) int odd = nodeIndex % 2; // if we are the left child see if my sibling on the right is on. // if so, then the next key must be in that subtree. if (odd == 0) { if (isBitSet(nodeIndex+1)) { // we found a right sibling that is "on", set nodeIndex to // that node. nodeIndex++; break; } } nodeIndex = nodeIndex/2; } // if we made it all the way up to the root node, then there is no key // greater than, so return -1; if (nodeIndex == 1) { return (short)-1; } // now that we found a right sibling that is "on", // follow the leftmost trail of "on" bits to an "on" leaf bit. This bit // represents the next key in the set. while (nodeIndex < power2) { nodeIndex *= 2; // if the left child is not on, then the right child must be "on". if (!isBitSet(nodeIndex)) { nodeIndex++; } } short nextKey = (short)(nodeIndex-power2); if (nextKey >= size) { nextKey = -1; } return nextKey; } /** * Finds the next key that is in the set that is less than the given key. * @param key the key to search before. * @return the next key less than the given key or -1 if there is no key * less than the given key. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. 
*/ public short getPrevious(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // find the leaf node for the given key. int nodeIndex = key + power2; // while we are not at the root, search upward until we find a left // sibling that is "on". while(nodeIndex != 1) { // check if we are a right node. int odd = nodeIndex % 2; // if we are the right child see if my sibling on the left is "on". // if so, then the previous key must be in that subtree. if (odd == 1) { if (isBitSet(nodeIndex-1)) { nodeIndex--; break; } } nodeIndex = nodeIndex/2; } // If we went all the way to the root then there is no previous key, return -1. if (nodeIndex == 1) { return (short)-1; } // follow the rightmost trail of "on" bits to an "on" leaf bit. This bit // represents the previous key in the set. while (nodeIndex < power2) { nodeIndex *= 2; if (isBitSet(nodeIndex+1)) { nodeIndex++; } } return (short)(nodeIndex-power2); } /** * Checks if the set is empty. * @return true if the set is empty. */ public boolean isEmpty() { return numKeys == 0; } /** * Returns the first (lowest) key in the set. */ public short getFirst() { // if the 0 key is in the set, then return it. if(containsKey((short)0)) { return (short)0; } // otherwise return the next key after 0. return getNext((short)0); } /** * Returns the last (highest) key in the set. */ public short getLast() { // if the highest possible key is in the set, return it. if(containsKey((short)(size-1))) { return (short)(size-1); } // otherwise return the next lowest key. return getPrevious((short)(size-1)); } /** * Sets the nth bit on. */ private boolean setBit(int n) { int [MASK] = n >> 5; int maskIndex = n & 0x1f; int old = bits[ [MASK] ]; return ((bits[ [MASK] ] |= setMask[maskIndex]) != old); } /** * Sets the nth bit to off. 
*/ private boolean clearBit(int n) { int [MASK] = n >> 5; int maskIndex = n & 0x1f; int old = bits[ [MASK] ]; return ((bits[ [MASK] ] &= clearMask[maskIndex]) != old); } /** * Tests if the nth bit is on. */ private boolean isBitSet(int n) { int [MASK] = n >> 5; int maskIndex = n & 0x1f; return ((bits[ [MASK] ] & setMask[maskIndex]) != 0); } }
intIndex
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ghidra.util.datastruct; import java.io.Serializable; import java.util.Arrays; /** * The BitTree class maintains a set of ordered keys between the values of * 0 and N. It can quickly (O(log(n))) add keys, remove keys, find the next key * greater than some value , and find the prev key less than some value. It can * determine if a key is in the set in O(1) time. This implementation has been * limited to short keys so that it can implement the ShortKeySet interface. */ public class BitTree implements ShortKeySet, Serializable { private final static long serialVersionUID = 1; private int size; // The maximum number of keys in the set. Keys range from 0 to size-1 private int power2; // The next power of 2 that is greater than size. private int[] bits; // Array of bits used to represent a tree of binary values. A bit at // position N will have a left child at 2*N and a right child at 2*N+1. // Its parent position will be at N/2. // A bit is on if any bits in its subtree are on. Leaf bits correspond // directly to keys and are on if the key is in the set. private int numKeys; // The current number of keys in the set. // masks for seting and clearing bits within an 32 bit integer. 
private static final int[] setMask = { 0x00000001,0x00000002,0x00000004,0x00000008, 0x00000010,0x00000020,0x00000040,0x00000080, 0x00000100,0x00000200,0x00000400,0x00000800, 0x00001000,0x00002000,0x00004000,0x00008000, 0x00010000,0x00020000,0x00040000,0x00080000, 0x00100000,0x00200000,0x00400000,0x00800000, 0x01000000,0x02000000,0x04000000,0x08000000, 0x10000000,0x20000000,0x40000000,0x80000000 }; private static final int[] clearMask={ 0xfffffffe,0xfffffffd,0xfffffffb,0xfffffff7, 0xffffffef,0xffffffdf,0xffffffbf,0xffffff7f, 0xfffffeff,0xfffffdff,0xfffffbff,0xfffff7ff, 0xffffefff,0xffffdfff,0xffffbfff,0xffff7fff, 0xfffeffff,0xfffdffff,0xfffbffff,0xfff7ffff, 0xffefffff,0xffdfffff,0xffbfffff,0xff7fffff, 0xfeffffff,0xfdffffff,0xfbffffff,0xf7ffffff, 0xefffffff,0xdfffffff,0xbfffffff,0x7fffffff }; /** * The BitTree constructor takes the maximum key value. The legal * keys for this set range from 0 to maxKey. * @param maxKey the maximum key that will ever be put into this BitTree. */ public BitTree(short maxKey) { this(maxKey,false); } /** * The BitTree constructor takes the maximum key value. The legal * keys for this set range from 0 to maxKey. * @param maxKey the maximum key value. * @param isFull if true, then the set is initilized to contain all legal keys. */ public BitTree(short maxKey, boolean isFull) { this.size = maxKey+1; // find the next power of 2 greater than or equal to n. power2 = 2; int sz = maxKey+1; while (sz > 1) { sz /= 2; power2 *= 2; } // The number of bits need to store the tree is 2 times the number of keys. // Since we are storing the bits in 32 bit integers we need N/16 integers // to store the bits. int nInts = power2/16; // must have at least 1 if (nInts < 1) { nInts = 1; } bits = new int[nInts]; if (isFull) { Arrays.fill(bits,0xffffffff); numKeys = this.size; } } /** * Removes all keys from the set. */ public void removeAll() { Arrays.fill(bits,0); numKeys = 0; } /** * Returns the number of keys currently in the set. 
*/ public int size() { return numKeys; } /** * Adds a key to the set. * @param key to be added. * @exception [MASK] if the given key is not * in the range [0, size-1]. */ public void put(short key) { if ((key < 0) || (key >= size)) { throw new [MASK] (); } // The first "power2" number of bits are used for internal tree nodes. The // leaf nodes start at index "power2". int nodeIndex = power2+key; // set the leaf bit on to indicate that the key is in the set. // if the bit is already on (The key is already in the set), then just return. if (!setBit(nodeIndex)) { return; } // increment the number of keys in the set. numKeys++; // go up the tree setting each parent bit to "on" while(nodeIndex != 1) { // compute parent index. nodeIndex /= 2; // if any parent bit is already on, then all its parents are already on, // so were done. if (!setBit(nodeIndex)) { return; } } } /** * Removes the key from the set. * @param key The key to remove. * @exception [MASK] if the given key is not * in the range [0, size-1]. */ public boolean remove(short key) { if ((key < 0) || (key >= size)) { throw new [MASK] (); } // compute the leaf node index. int nodeIndex = power2+key; // clear the leaf bit to indicate that the key is not in the set. // if it is already "off", then we don't have to do anything if (!clearBit(nodeIndex)) { return false; } // decrement the number of keys in the set numKeys--; // traverse up the tree, clearing any parent nodes if all its child // nodes are "off". while(nodeIndex != 1) { nodeIndex /= 2; if (!isBitSet(nodeIndex)) { return true; } if (isBitSet(nodeIndex*2) || isBitSet(nodeIndex*2+1)) { return true; } clearBit(nodeIndex); } return true; } /** * Determines if a given key is in the set. * @param key the key to check if it is in this set. * @return true if the key is in the set. 
*/ public boolean containsKey(short key) { if ((key < 0) || (key >= size)) { return false; } return isBitSet(power2+key); } /** * finds the next key that is in the set that is greater than the given key. * @param key from which to search forward. * @return the next key greater than the given key or -1 if there is no key * greater than the given key. * @exception [MASK] if the given key is not * in the range [0, size-1]. */ public short getNext(short key) { if ((key < 0) || (key >= size)) { throw new [MASK] (); } // compute leaf node. int nodeIndex = key + power2; // while we are not at the root, search upward until we find a right // sibling that is "on". while(nodeIndex != 1) { // see if we are odd (i.e. the right child) int odd = nodeIndex % 2; // if we are the left child see if my sibling on the right is on. // if so, then the next key must be in that subtree. if (odd == 0) { if (isBitSet(nodeIndex+1)) { // we found a right sibling that is "on", set nodeIndex to // that node. nodeIndex++; break; } } nodeIndex = nodeIndex/2; } // if we made it all the way up to the root node, then there is no key // greater than, so return -1; if (nodeIndex == 1) { return (short)-1; } // now that we found a right sibling that is "on", // follow the leftmost trail of "on" bits to an "on" leaf bit. This bit // represents the next key in the set. while (nodeIndex < power2) { nodeIndex *= 2; // if the left child is not on, then the right child must be "on". if (!isBitSet(nodeIndex)) { nodeIndex++; } } short nextKey = (short)(nodeIndex-power2); if (nextKey >= size) { nextKey = -1; } return nextKey; } /** * Finds the next key that is in the set that is less than the given key. * @param key the key to search before. * @return the next key less than the given key or -1 if there is no key * less than the given key. * @exception [MASK] if the given key is not * in the range [0, size-1]. 
*/ public short getPrevious(short key) { if ((key < 0) || (key >= size)) { throw new [MASK] (); } // find the leaf node for the given key. int nodeIndex = key + power2; // while we are not at the root, search upward until we find a left // sibling that is "on". while(nodeIndex != 1) { // check if we are a right node. int odd = nodeIndex % 2; // if we are the right child see if my sibling on the left is "on". // if so, then the previous key must be in that subtree. if (odd == 1) { if (isBitSet(nodeIndex-1)) { nodeIndex--; break; } } nodeIndex = nodeIndex/2; } // If we went all the way to the root then there is no previous key, return -1. if (nodeIndex == 1) { return (short)-1; } // follow the rightmost trail of "on" bits to an "on" leaf bit. This bit // represents the previous key in the set. while (nodeIndex < power2) { nodeIndex *= 2; if (isBitSet(nodeIndex+1)) { nodeIndex++; } } return (short)(nodeIndex-power2); } /** * Checks if the set is empty. * @return true if the set is empty. */ public boolean isEmpty() { return numKeys == 0; } /** * Returns the first (lowest) key in the set. */ public short getFirst() { // if the 0 key is in the set, then return it. if(containsKey((short)0)) { return (short)0; } // otherwise return the next key after 0. return getNext((short)0); } /** * Returns the last (highest) key in the set. */ public short getLast() { // if the highest possible key is in the set, return it. if(containsKey((short)(size-1))) { return (short)(size-1); } // otherwise return the next lowest key. return getPrevious((short)(size-1)); } /** * Sets the nth bit on. */ private boolean setBit(int n) { int intIndex = n >> 5; int maskIndex = n & 0x1f; int old = bits[intIndex]; return ((bits[intIndex] |= setMask[maskIndex]) != old); } /** * Sets the nth bit to off. 
*/ private boolean clearBit(int n) { int intIndex = n >> 5; int maskIndex = n & 0x1f; int old = bits[intIndex]; return ((bits[intIndex] &= clearMask[maskIndex]) != old); } /** * Tests if the nth bit is on. */ private boolean isBitSet(int n) { int intIndex = n >> 5; int maskIndex = n & 0x1f; return ((bits[intIndex] & setMask[maskIndex]) != 0); } }
IndexOutOfBoundsException
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import 
com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.EvaluationContext; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.MemoizingEvaluator; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import net.starlark.java.eval.StarlarkSemantics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. 
*/ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator evaluator; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); BlazeDirectories directories = new BlazeDirectories( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories); Map<SkyFunctionName, SkyFunction> skyFunctions = new HashMap<>(); skyFunctions.put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache.NO_CACHE, externalFilesHelper)); skyFunctions.put(SkyFunctions.FILE, new FileFunction(pkgLocator, directories)); skyFunctions.put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction()); skyFunctions.put( SkyFunctions.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); skyFunctions.put(SkyFunctions.IGNORED_SUBDIRECTORIES, IgnoredSubdirectoriesFunction.NOOP); skyFunctions.put( SkyFunctions.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); skyFunctions.put(SkyFunctions.WORKSPACE_NAME, 
new TestWorkspaceNameFunction()); skyFunctions.put( SkyFunctions.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); evaluator = new InMemoryMemoizingEvaluator(skyFunctions, differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String relative) { return RootedPath.toRootedPath( artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative(relative)); } private static RootedPath siblingOf(Artifact artifact, String relative) { PathFragment parent = Preconditions.checkNotNull(artifact.getRootRelativePath().getParentDirectory()); return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative(relative)); } private void createFile(Path path, String... contents) throws Exception { if (!path.getParentDirectory().exists()) { scratch.dir(path.getParentDirectory().getPathString()); } scratch.file(path.getPathString(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact.getPath(), contents); } private RootedPath createFile(RootedPath path, String... 
contents) throws Exception { createFile(path.asPath(), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { EvaluationContext evaluationContext = EvaluationContext.newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return evaluator.evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink(PathFragment.create(from), to.getPath().asFragment()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink(PathFragment.create(from), to.asPath().asFragment()); } private FilesetOutputSymlink symlink(PathFragment from, PathFragment to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory.asFragment()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. 
(input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.targetPath(), input.relativeToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); 
assertThat(result.getError(key).getException()) .hasMessageThat() .contains("foo contains a directory"); } @Test public void testFileTraversalForDanglingSymlink() throws Exception { Artifact linkName = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf(linkName, "target.file"), "blah"); linkName.getPath().createSymbolicLink(PathFragment.create("target.file")); linkTarget.asPath().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ linkName, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different [MASK] (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class FingerprintTester { private final Map<String, Domain> domains; FingerprintTester(Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@Nullable String useAlternateFor) { Map<String, Object> [MASK] = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { [MASK] .put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return [MASK] ; } public void doTest() throws Exception { Fingerprint fp = new Fingerprint(); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedInFingerprint) { assertWithMessage( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { assertWithMessage( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedInFingerprint; Object valueA; Object valueB; Domain(boolean includedInFingerprint, Object valueA, Object valueB) { this.includedInFingerprint = includedInFingerprint; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOfFingerprint(Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain notPartOfFingerprint(Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void testFingerprintOfFileTraversal() throws Exception { new FingerprintTester( ImmutableMap.<String, Domain>builder() .put("ownerLabel", notPartOfFingerprint("//bar")) .put("fileToTraverse", 
partOfFingerprint("foo/file.a", "bar/file.b")) .put("destPath", partOfFingerprint("out1", "out2")) .put("strictFilesetOutput", partOfFingerprint(true, false)) .put("permitDirectories", partOfFingerprint(true, false)) .buildOrThrow()) { @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), PathFragment.create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void testFingerprintOfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new FingerprintTester( ImmutableMap.of( "ownerLabel", notPartOfFingerprint("//bar"), "nestedArtifact", partOfFingerprint(nested1, nested2), "destDir", partOfFingerprint("out1", "out2"), "excludes", partOfFingerprint(ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), PathFragment.create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements SkyFunction { @Nullable @Override public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
values
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.qbao.cat.plugin.common; import java.util.StringTokenizer; import org.aspectj.lang.ProceedingJoinPoint; import org.aspectj.lang. [MASK] .Around; import org.aspectj.lang. [MASK] .Aspect; import org.aspectj.lang. [MASK] .Pointcut; import com.dianping.cat.Cat; import com.dianping.cat.message.Transaction; import com.qbao.cat.plugin.DefaultPluginTemplate; @Aspect public abstract class CommonPluginTemplate extends DefaultPluginTemplate { @Override @Pointcut public void scope() {} @Override @Around(POINTCUT_NAME) public Object doAround(ProceedingJoinPoint pjp) throws Throwable { return super.doAround(pjp); } @Override protected Transaction beginLog(ProceedingJoinPoint pjp) { StringBuilder type = new StringBuilder(); String packageStr = pjp.getSignature().getDeclaringType().getPackage().getName(); StringTokenizer st = new StringTokenizer(packageStr, "."); for(int i=0;i<2;i++){ type.append(st.nextToken()); type.append("."); } type.append("Method"); Transaction transaction = Cat.newTransaction(type.toString(),pjp.getSignature().toString()); return transaction; } @Override protected void endLog(Transaction transaction, Object retVal, Object... params) {} }
annotation
package com.baeldung.bufferedreader; import org.junit.Test; import java.io.BufferedReader; import java.io. [MASK] ; import java.io.StringReader; import java.nio.file.Files; import java.nio.file.Paths; import static org.junit.Assert.*; public class BufferedReaderUnitTest { private static final String FILE_PATH = "src/main/resources/input.txt"; @Test public void givenBufferedReader_whenSkipUnderscores_thenOk() throws [MASK] { StringBuilder result = new StringBuilder(); try (BufferedReader reader = new BufferedReader(new StringReader("1__2__3__4__5"))) { int value; while((value = reader.read()) != -1) { result.append((char) value); reader.skip(2L); } } assertEquals("12345", result.toString()); } @Test public void givenBufferedReader_whenSkipsWhitespacesAtBeginning_thenOk() throws [MASK] { String result; try (BufferedReader reader = new BufferedReader(new StringReader(" Lorem ipsum dolor sit amet."))) { do { reader.mark(1); } while(Character.isWhitespace(reader.read())); reader.reset(); result = reader.readLine(); } assertEquals("Lorem ipsum dolor sit amet.", result); } @Test public void whenCreatesNewBufferedReader_thenOk() throws [MASK] { try(BufferedReader reader = Files.newBufferedReader(Paths.get(FILE_PATH))) { assertNotNull(reader); assertTrue(reader.ready()); } } }
IOException
/* * Copyright 2012-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.loader.net.protocol.jar; import java.io.File; import java.net.MalformedURLException; import java.net.URL; import java.util.jar.JarEntry; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; import org.springframework.boot.loader.net.util.UrlDecoder; import static org.assertj.core.api.Assertions.assertThat; /** * Tests for {@link JarUrl}. * * @author Phillip Webb */ class JarUrlTests { @TempDir File temp; File jarFile; String [MASK] ; @BeforeEach void setup() throws MalformedURLException { this.jarFile = new File(this.temp, "my.jar"); this. [MASK] = this.jarFile.toURI().toURL().toString().substring("file:".length()).replace("!", "%21"); } @Test void createWithFileReturnsUrl() { URL url = JarUrl.create(this.jarFile); assertThat(url).hasToString("jar:file:%s!/".formatted(this. [MASK] )); } @Test void createWithFileAndEntryReturnsUrl() { JarEntry entry = new JarEntry("lib.jar"); URL url = JarUrl.create(this.jarFile, entry); assertThat(url).hasToString("jar:nested:%s/!lib.jar!/".formatted(this. [MASK] )); } @Test void createWithFileAndNullEntryReturnsUrl() { URL url = JarUrl.create(this.jarFile, (JarEntry) null); assertThat(url).hasToString("jar:file:%s!/".formatted(this. 
[MASK] )); } @Test void createWithFileAndNameReturnsUrl() { URL url = JarUrl.create(this.jarFile, "lib.jar"); assertThat(url).hasToString("jar:nested:%s/!lib.jar!/".formatted(this. [MASK] )); } @Test void createWithFileAndNullNameReturnsUrl() { URL url = JarUrl.create(this.jarFile, (String) null); assertThat(url).hasToString("jar:file:%s!/".formatted(this. [MASK] )); } @Test void createWithFileNameAndPathReturnsUrl() { URL url = JarUrl.create(this.jarFile, "lib.jar", "com/example/My.class"); assertThat(url).hasToString("jar:nested:%s/!lib.jar!/com/example/My.class".formatted(this. [MASK] )); } @Test void createWithReservedCharsInName() throws Exception { String badFolderName = "foo#bar!/baz/!oof"; this.temp = new File(this.temp, badFolderName); setup(); URL url = JarUrl.create(this.jarFile, "lib.jar", "com/example/My.class"); assertThat(url).hasToString("jar:nested:%s/!lib.jar!/com/example/My.class".formatted(this. [MASK] )); assertThat(UrlDecoder.decode(url.toString())).contains(badFolderName); } }
jarFileUrlPath
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import 
com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.EvaluationContext; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.MemoizingEvaluator; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import net.starlark.java.eval.StarlarkSemantics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. 
*/ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator evaluator; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); BlazeDirectories directories = new BlazeDirectories( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories); Map<SkyFunctionName, SkyFunction> skyFunctions = new HashMap<>(); skyFunctions.put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache.NO_CACHE, externalFilesHelper)); skyFunctions.put(SkyFunctions.FILE, new FileFunction(pkgLocator, directories)); skyFunctions.put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction()); skyFunctions.put( SkyFunctions.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); skyFunctions.put(SkyFunctions.IGNORED_SUBDIRECTORIES, IgnoredSubdirectoriesFunction.NOOP); skyFunctions.put( SkyFunctions.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); skyFunctions.put(SkyFunctions.WORKSPACE_NAME, 
new TestWorkspaceNameFunction()); skyFunctions.put( SkyFunctions.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); evaluator = new InMemoryMemoizingEvaluator(skyFunctions, differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String relative) { return RootedPath.toRootedPath( artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative(relative)); } private static RootedPath siblingOf(Artifact artifact, String relative) { PathFragment parent = Preconditions.checkNotNull(artifact.getRootRelativePath().getParentDirectory()); return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative(relative)); } private void createFile(Path path, String... contents) throws Exception { if (!path.getParentDirectory().exists()) { scratch.dir(path.getParentDirectory().getPathString()); } scratch.file(path.getPathString(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact.getPath(), contents); } private RootedPath createFile(RootedPath path, String... 
contents) throws Exception { createFile(path.asPath(), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { EvaluationContext evaluationContext = EvaluationContext.newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return evaluator.evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink(PathFragment.create(from), to.getPath().asFragment()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink(PathFragment.create(from), to.asPath().asFragment()); } private FilesetOutputSymlink symlink(PathFragment from, PathFragment to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory.asFragment()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. 
(input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.targetPath(), input.relativeToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); 
assertThat(result.getError(key).getException()) .hasMessageThat() .contains("foo contains a directory"); } @Test public void testFileTraversalForDanglingSymlink() throws Exception { Artifact linkName = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf(linkName, "target.file"), "blah"); linkName.getPath().createSymbolicLink(PathFragment.create("target.file")); linkTarget.asPath().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ linkName, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different values (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class [MASK] { private final Map<String, Domain> domains; [MASK] (Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@Nullable String useAlternateFor) { Map<String, Object> values = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { values.put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return values; } public void doTest() throws Exception { Fingerprint fp = new Fingerprint(); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedInFingerprint) { assertWithMessage( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { assertWithMessage( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedInFingerprint; Object valueA; Object valueB; Domain(boolean includedInFingerprint, Object valueA, Object valueB) { this.includedInFingerprint = includedInFingerprint; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOfFingerprint(Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain notPartOfFingerprint(Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void testFingerprintOfFileTraversal() throws Exception { new [MASK] ( ImmutableMap.<String, Domain>builder() .put("ownerLabel", notPartOfFingerprint("//bar")) .put("fileToTraverse", partOfFingerprint("foo/file.a", "bar/file.b")) 
.put("destPath", partOfFingerprint("out1", "out2")) .put("strictFilesetOutput", partOfFingerprint(true, false)) .put("permitDirectories", partOfFingerprint(true, false)) .buildOrThrow()) { @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), PathFragment.create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void testFingerprintOfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new [MASK] ( ImmutableMap.of( "ownerLabel", notPartOfFingerprint("//bar"), "nestedArtifact", partOfFingerprint(nested1, nested2), "destDir", partOfFingerprint("out1", "out2"), "excludes", partOfFingerprint(ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), PathFragment.create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements SkyFunction { @Nullable @Override public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
FingerprintTester
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.io.network.api.serialization; import org.apache.flink.core.memory.MemorySegment; import org.apache.flink.runtime.io.network.buffer.Buffer; import org.apache.flink.testutils.junit.utils.TempDirUtils; import org.apache.flink.util.CloseableIterator; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; import java.io.File; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; import java.util.Random; import static org.apache.flink.core.memory.MemorySegmentFactory.wrap; import static org.apache.flink.runtime.io.network.api.serialization.SpillingAdaptiveSpanningRecordDeserializer.LENGTH_BYTES; import static org.assertj.core.api.Assertions.assertThat; /** {@link SpanningWrapper} test. 
*/ class SpanningWrapperTest { private static final Random random = new Random(); @TempDir private Path folder; @Test void testLargeUnconsumedSegment() throws Exception { int recordLen = 100; int firstChunk = (int) (recordLen * .9); int spillingThreshold = (int) (firstChunk * .9); byte[] record1 = recordBytes(recordLen); byte[] record2 = recordBytes(recordLen * 2); File canNotEecutableFile = TempDirUtils.newFolder(folder); canNotEecutableFile.setExecutable(false); // Always pick 'canNotEecutableFile' first as the Spilling Channel TmpDir. Thus trigger an // IOException. SpanningWrapper spanningWrapper = new SpanningWrapper( new String[] { TempDirUtils.newFolder(folder).getAbsolutePath(), canNotEecutableFile.getAbsolutePath() + File.separator + "pathdonotexit" }, spillingThreshold, recordLen); spanningWrapper.transferFrom( [MASK] (record1, firstChunk), recordLen); spanningWrapper.addNextChunkFromMemorySegment( wrap(record1), firstChunk, recordLen - firstChunk + LENGTH_BYTES); spanningWrapper.addNextChunkFromMemorySegment(wrap(record2), 0, record2.length); CloseableIterator<Buffer> unconsumedSegment = spanningWrapper.getUnconsumedSegment(); spanningWrapper .getInputView() .readFully(new byte[recordLen], 0, recordLen); // read out from file spanningWrapper.transferLeftOverTo(new NonSpanningWrapper()); // clear any leftover spanningWrapper.transferFrom( [MASK] (recordBytes(recordLen), recordLen), recordLen); // overwrite with new data canNotEecutableFile.setExecutable(true); assertThat(concat(record1, record2)).isEqualTo(toByteArray(unconsumedSegment)); } private byte[] recordBytes(int recordLen) { byte[] inputData = randomBytes(recordLen + LENGTH_BYTES); for (int i = 0; i < Integer.BYTES; i++) { inputData[Integer.BYTES - i - 1] = (byte) (recordLen >>> i * 8); } return inputData; } private NonSpanningWrapper [MASK] (byte[] bytes, int len) { NonSpanningWrapper nonSpanningWrapper = new NonSpanningWrapper(); MemorySegment segment = wrap(bytes); 
nonSpanningWrapper.initializeFromMemorySegment(segment, 0, len); nonSpanningWrapper .readInt(); // emulate read length performed in getNextRecord to move position return nonSpanningWrapper; } private byte[] toByteArray(CloseableIterator<Buffer> unconsumed) { final List<Buffer> buffers = new ArrayList<>(); try { unconsumed.forEachRemaining(buffers::add); byte[] result = new byte[buffers.stream().mapToInt(Buffer::readableBytes).sum()]; int offset = 0; for (Buffer buffer : buffers) { int len = buffer.readableBytes(); buffer.getNioBuffer(0, len).get(result, offset, len); offset += len; } return result; } finally { buffers.forEach(Buffer::recycleBuffer); } } private byte[] randomBytes(int length) { byte[] inputData = new byte[length]; random.nextBytes(inputData); return inputData; } private byte[] concat(byte[] input1, byte[] input2) { byte[] expected = new byte[input1.length + input2.length]; System.arraycopy(input1, 0, expected, 0, input1.length); System.arraycopy(input2, 0, expected, input1.length, input2.length); return expected; } }
wrapNonSpanning
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth. [MASK] ; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; 
import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.EvaluationContext; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.MemoizingEvaluator; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import net.starlark.java.eval.StarlarkSemantics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. 
*/ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator evaluator; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); BlazeDirectories directories = new BlazeDirectories( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories); Map<SkyFunctionName, SkyFunction> skyFunctions = new HashMap<>(); skyFunctions.put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache.NO_CACHE, externalFilesHelper)); skyFunctions.put(SkyFunctions.FILE, new FileFunction(pkgLocator, directories)); skyFunctions.put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction()); skyFunctions.put( SkyFunctions.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); skyFunctions.put(SkyFunctions.IGNORED_SUBDIRECTORIES, IgnoredSubdirectoriesFunction.NOOP); skyFunctions.put( SkyFunctions.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); skyFunctions.put(SkyFunctions.WORKSPACE_NAME, 
new TestWorkspaceNameFunction()); skyFunctions.put( SkyFunctions.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); evaluator = new InMemoryMemoizingEvaluator(skyFunctions, differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String relative) { return RootedPath.toRootedPath( artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative(relative)); } private static RootedPath siblingOf(Artifact artifact, String relative) { PathFragment parent = Preconditions.checkNotNull(artifact.getRootRelativePath().getParentDirectory()); return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative(relative)); } private void createFile(Path path, String... contents) throws Exception { if (!path.getParentDirectory().exists()) { scratch.dir(path.getParentDirectory().getPathString()); } scratch.file(path.getPathString(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact.getPath(), contents); } private RootedPath createFile(RootedPath path, String... 
contents) throws Exception { createFile(path.asPath(), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { EvaluationContext evaluationContext = EvaluationContext.newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return evaluator.evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink(PathFragment.create(from), to.getPath().asFragment()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink(PathFragment.create(from), to.asPath().asFragment()); } private FilesetOutputSymlink symlink(PathFragment from, PathFragment to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory.asFragment()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. 
(input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.targetPath(), input.relativeToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); 
assertThat(result.getError(key).getException()) .hasMessageThat() .contains("foo contains a directory"); } @Test public void testFileTraversalForDanglingSymlink() throws Exception { Artifact linkName = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf(linkName, "target.file"), "blah"); linkName.getPath().createSymbolicLink(PathFragment.create("target.file")); linkTarget.asPath().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ linkName, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different values (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class FingerprintTester { private final Map<String, Domain> domains; FingerprintTester(Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@Nullable String useAlternateFor) { Map<String, Object> values = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { values.put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return values; } public void doTest() throws Exception { Fingerprint fp = new Fingerprint(); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedInFingerprint) { [MASK] ( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { [MASK] ( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedInFingerprint; Object valueA; Object valueB; Domain(boolean includedInFingerprint, Object valueA, Object valueB) { this.includedInFingerprint = includedInFingerprint; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOfFingerprint(Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain notPartOfFingerprint(Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void testFingerprintOfFileTraversal() throws Exception { new FingerprintTester( ImmutableMap.<String, Domain>builder() .put("ownerLabel", notPartOfFingerprint("//bar")) .put("fileToTraverse", partOfFingerprint("foo/file.a", "bar/file.b")) 
.put("destPath", partOfFingerprint("out1", "out2")) .put("strictFilesetOutput", partOfFingerprint(true, false)) .put("permitDirectories", partOfFingerprint(true, false)) .buildOrThrow()) { @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), PathFragment.create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void testFingerprintOfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new FingerprintTester( ImmutableMap.of( "ownerLabel", notPartOfFingerprint("//bar"), "nestedArtifact", partOfFingerprint(nested1, nested2), "destDir", partOfFingerprint("out1", "out2"), "excludes", partOfFingerprint(ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), PathFragment.create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements SkyFunction { @Nullable @Override public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
assertWithMessage
/* * Copyright 2002-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.beans.factory.xml; import org.jspecify.annotations.Nullable; import org.w3c.dom.Attr; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.springframework.beans.MutablePropertyValues; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.BeanDefinitionHolder; import org.springframework.beans.factory.config.RuntimeBeanReference; import org.springframework.core.Conventions; /** * Simple {@code NamespaceHandler} implementation that maps custom attributes * directly through to bean properties. An important point to note is that this * {@code NamespaceHandler} does not have a corresponding schema since there * is no way to know in advance all possible attribute names. * * <p>An example of the usage of this {@code NamespaceHandler} is shown below: * * <pre class="code"> * &lt;bean id=&quot;rob&quot; class=&quot;..TestBean&quot; p:name=&quot;Rob Harrop&quot; p:spouse-ref=&quot;sally&quot;/&gt;</pre> * * Here the '{@code p:name}' corresponds directly to the '{@code name}' * property on class '{@code TestBean}'. The '{@code p:spouse-ref}' * attributes corresponds to the '{@code spouse}' property and, rather * than being the concrete value, it contains the name of the bean that will * be injected into that property. 
* * @author Rob Harrop * @author Juergen Hoeller * @since 2.0 */ public class SimplePropertyNamespaceHandler implements NamespaceHandler { private static final String REF_SUFFIX = "-ref"; @Override public void init() { } @Override public @Nullable BeanDefinition parse(Element element, ParserContext [MASK] ) { [MASK] .getReaderContext().error( "Class [" + getClass().getName() + "] does not support custom elements.", element); return null; } @Override public BeanDefinitionHolder decorate(Node node, BeanDefinitionHolder definition, ParserContext [MASK] ) { if (node instanceof Attr attr) { String propertyName = [MASK] .getDelegate().getLocalName(attr); String propertyValue = attr.getValue(); MutablePropertyValues pvs = definition.getBeanDefinition().getPropertyValues(); if (pvs.contains(propertyName)) { [MASK] .getReaderContext().error("Property '" + propertyName + "' is already defined using " + "both <property> and inline syntax. Only one approach may be used per property.", attr); } if (propertyName.endsWith(REF_SUFFIX)) { propertyName = propertyName.substring(0, propertyName.length() - REF_SUFFIX.length()); pvs.add(Conventions.attributeNameToPropertyName(propertyName), new RuntimeBeanReference(propertyValue)); } else { pvs.add(Conventions.attributeNameToPropertyName(propertyName), propertyValue); } } return definition; } }
parserContext
package com.alibaba.druid.bvt.sql.mysql.createTable; import com.alibaba.druid.sql.MysqlTest; import com.alibaba.druid.sql.ast.SQLStatement; import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlCreateTableStatement; import com.alibaba.druid.sql.dialect.mysql.parser.MySqlStatementParser; import java.util.List; public class MySqlCreateTableTest139 extends MysqlTest { public void test_0() throws Exception { String sql = " CREATE TABLE IF NOT EXISTS simiao_alter_partition2 (id int, id2 int, name varchar(30), time timestamp NOT NULL, PRIMARY KEY (id, time), KEY idx_id_time USING BTREE (id, time)) PARTITION BY RANGE (UNIX_TIMESTAMP(time))( PARTITION p0 VALUES LESS THAN (UNIX_TIMESTAMP('2013-01-01 00:00:00')), PARTITION p1 VALUES LESS THAN (UNIX_TIMESTAMP('2013-02-01 00:00:00')), PARTITION p2 VALUES LESS THAN (UNIX_TIMESTAMP('2013-03-01 00:00:00')), PARTITION p3 VALUES LESS THAN (UNIX_TIMESTAMP('2013-04-01 00:00:00')), PARTITION p4 VALUES LESS THAN (UNIX_TIMESTAMP('2013-05-01 00:00:00')), PARTITION p5 VALUES LESS THAN (UNIX_TIMESTAMP('2013-06-01 00:00:00')), PARTITION p6 VALUES LESS THAN (UNIX_TIMESTAMP('2013-07-01 00:00:00')), PARTITION p7 VALUES LESS THAN (UNIX_TIMESTAMP('2013-08-01 00:00:00')), PARTITION p8 VALUES LESS THAN (UNIX_TIMESTAMP('2013-09-01 00:00:00')), PARTITION p10 VALUES LESS THAN (UNIX_TIMESTAMP('2013-10-01 00:00:00')), PARTITION p11 VALUES LESS THAN (UNIX_TIMESTAMP('2013-11-01 00:00:00')), PARTITION p12 VALUES LESS THAN (UNIX_TIMESTAMP('2013-12-01 00:00:00')), PARTITION p13 VALUES LESS THAN (MAXVALUE) ) dbpartition by hash(id) dbpartitions 4;"; MySqlStatementParser parser = new MySqlStatementParser(sql); List<SQLStatement> [MASK] = parser.parseStatementList(); MySqlCreateTableStatement stmt = (MySqlCreateTableStatement) [MASK] .get(0); assertEquals(1, [MASK] .size()); assertEquals("CREATE TABLE IF NOT EXISTS simiao_alter_partition2 (\n" + "\tid int,\n" + "\tid2 int,\n" + "\tname varchar(30),\n" + "\ttime timestamp NOT NULL,\n" + "\tPRIMARY KEY 
(id, time),\n" + "\tKEY idx_id_time USING BTREE (id, time)\n" + ")\n" + "PARTITION BY RANGE (UNIX_TIMESTAMP(time)) (\n" + "\tPARTITION p0 VALUES LESS THAN (UNIX_TIMESTAMP('2013-01-01 00:00:00')),\n" + "\tPARTITION p1 VALUES LESS THAN (UNIX_TIMESTAMP('2013-02-01 00:00:00')),\n" + "\tPARTITION p2 VALUES LESS THAN (UNIX_TIMESTAMP('2013-03-01 00:00:00')),\n" + "\tPARTITION p3 VALUES LESS THAN (UNIX_TIMESTAMP('2013-04-01 00:00:00')),\n" + "\tPARTITION p4 VALUES LESS THAN (UNIX_TIMESTAMP('2013-05-01 00:00:00')),\n" + "\tPARTITION p5 VALUES LESS THAN (UNIX_TIMESTAMP('2013-06-01 00:00:00')),\n" + "\tPARTITION p6 VALUES LESS THAN (UNIX_TIMESTAMP('2013-07-01 00:00:00')),\n" + "\tPARTITION p7 VALUES LESS THAN (UNIX_TIMESTAMP('2013-08-01 00:00:00')),\n" + "\tPARTITION p8 VALUES LESS THAN (UNIX_TIMESTAMP('2013-09-01 00:00:00')),\n" + "\tPARTITION p10 VALUES LESS THAN (UNIX_TIMESTAMP('2013-10-01 00:00:00')),\n" + "\tPARTITION p11 VALUES LESS THAN (UNIX_TIMESTAMP('2013-11-01 00:00:00')),\n" + "\tPARTITION p12 VALUES LESS THAN (UNIX_TIMESTAMP('2013-12-01 00:00:00')),\n" + "\tPARTITION p13 VALUES LESS THAN MAXVALUE\n" + ")\n" + "DBPARTITION BY hash(id) DBPARTITIONS 4;", stmt.toString()); assertEquals("create table if not exists simiao_alter_partition2 (\n" + "\tid int,\n" + "\tid2 int,\n" + "\tname varchar(30),\n" + "\ttime timestamp not null,\n" + "\tprimary key (id, time),\n" + "\tkey idx_id_time using BTREE (id, time)\n" + ")\n" + "partition by range (UNIX_TIMESTAMP(time)) (\n" + "\tpartition p0 values less than (UNIX_TIMESTAMP('2013-01-01 00:00:00')),\n" + "\tpartition p1 values less than (UNIX_TIMESTAMP('2013-02-01 00:00:00')),\n" + "\tpartition p2 values less than (UNIX_TIMESTAMP('2013-03-01 00:00:00')),\n" + "\tpartition p3 values less than (UNIX_TIMESTAMP('2013-04-01 00:00:00')),\n" + "\tpartition p4 values less than (UNIX_TIMESTAMP('2013-05-01 00:00:00')),\n" + "\tpartition p5 values less than (UNIX_TIMESTAMP('2013-06-01 00:00:00')),\n" + "\tpartition p6 values less than 
(UNIX_TIMESTAMP('2013-07-01 00:00:00')),\n" + "\tpartition p7 values less than (UNIX_TIMESTAMP('2013-08-01 00:00:00')),\n" + "\tpartition p8 values less than (UNIX_TIMESTAMP('2013-09-01 00:00:00')),\n" + "\tpartition p10 values less than (UNIX_TIMESTAMP('2013-10-01 00:00:00')),\n" + "\tpartition p11 values less than (UNIX_TIMESTAMP('2013-11-01 00:00:00')),\n" + "\tpartition p12 values less than (UNIX_TIMESTAMP('2013-12-01 00:00:00')),\n" + "\tpartition p13 values less than maxvalue\n" + ")\n" + "dbpartition by hash(id) dbpartitions 4;", stmt.toLowerCaseString()); } }
statementList
/* * Copyright (c) 2014, 2024, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package stream; import static jaxp.library.JAXPTestUtilities.USER_DIR; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.InputStream; import java.io.OutputStream; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLOutputFactory; import javax.xml.stream.XMLStreamReader; import javax.xml.stream.XMLStreamWriter; import org.testng.Assert; import org.testng.annotations.Test; /* * @test * @bug 6688002 * @library /javax/xml/jaxp/libs /javax/xml/jaxp/unittest * @run testng/othervm stream.Bug6688002Test * @summary Test single instance of XMLOutputFactory/XMLInputFactory create multiple Writer/Readers in parallel. 
*/ public class Bug6688002Test { private static final XMLOutputFactory outputFactory = XMLOutputFactory.newInstance(); private static final XMLInputFactory inputFactory = XMLInputFactory.newInstance(); private static final int NO_THREADS = 3; @Test public void testMultiThread() throws Exception { Thread[] [MASK] = new Thread[NO_THREADS]; for (int i = 0; i < NO_THREADS; i++) { [MASK] [i] = new Thread(new MyRunnable(i)); } for (int i = 0; i < NO_THREADS; i++) { [MASK] [i].start(); } for (int i = 0; i < NO_THREADS; i++) { [MASK] [i].join(); } } public class MyRunnable implements Runnable { final String no; MyRunnable(int no) { this.no = String.valueOf(no); } public void run() { try { FileOutputStream fos = new FileOutputStream(USER_DIR + no); XMLStreamWriter w = getWriter(fos); // System.out.println("Writer="+w+" Thread="+Thread.currentThread()); w.writeStartDocument(); w.writeStartElement("hello"); for (int j = 0; j < 50; j++) { w.writeStartElement("a" + j); w.writeEndElement(); } w.writeEndElement(); w.writeEndDocument(); w.close(); fos.close(); FileInputStream fis = new FileInputStream(USER_DIR + no); XMLStreamReader r = getReader(fis); while (r.hasNext()) { r.next(); } r.close(); fis.close(); } catch (Exception e) { Assert.fail(e.getMessage()); } } } public static/* synchronized */XMLStreamReader getReader(InputStream is) throws Exception { return inputFactory.createXMLStreamReader(is); // return XMLStreamReaderFactory.create(null, is, true); } public static/* synchronized */XMLStreamWriter getWriter(OutputStream os) throws Exception { return outputFactory.createXMLStreamWriter(os); // return XMLStreamWriterFactory.createXMLStreamWriter(os); } }
threads
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2024 DBeaver Corp and others * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.model.sql; /** * SQL editor constants */ public class SQLConstants { public static final String NULL_VALUE = "NULL"; public static final String STR_QUOTE_SINGLE = "'"; public static final String STR_QUOTE_DOUBLE = "\""; public static final String[][] DOUBLE_QUOTE_STRINGS = {{ STR_QUOTE_DOUBLE, STR_QUOTE_DOUBLE }}; public static final String DEFAULT_STATEMENT_DELIMITER = ";"; public static final String[] DEFAULT_SCRIPT_DELIMITER = { DEFAULT_STATEMENT_DELIMITER }; public static final String STR_QUOTE_APOS = "`"; public static final String ML_COMMENT_START = "/*"; public static final String ML_COMMENT_END = "*/"; public static final String SL_COMMENT = "--"; public static final String ASTERISK = "*"; public static final String QUESTION = "?"; public static final String DOT = "."; public static final String KEYWORD_SELECT = "SELECT"; public static final String KEYWORD_INSERT = "INSERT"; public static final String KEYWORD_UPDATE = "UPDATE"; public static final String KEYWORD_DELETE = "DELETE"; public static final String KEYWORD_MERGE = "MERGE"; public static final String KEYWORD_UPSERT = "UPSERT"; public static final String [MASK] = "TRUNCATE"; public static final String KEYWORD_FROM = "FROM"; public static final String KEYWORD_INTO = "INTO"; public static final String KEYWORD_JOIN = "JOIN"; public static final String 
KEYWORD_CROSS_JOIN = "CROSS JOIN"; public static final String KEYWORD_NATURAL_JOIN = "NATURAL JOIN"; public static final String KEYWORD_WHERE = "WHERE"; public static final String KEYWORD_SET = "SET"; public static final String KEYWORD_ON = "ON"; public static final String KEYWORD_AND = "AND"; public static final String KEYWORD_OR = "OR"; public static final String KEYWORD_BETWEEN = "BETWEEN"; public static final String KEYWORD_IS = "IS"; public static final String KEYWORD_NOT = "NOT"; public static final String KEYWORD_NULL = "NULL"; public static final String KEYWORD_IN = "IN"; public static final String KEYWORD_VALUES = "VALUES"; public static final String KEYWORD_ORDER_BY = "ORDER BY"; public static final String KEYWORD_GROUP_BY = "GROUP BY"; public static final String KEYWORD_HAVING = "HAVING"; public static final String KEYWORD_LIKE = "LIKE"; public static final String KEYWORD_ILIKE = "ILIKE"; public static final String KEYWORD_FUNCTION = "FUNCTION"; public static final String KEYWORD_PROCEDURE = "PROCEDURE"; public static final String KEYWORD_COMMIT = "COMMIT"; public static final String KEYWORD_ROLLBACK = "ROLLBACK"; public static final String KEYWORD_EXPLAIN = "EXPLAIN"; public static final String KEYWORD_CASE = "CASE"; public static final String KEYWORD_QUALIFY = "QUALIFY"; public static final String KEYWORD_AS = "AS"; public static final String KEYWORD_USING = "USING"; public static final String DATA_TYPE_VARCHAR = "varchar"; public static final String DATA_TYPE_BIGINT = "BIGINT"; public static final String DATA_TYPE_BINARY = "BINARY"; public static final String DATA_TYPE_BOOLEAN = "BOOLEAN"; public static final String DATA_TYPE_DOUBLE = "DOUBLE"; public static final String DATA_TYPE_FLOAT = "FLOAT"; public static final String DATA_TYPE_INT = "INT"; public static final String DATA_TYPE_SMALLINT = "SMALLINT"; public static final String DATA_TYPE_STRING = "STRING"; public static final String DATA_TYPE_TINYINT = "TINYINT"; public static final String[] 
QUERY_KEYWORDS = { KEYWORD_SELECT, KEYWORD_INSERT, KEYWORD_UPDATE, KEYWORD_DELETE, KEYWORD_MERGE, KEYWORD_UPSERT, [MASK] }; public static final String[] TABLE_KEYWORDS = { KEYWORD_FROM, KEYWORD_INSERT, KEYWORD_UPDATE, KEYWORD_DELETE, KEYWORD_INTO, "TABLE", "VIEW", KEYWORD_JOIN, [MASK] , KEYWORD_MERGE, }; public static final String[] COLUMN_KEYWORDS = { KEYWORD_SELECT, KEYWORD_WHERE, KEYWORD_SET, KEYWORD_ON, KEYWORD_AND, KEYWORD_OR, "BY", "HAVING" }; public static final String[] DDL_KEYWORDS = { "CREATE", "ALTER", "DROP", }; public static final String[] SQL2003_RESERVED_KEYWORDS = { "ALL", "ALLOCATE", "ALTER", KEYWORD_AND, "ANY", "ARE", "ARRAY", "AS", "ASENSITIVE", "ASYMMETRIC", "AT", "ATOMIC", "AUTHORIZATION", "BEGIN", KEYWORD_BETWEEN, //"BIGINT", DATA_TYPE_BINARY, "BOTH", "BY", "CALL", "CALLED", "CARDINALITY", "CASCADE", "CASCADED", KEYWORD_CASE, "CAST", "CEIL", "CEILING", "CHARACTER", "CHECK", "CLOSE", "COALESCE", "COLLATE", "COLLECT", "COLUMN", KEYWORD_COMMIT, "CONDITION", "CONNECT", "CONSTRAINT", "CONVERT", "CORR", "CORRESPONDING", "COVAR_POP", "COVAR_SAMP", "CREATE", "CROSS", "CUBE", "CUME_DIST", "CURRENT", "CURSOR", "CYCLE", "DAY", "DEALLOCATE", "DEC", "DECLARE", "DEFAULT", KEYWORD_DELETE, "DENSE_RANK", "DEREF", "DESCRIBE", "DETERMINISTIC", "DISCONNECT", "DISTINCT", "DROP", "DYNAMIC", "EACH", "ELEMENT", "ELSE", "END", "END-EXEC", "ESCAPE", "EVERY", "EXCEPT", "EXEC", "EXECUTE", "EXISTS", "EXP", "EXTERNAL", "EXTRACT", "FALSE", "FETCH", "FILTER", "FOR", "FOREIGN", "FREE", "FROM", "FULL", KEYWORD_FUNCTION, "FUSION", "GET", "GLOBAL", "GRANT", "GROUP", "GROUPING", "HAVING", "HOLD", "HOUR", "IDENTITY", "IF", KEYWORD_IN, "INDEX", "INDICATOR", "INNER", "INOUT", "INSENSITIVE", KEYWORD_INSERT, "INTERSECT", "INTERSECTION", "INTERVAL", "INTO", KEYWORD_IS, "JOIN", "LANGUAGE", "LARGE", "LATERAL", "LEADING", "LEFT", "LIKE", "LN", "LOCAL", "MATCH", "MEMBER", KEYWORD_MERGE, "METHOD", "MINUTE", "MOD", "MODIFIES", // "MODULE", // too common for column names "MONTH", "MULTISET", 
"NATIONAL", "NATURAL", //"NCHAR", //"NCLOB", "NEW", "NO", "NONE", "NORMALIZE", KEYWORD_NOT, KEYWORD_NULL, "NULLIF", "NUMERIC", "OF", "OLD", KEYWORD_ON, "ONLY", "OPEN", "OR", "ORDER", "OUT", "OUTER", "OVER", "OVERLAPS", "OVERLAY", "PARAMETER", "PARTITION", "POSITION", "PRECISION", "PREPARE", "PRIMARY", KEYWORD_PROCEDURE, "RANGE", "RANK", "READS", "REAL", "RECURSIVE", "REF", "REFERENCES", "REFERENCING", "RELEASE", "RENAME", "RESULT", "RETURN", "RETURNS", "REVOKE", "RIGHT", KEYWORD_ROLLBACK, "ROLLUP", "ROW", "ROW_NUMBER", "ROWS", "SAVEPOINT", "SCOPE", "SCROLL", "SEARCH", "SECOND", KEYWORD_SELECT, "SENSITIVE", "SESSION_USER", KEYWORD_SET, "SIMILAR", "SMALLINT", "SOME", "SPECIFIC", "SPECIFICTYPE", "SQL", "SQLEXCEPTION", "SQLSTATE", "SQLWARNING", "START", "STATIC", // "STDDEV_POP", // "STDDEV_SAMP", "SUBMULTISET", "SYMMETRIC", "SYSTEM", "SYSTEM_USER", "TABLE", "TABLESAMPLE", "THEN", "TIMEZONE_HOUR", "TIMEZONE_MINUTE", "TO", "TRAILING", "TRANSLATE", "TRANSLATION", "TREAT", "TRIGGER", "TRUE", "UNION", "UNIQUE", "UNKNOWN", "UNNEST", KEYWORD_UPDATE, "USER", "USING", //"VALUE", // too common for column names KEYWORD_VALUES, // "VAR_POP", // "VAR_SAMP", //"VARCHAR", "VARYING", "WHEN", "WHENEVER", KEYWORD_WHERE, "WIDTH_BUCKET", "WINDOW", "WITH", "WITHIN", "WITHOUT", "YEAR", "NULLS", "FIRST", "LAST", "FOLLOWING", "PRECEDING", "UNBOUNDED", "LENGTH", "KEY", "LEVEL", "VIEW", "SEQUENCE", "SCHEMA", "ROLE", "RESTRICT", "ASC", "DESC", // Not actually standard but widely used "LIMIT", // Extended keywords // "A", "ABSOLUTE", "ACTION", // "ADA", "ADD", // "ADMIN", "AFTER", "ALWAYS", // "ASC", "ASSERTION", "ASSIGNMENT", "ATTRIBUTE", "ATTRIBUTES", "BEFORE", // "BERNOULLI", // "BREADTH", // "C", "CASCADE", "CATALOG", // "CATALOG_NAME", "CHAIN", // "CHARACTER_SET_CATALOG", // "CHARACTER_SET_NAME", // "CHARACTER_SET_SCHEMA", "CHARACTERISTICS", "CHARACTERS", // "CLASS_ORIGIN", // "COBOL", "COLLATION", // "COLLATION_CATALOG", // "COLLATION_NAME", // "COLLATION_SCHEMA", // "COLUMN_NAME", // 
"COMMAND_FUNCTION", // "COMMAND_FUNCTION_CODE", "COMMITTED", // "CONDITION_NUMBER", "CONNECTION", // "CONNECTION_NAME", // "CONSTRAINT_CATALOG", // "CONSTRAINT_NAME", // "CONSTRAINT_SCHEMA", "CONSTRAINTS", "CONSTRUCTOR", "CONTAINS", "CONTINUE", "CURSOR_NAME", "DATA", // "DATETIME_INTERVAL_CODE", // "DATETIME_INTERVAL_PRECISION", "DEFAULTS", "DEFERRABLE", "DEFERRED", "DEFINED", "DEFINER", "DEGREE", "DEPTH", "DERIVED", // "DESC", "DESCRIPTOR", "DIAGNOSTICS", "DISPATCH", "DOMAIN", // "DYNAMIC_FUNCTION", // "DYNAMIC_FUNCTION_CODE", "EQUALS", "EXCEPTION", "EXCLUDE", "EXCLUDING", "FINAL", "FIRST", // "FORTRAN", "FOUND", // "G", "GENERAL", "GENERATED", "GO", "GOTO", "GRANTED", "HIERARCHY", "IMMEDIATE", "IMPLEMENTATION", "INCLUDING", "INCREMENT", "INITIALLY", "INPUT", "INSTANCE", "INSTANTIABLE", "INVOKER", "ISOLATION", // "K", // "KEY_MEMBER", "KEY_TYPE", "LAST", "LOCATOR", // "M", "MAP", "MATCHED", "MAXVALUE", // "MESSAGE_LENGTH", // "MESSAGE_OCTET_LENGTH", // "MESSAGE_TEXT", "MINVALUE", "MORE", "MUMPS", // "NAME", // "NAMES", "NESTING", "NEXT", "NORMALIZED", // "NULLABLE", // "NULLS", // "NUMBER", "OBJECT", "OCTETS", "OPTION", "OPTIONS", "ORDERING", "ORDINALITY", "OTHERS", "OUTPUT", "OVERRIDING", "PAD", // "PARAMETER_MODE", // "PARAMETER_NAME", // "PARAMETER_ORDINAL_POSITION", // "PARAMETER_SPECIFIC_CATALOG", // "PARAMETER_SPECIFIC_NAME", // "PARAMETER_SPECIFIC_SCHEMA", "PARTIAL", // "PASCAL", "PATH", "PLACING", // "PLI", "PRESERVE", "PRIOR", "PRIVILEGES", // "PUBLIC", "READ", "RELATIVE", "REPEATABLE", "RESTART", // "RETURNED_CARDINALITY", // "RETURNED_LENGTH", // "RETURNED_OCTET_LENGTH", // "RETURNED_SQLSTATE", "ROUTINE", // "ROUTINE_CATALOG", // "ROUTINE_NAME", // "ROUTINE_SCHEMA", // "ROW_COUNT", "SCALE", // "SCHEMA_NAME", // "SCOPE_CATALOG", // "SCOPE_NAME", // "SCOPE_SCHEMA", "SECTION", "SECURITY", "SELF", "SERIALIZABLE", // "SERVER_NAME", "SESSION", "SETS", // "SIMPLE", "SIZE", "SOURCE", "SPACE", // "SPECIFIC_NAME", // "STATE", // too common for column names 
"STATEMENT", "STRUCTURE", "STYLE", // "SUBCLASS_ORIGIN", // "TABLE_NAME", "TEMPORARY", "TIES", // "TOP_LEVEL_COUNT", "TRANSACTION", // "TRANSACTION_ACTIVE", // "TRANSACTIONS_COMMITTED", // "TRANSACTIONS_ROLLED_BACK", "TRANSFORM", "TRANSFORMS", // "TRIGGER_CATALOG", // "TRIGGER_NAME", // "TRIGGER_SCHEMA", "TYPE", "UNCOMMITTED", "UNDER", "UNNAMED", "USAGE", // "USER_DEFINED_TYPE_CATALOG", // "USER_DEFINED_TYPE_CODE", // "USER_DEFINED_TYPE_NAME", // "USER_DEFINED_TYPE_SCHEMA", "WORK", "WRITE", "ZONE", KEYWORD_QUALIFY }; public static final String[] SQL2003_FUNCTIONS = { "ABS", "AVG", "CHAR_LENGTH", "CHARACTER_LENGTH", "COUNT", "LOCALTIME", "LOCALTIMESTAMP", // "CURRENT_DATE", // "CURRENT_DEFAULT_TRANSFORM_GROUP", // "CURRENT_PATH", // "CURRENT_ROLE", // "CURRENT_TIME", // "CURRENT_TIMESTAMP", // "CURRENT_TRANSFORM_GROUP_FOR_TYPE", // "CURRENT_USER", "FLOOR", "LOWER", "MAX", "MIN", "OCTET_LENGTH", "PERCENT_RANK", "PERCENTILE_CONT", "PERCENTILE_DISC", "POWER", "REGR_AVGX", "REGR_AVGY", "REGR_COUNT", "REGR_INTERCEPT", "REGR_R2", "REGR_SLOPE", "REGR_SXX", "REGR_SXY", "REGR_SYY", "SQRT", "SUBSTRING", "SUM", "TRIM", "UESCAPE", "UPPER", }; public static final String[] SQL_EX_KEYWORDS = { "CHANGE", "MODIFY", }; public static final String[] DEFAULT_TYPES = { DATA_TYPE_BOOLEAN, "CHAR", "VARCHAR", DATA_TYPE_BINARY, "VARBINARY", DATA_TYPE_INT, "INTEGER", DATA_TYPE_SMALLINT, DATA_TYPE_BIGINT, "NUMBER", "NUMERIC", "DECIMAL", DATA_TYPE_FLOAT, DATA_TYPE_DOUBLE, "DATE", "TIME", "TIMESTAMP", "CLOB", "BLOB", }; public static final String BLOCK_BEGIN = "BEGIN"; public static final String BLOCK_END = "END"; /** * Pseudo variables - these are not dynamic parameters */ public static final String[] PSEUDO_VARIABLES = { ":NEW", ":OLD", }; public static final char STRUCT_SEPARATOR = '.'; //$NON-NLS-1$ public static final String CONFIG_COLOR_KEYWORD = "org.jkiss.dbeaver.sql.editor.color.keyword.foreground"; public static final String CONFIG_COLOR_DATATYPE = 
"org.jkiss.dbeaver.sql.editor.color.datatype.foreground"; public static final String CONFIG_COLOR_FUNCTION = "org.jkiss.dbeaver.sql.editor.color.function.foreground"; public static final String CONFIG_COLOR_STRING = "org.jkiss.dbeaver.sql.editor.color.string.foreground"; public static final String CONFIG_COLOR_TABLE = "org.jkiss.dbeaver.sql.editor.color.table.foreground"; public static final String CONFIG_COLOR_TABLE_ALIAS = "org.jkiss.dbeaver.sql.editor.color.table.alias.foreground"; public static final String CONFIG_COLOR_COLUMN = "org.jkiss.dbeaver.sql.editor.color.column.foreground"; public static final String CONFIG_COLOR_COLUMN_DERIVED = "org.jkiss.dbeaver.sql.editor.color.column.derived.foreground"; public static final String CONFIG_COLOR_SCHEMA = "org.jkiss.dbeaver.sql.editor.color.schema.foreground"; public static final String CONFIG_COLOR_COMPOSITE_FIELD = "org.jkiss.dbeaver.sql.editor.color.composite.field.foreground"; public static final String CONFIG_COLOR_SQL_VARIABLE = "org.jkiss.dbeaver.sql.editor.color.sqlVariable.foreground"; public static final String CONFIG_COLOR_SEMANTIC_ERROR = "org.jkiss.dbeaver.sql.editor.color.semanticError.foreground"; public static final String CONFIG_COLOR_NUMBER = "org.jkiss.dbeaver.sql.editor.color.number.foreground"; public static final String CONFIG_COLOR_COMMENT = "org.jkiss.dbeaver.sql.editor.color.comment.foreground"; public static final String CONFIG_COLOR_DELIMITER = "org.jkiss.dbeaver.sql.editor.color.delimiter.foreground"; public static final String CONFIG_COLOR_PARAMETER = "org.jkiss.dbeaver.sql.editor.color.parameter.foreground"; public static final String CONFIG_COLOR_COMMAND = "org.jkiss.dbeaver.sql.editor.color.command.foreground"; public static final String CONFIG_COLOR_TEXT = "org.jkiss.dbeaver.sql.editor.color.text.foreground"; public static final String CONFIG_COLOR_BACKGROUND = "org.jkiss.dbeaver.sql.editor.color.text.background"; public static final String CONFIG_COLOR_DISABLED = 
"org.jkiss.dbeaver.sql.editor.color.disabled.background"; public static final char DEFAULT_PARAMETER_MARK = '?'; public static final char DEFAULT_PARAMETER_PREFIX = ':'; public static final String DEFAULT_IDENTIFIER_QUOTE = "\""; public static final String DEFAULT_LIKE_ESCAPE = "\\"; public static final String KEYWORD_PATTERN_CHARS = "\\*\\"; public static final String DEFAULT_CONTROL_COMMAND_PREFIX = "@"; public final static char[] BRACKETS = {'{', '}', '(', ')', '[', ']', '<', '>'}; public static final String COLUMN_ASTERISK = "*"; }
KEYWORD_TRUNCATE
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.io.network.api.serialization; import org.apache.flink.core.memory.MemorySegment; import org.apache.flink.runtime.io.network.buffer.Buffer; import org.apache.flink.testutils.junit.utils.TempDirUtils; import org.apache.flink.util.CloseableIterator; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; import java.io.File; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; import java.util.Random; import static org.apache.flink.core.memory.MemorySegmentFactory.wrap; import static org.apache.flink.runtime.io.network.api.serialization.SpillingAdaptiveSpanningRecordDeserializer.LENGTH_BYTES; import static org.assertj.core.api.Assertions.assertThat; /** {@link SpanningWrapper} test. 
*/ class SpanningWrapperTest { private static final Random random = new Random(); @TempDir private Path folder; @Test void testLargeUnconsumedSegment() throws Exception { int recordLen = 100; int firstChunk = (int) (recordLen * .9); int spillingThreshold = (int) (firstChunk * .9); byte[] record1 = recordBytes(recordLen); byte[] record2 = recordBytes(recordLen * 2); File canNotEecutableFile = TempDirUtils.newFolder(folder); canNotEecutableFile.setExecutable(false); // Always pick 'canNotEecutableFile' first as the Spilling Channel TmpDir. Thus trigger an // IOException. SpanningWrapper spanningWrapper = new SpanningWrapper( new String[] { TempDirUtils.newFolder(folder).getAbsolutePath(), canNotEecutableFile.getAbsolutePath() + File.separator + "pathdonotexit" }, spillingThreshold, recordLen); spanningWrapper.transferFrom(wrapNonSpanning(record1, firstChunk), recordLen); spanningWrapper.addNextChunkFromMemorySegment( wrap(record1), firstChunk, recordLen - firstChunk + LENGTH_BYTES); spanningWrapper.addNextChunkFromMemorySegment(wrap(record2), 0, record2.length); CloseableIterator<Buffer> unconsumedSegment = spanningWrapper.getUnconsumedSegment(); spanningWrapper .getInputView() .readFully(new byte[recordLen], 0, recordLen); // read out from file spanningWrapper.transferLeftOverTo(new NonSpanningWrapper()); // clear any leftover spanningWrapper.transferFrom( wrapNonSpanning(recordBytes(recordLen), recordLen), recordLen); // overwrite with new data canNotEecutableFile.setExecutable(true); assertThat(concat(record1, record2)).isEqualTo(toByteArray(unconsumedSegment)); } private byte[] recordBytes(int recordLen) { byte[] inputData = randomBytes(recordLen + LENGTH_BYTES); for (int i = 0; i < Integer.BYTES; i++) { inputData[Integer.BYTES - i - 1] = (byte) (recordLen >>> i * 8); } return inputData; } private NonSpanningWrapper wrapNonSpanning(byte[] bytes, int len) { NonSpanningWrapper nonSpanningWrapper = new NonSpanningWrapper(); MemorySegment segment = wrap(bytes); 
nonSpanningWrapper.initializeFromMemorySegment(segment, 0, len); nonSpanningWrapper .readInt(); // emulate read length performed in getNextRecord to move position return nonSpanningWrapper; } private byte[] toByteArray(CloseableIterator<Buffer> unconsumed) { final List<Buffer> [MASK] = new ArrayList<>(); try { unconsumed.forEachRemaining( [MASK] ::add); byte[] result = new byte[ [MASK] .stream().mapToInt(Buffer::readableBytes).sum()]; int offset = 0; for (Buffer buffer : [MASK] ) { int len = buffer.readableBytes(); buffer.getNioBuffer(0, len).get(result, offset, len); offset += len; } return result; } finally { [MASK] .forEach(Buffer::recycleBuffer); } } private byte[] randomBytes(int length) { byte[] inputData = new byte[length]; random.nextBytes(inputData); return inputData; } private byte[] concat(byte[] input1, byte[] input2) { byte[] expected = new byte[input1.length + input2.length]; System.arraycopy(input1, 0, expected, 0, input1.length); System.arraycopy(input2, 0, expected, input1.length, input2.length); return expected; } }
buffers
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis. 
[MASK] ; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.EvaluationContext; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.MemoizingEvaluator; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import net.starlark.java.eval.StarlarkSemantics; 
import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. */ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator evaluator; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); [MASK] directories = new [MASK] ( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories); Map<SkyFunctionName, SkyFunction> skyFunctions = new HashMap<>(); skyFunctions.put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache.NO_CACHE, externalFilesHelper)); skyFunctions.put(SkyFunctions.FILE, new FileFunction(pkgLocator, directories)); skyFunctions.put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction()); skyFunctions.put( SkyFunctions.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); skyFunctions.put(SkyFunctions.IGNORED_SUBDIRECTORIES, IgnoredSubdirectoriesFunction.NOOP); 
skyFunctions.put( SkyFunctions.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); skyFunctions.put(SkyFunctions.WORKSPACE_NAME, new TestWorkspaceNameFunction()); skyFunctions.put( SkyFunctions.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); evaluator = new InMemoryMemoizingEvaluator(skyFunctions, differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String relative) { return RootedPath.toRootedPath( artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative(relative)); } private static RootedPath siblingOf(Artifact artifact, String relative) { PathFragment parent = Preconditions.checkNotNull(artifact.getRootRelativePath().getParentDirectory()); return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative(relative)); } private void createFile(Path path, String... contents) throws Exception { if (!path.getParentDirectory().exists()) { scratch.dir(path.getParentDirectory().getPathString()); } scratch.file(path.getPathString(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact.getPath(), contents); } private RootedPath createFile(RootedPath path, String... 
contents) throws Exception { createFile(path.asPath(), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { EvaluationContext evaluationContext = EvaluationContext.newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return evaluator.evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink(PathFragment.create(from), to.getPath().asFragment()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink(PathFragment.create(from), to.asPath().asFragment()); } private FilesetOutputSymlink symlink(PathFragment from, PathFragment to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory.asFragment()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. 
(input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.targetPath(), input.relativeToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); 
assertThat(result.getError(key).getException()) .hasMessageThat() .contains("foo contains a directory"); } @Test public void testFileTraversalForDanglingSymlink() throws Exception { Artifact linkName = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf(linkName, "target.file"), "blah"); linkName.getPath().createSymbolicLink(PathFragment.create("target.file")); linkTarget.asPath().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ linkName, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different values (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class FingerprintTester { private final Map<String, Domain> domains; FingerprintTester(Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@Nullable String useAlternateFor) { Map<String, Object> values = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { values.put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return values; } public void doTest() throws Exception { Fingerprint fp = new Fingerprint(); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedInFingerprint) { assertWithMessage( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { assertWithMessage( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedInFingerprint; Object valueA; Object valueB; Domain(boolean includedInFingerprint, Object valueA, Object valueB) { this.includedInFingerprint = includedInFingerprint; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOfFingerprint(Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain notPartOfFingerprint(Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void testFingerprintOfFileTraversal() throws Exception { new FingerprintTester( ImmutableMap.<String, Domain>builder() .put("ownerLabel", notPartOfFingerprint("//bar")) .put("fileToTraverse", partOfFingerprint("foo/file.a", 
"bar/file.b")) .put("destPath", partOfFingerprint("out1", "out2")) .put("strictFilesetOutput", partOfFingerprint(true, false)) .put("permitDirectories", partOfFingerprint(true, false)) .buildOrThrow()) { @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), PathFragment.create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void testFingerprintOfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new FingerprintTester( ImmutableMap.of( "ownerLabel", notPartOfFingerprint("//bar"), "nestedArtifact", partOfFingerprint(nested1, nested2), "destDir", partOfFingerprint("out1", "out2"), "excludes", partOfFingerprint(ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), PathFragment.create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements SkyFunction { @Nullable @Override public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
BlazeDirectories
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.hudi; import com.facebook.presto.common.type.TypeManager; import com.facebook.presto.hive.HdfsEnvironment; import com.facebook.presto.hive.MetastoreClientConfig; import com.facebook.presto.hive. [MASK] .ExtendedHiveMetastore; import com.facebook.presto.hive. [MASK] .InMemoryCachingHiveMetastore; import com.facebook.presto.spi.connector.ConnectorMetadata; import javax.inject.Inject; import static java.util.Objects.requireNonNull; public class HudiMetadataFactory { private final ExtendedHiveMetastore [MASK] ; private final HdfsEnvironment hdfsEnvironment; private final TypeManager typeManager; private final long perTransactionCacheMaximumSize; private final boolean [MASK] ImpersonationEnabled; private final int [MASK] PartitionCacheMaxColumnCount; @Inject public HudiMetadataFactory( ExtendedHiveMetastore [MASK] , HdfsEnvironment hdfsEnvironment, TypeManager typeManager, MetastoreClientConfig [MASK] ClientConfig) { this. [MASK] = requireNonNull( [MASK] , " [MASK] is null"); this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null"); this.typeManager = requireNonNull(typeManager, "typeManager is null"); this.perTransactionCacheMaximumSize = [MASK] ClientConfig.getPerTransactionMetastoreCacheMaximumSize(); this. [MASK] ImpersonationEnabled = [MASK] ClientConfig.isMetastoreImpersonationEnabled(); this. 
[MASK] PartitionCacheMaxColumnCount = [MASK] ClientConfig.getPartitionCacheColumnCountLimit(); } public ConnectorMetadata create() { return new HudiMetadata( InMemoryCachingHiveMetastore.memoizeMetastore( [MASK] , [MASK] ImpersonationEnabled, perTransactionCacheMaximumSize, [MASK] PartitionCacheMaxColumnCount), hdfsEnvironment, typeManager); } }
metastore
package com.alibaba.druid.bvt.sql.mysql.createTable; import com.alibaba.druid.sql.MysqlTest; import com.alibaba.druid.sql.ast.SQLStatement; import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlCreateTableStatement; import com.alibaba.druid.sql.dialect.mysql. [MASK] .MySqlStatementParser; import java.util.List; public class MySqlCreateTableTest139 extends MysqlTest { public void test_0() throws Exception { String sql = " CREATE TABLE IF NOT EXISTS simiao_alter_partition2 (id int, id2 int, name varchar(30), time timestamp NOT NULL, PRIMARY KEY (id, time), KEY idx_id_time USING BTREE (id, time)) PARTITION BY RANGE (UNIX_TIMESTAMP(time))( PARTITION p0 VALUES LESS THAN (UNIX_TIMESTAMP('2013-01-01 00:00:00')), PARTITION p1 VALUES LESS THAN (UNIX_TIMESTAMP('2013-02-01 00:00:00')), PARTITION p2 VALUES LESS THAN (UNIX_TIMESTAMP('2013-03-01 00:00:00')), PARTITION p3 VALUES LESS THAN (UNIX_TIMESTAMP('2013-04-01 00:00:00')), PARTITION p4 VALUES LESS THAN (UNIX_TIMESTAMP('2013-05-01 00:00:00')), PARTITION p5 VALUES LESS THAN (UNIX_TIMESTAMP('2013-06-01 00:00:00')), PARTITION p6 VALUES LESS THAN (UNIX_TIMESTAMP('2013-07-01 00:00:00')), PARTITION p7 VALUES LESS THAN (UNIX_TIMESTAMP('2013-08-01 00:00:00')), PARTITION p8 VALUES LESS THAN (UNIX_TIMESTAMP('2013-09-01 00:00:00')), PARTITION p10 VALUES LESS THAN (UNIX_TIMESTAMP('2013-10-01 00:00:00')), PARTITION p11 VALUES LESS THAN (UNIX_TIMESTAMP('2013-11-01 00:00:00')), PARTITION p12 VALUES LESS THAN (UNIX_TIMESTAMP('2013-12-01 00:00:00')), PARTITION p13 VALUES LESS THAN (MAXVALUE) ) dbpartition by hash(id) dbpartitions 4;"; MySqlStatementParser [MASK] = new MySqlStatementParser(sql); List<SQLStatement> statementList = [MASK] .parseStatementList(); MySqlCreateTableStatement stmt = (MySqlCreateTableStatement) statementList.get(0); assertEquals(1, statementList.size()); assertEquals("CREATE TABLE IF NOT EXISTS simiao_alter_partition2 (\n" + "\tid int,\n" + "\tid2 int,\n" + "\tname varchar(30),\n" + "\ttime timestamp NOT 
NULL,\n" + "\tPRIMARY KEY (id, time),\n" + "\tKEY idx_id_time USING BTREE (id, time)\n" + ")\n" + "PARTITION BY RANGE (UNIX_TIMESTAMP(time)) (\n" + "\tPARTITION p0 VALUES LESS THAN (UNIX_TIMESTAMP('2013-01-01 00:00:00')),\n" + "\tPARTITION p1 VALUES LESS THAN (UNIX_TIMESTAMP('2013-02-01 00:00:00')),\n" + "\tPARTITION p2 VALUES LESS THAN (UNIX_TIMESTAMP('2013-03-01 00:00:00')),\n" + "\tPARTITION p3 VALUES LESS THAN (UNIX_TIMESTAMP('2013-04-01 00:00:00')),\n" + "\tPARTITION p4 VALUES LESS THAN (UNIX_TIMESTAMP('2013-05-01 00:00:00')),\n" + "\tPARTITION p5 VALUES LESS THAN (UNIX_TIMESTAMP('2013-06-01 00:00:00')),\n" + "\tPARTITION p6 VALUES LESS THAN (UNIX_TIMESTAMP('2013-07-01 00:00:00')),\n" + "\tPARTITION p7 VALUES LESS THAN (UNIX_TIMESTAMP('2013-08-01 00:00:00')),\n" + "\tPARTITION p8 VALUES LESS THAN (UNIX_TIMESTAMP('2013-09-01 00:00:00')),\n" + "\tPARTITION p10 VALUES LESS THAN (UNIX_TIMESTAMP('2013-10-01 00:00:00')),\n" + "\tPARTITION p11 VALUES LESS THAN (UNIX_TIMESTAMP('2013-11-01 00:00:00')),\n" + "\tPARTITION p12 VALUES LESS THAN (UNIX_TIMESTAMP('2013-12-01 00:00:00')),\n" + "\tPARTITION p13 VALUES LESS THAN MAXVALUE\n" + ")\n" + "DBPARTITION BY hash(id) DBPARTITIONS 4;", stmt.toString()); assertEquals("create table if not exists simiao_alter_partition2 (\n" + "\tid int,\n" + "\tid2 int,\n" + "\tname varchar(30),\n" + "\ttime timestamp not null,\n" + "\tprimary key (id, time),\n" + "\tkey idx_id_time using BTREE (id, time)\n" + ")\n" + "partition by range (UNIX_TIMESTAMP(time)) (\n" + "\tpartition p0 values less than (UNIX_TIMESTAMP('2013-01-01 00:00:00')),\n" + "\tpartition p1 values less than (UNIX_TIMESTAMP('2013-02-01 00:00:00')),\n" + "\tpartition p2 values less than (UNIX_TIMESTAMP('2013-03-01 00:00:00')),\n" + "\tpartition p3 values less than (UNIX_TIMESTAMP('2013-04-01 00:00:00')),\n" + "\tpartition p4 values less than (UNIX_TIMESTAMP('2013-05-01 00:00:00')),\n" + "\tpartition p5 values less than (UNIX_TIMESTAMP('2013-06-01 00:00:00')),\n" + 
"\tpartition p6 values less than (UNIX_TIMESTAMP('2013-07-01 00:00:00')),\n" + "\tpartition p7 values less than (UNIX_TIMESTAMP('2013-08-01 00:00:00')),\n" + "\tpartition p8 values less than (UNIX_TIMESTAMP('2013-09-01 00:00:00')),\n" + "\tpartition p10 values less than (UNIX_TIMESTAMP('2013-10-01 00:00:00')),\n" + "\tpartition p11 values less than (UNIX_TIMESTAMP('2013-11-01 00:00:00')),\n" + "\tpartition p12 values less than (UNIX_TIMESTAMP('2013-12-01 00:00:00')),\n" + "\tpartition p13 values less than maxvalue\n" + ")\n" + "dbpartition by hash(id) dbpartitions 4;", stmt.toLowerCaseString()); } }
parser
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ghidra.util.datastruct; import java.io.Serializable; import java.util.Arrays; /** * The BitTree class maintains a set of ordered keys between the values of * 0 and N. It can quickly (O(log(n))) add keys, remove keys, find the next key * greater than some value , and find the prev key less than some value. It can * determine if a key is in the set in O(1) time. This implementation has been * limited to short keys so that it can implement the ShortKeySet interface. */ public class BitTree implements ShortKeySet, Serializable { private final static long serialVersionUID = 1; private int size; // The maximum number of keys in the set. Keys range from 0 to size-1 private int power2; // The next power of 2 that is greater than size. private int[] bits; // Array of bits used to represent a tree of binary values. A bit at // position N will have a left child at 2*N and a right child at 2*N+1. // Its parent position will be at N/2. // A bit is on if any bits in its subtree are on. Leaf bits correspond // directly to keys and are on if the key is in the set. private int numKeys; // The current number of keys in the set. // masks for seting and clearing bits within an 32 bit integer. 
private static final int[] setMask = { 0x00000001,0x00000002,0x00000004,0x00000008, 0x00000010,0x00000020,0x00000040,0x00000080, 0x00000100,0x00000200,0x00000400,0x00000800, 0x00001000,0x00002000,0x00004000,0x00008000, 0x00010000,0x00020000,0x00040000,0x00080000, 0x00100000,0x00200000,0x00400000,0x00800000, 0x01000000,0x02000000,0x04000000,0x08000000, 0x10000000,0x20000000,0x40000000,0x80000000 }; private static final int[] clearMask={ 0xfffffffe,0xfffffffd,0xfffffffb,0xfffffff7, 0xffffffef,0xffffffdf,0xffffffbf,0xffffff7f, 0xfffffeff,0xfffffdff,0xfffffbff,0xfffff7ff, 0xffffefff,0xffffdfff,0xffffbfff,0xffff7fff, 0xfffeffff,0xfffdffff,0xfffbffff,0xfff7ffff, 0xffefffff,0xffdfffff,0xffbfffff,0xff7fffff, 0xfeffffff,0xfdffffff,0xfbffffff,0xf7ffffff, 0xefffffff,0xdfffffff,0xbfffffff,0x7fffffff }; /** * The BitTree constructor takes the maximum key value. The legal * keys for this set range from 0 to maxKey. * @param maxKey the maximum key that will ever be put into this BitTree. */ public BitTree(short maxKey) { this(maxKey,false); } /** * The BitTree constructor takes the maximum key value. The legal * keys for this set range from 0 to maxKey. * @param maxKey the maximum key value. * @param isFull if true, then the set is initilized to contain all legal keys. */ public BitTree(short maxKey, boolean isFull) { this.size = maxKey+1; // find the next power of 2 greater than or equal to n. power2 = 2; int sz = maxKey+1; while (sz > 1) { sz /= 2; power2 *= 2; } // The number of bits need to store the tree is 2 times the number of keys. // Since we are storing the bits in 32 bit integers we need N/16 integers // to store the bits. int [MASK] = power2/16; // must have at least 1 if ( [MASK] < 1) { [MASK] = 1; } bits = new int[ [MASK] ]; if (isFull) { Arrays.fill(bits,0xffffffff); numKeys = this.size; } } /** * Removes all keys from the set. */ public void removeAll() { Arrays.fill(bits,0); numKeys = 0; } /** * Returns the number of keys currently in the set. 
*/ public int size() { return numKeys; } /** * Adds a key to the set. * @param key to be added. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public void put(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // The first "power2" number of bits are used for internal tree nodes. The // leaf nodes start at index "power2". int nodeIndex = power2+key; // set the leaf bit on to indicate that the key is in the set. // if the bit is already on (The key is already in the set), then just return. if (!setBit(nodeIndex)) { return; } // increment the number of keys in the set. numKeys++; // go up the tree setting each parent bit to "on" while(nodeIndex != 1) { // compute parent index. nodeIndex /= 2; // if any parent bit is already on, then all its parents are already on, // so were done. if (!setBit(nodeIndex)) { return; } } } /** * Removes the key from the set. * @param key The key to remove. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public boolean remove(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // compute the leaf node index. int nodeIndex = power2+key; // clear the leaf bit to indicate that the key is not in the set. // if it is already "off", then we don't have to do anything if (!clearBit(nodeIndex)) { return false; } // decrement the number of keys in the set numKeys--; // traverse up the tree, clearing any parent nodes if all its child // nodes are "off". while(nodeIndex != 1) { nodeIndex /= 2; if (!isBitSet(nodeIndex)) { return true; } if (isBitSet(nodeIndex*2) || isBitSet(nodeIndex*2+1)) { return true; } clearBit(nodeIndex); } return true; } /** * Determines if a given key is in the set. * @param key the key to check if it is in this set. * @return true if the key is in the set. 
*/ public boolean containsKey(short key) { if ((key < 0) || (key >= size)) { return false; } return isBitSet(power2+key); } /** * finds the next key that is in the set that is greater than the given key. * @param key from which to search forward. * @return the next key greater than the given key or -1 if there is no key * greater than the given key. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public short getNext(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // compute leaf node. int nodeIndex = key + power2; // while we are not at the root, search upward until we find a right // sibling that is "on". while(nodeIndex != 1) { // see if we are odd (i.e. the right child) int odd = nodeIndex % 2; // if we are the left child see if my sibling on the right is on. // if so, then the next key must be in that subtree. if (odd == 0) { if (isBitSet(nodeIndex+1)) { // we found a right sibling that is "on", set nodeIndex to // that node. nodeIndex++; break; } } nodeIndex = nodeIndex/2; } // if we made it all the way up to the root node, then there is no key // greater than, so return -1; if (nodeIndex == 1) { return (short)-1; } // now that we found a right sibling that is "on", // follow the leftmost trail of "on" bits to an "on" leaf bit. This bit // represents the next key in the set. while (nodeIndex < power2) { nodeIndex *= 2; // if the left child is not on, then the right child must be "on". if (!isBitSet(nodeIndex)) { nodeIndex++; } } short nextKey = (short)(nodeIndex-power2); if (nextKey >= size) { nextKey = -1; } return nextKey; } /** * Finds the next key that is in the set that is less than the given key. * @param key the key to search before. * @return the next key less than the given key or -1 if there is no key * less than the given key. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. 
*/ public short getPrevious(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // find the leaf node for the given key. int nodeIndex = key + power2; // while we are not at the root, search upward until we find a left // sibling that is "on". while(nodeIndex != 1) { // check if we are a right node. int odd = nodeIndex % 2; // if we are the right child see if my sibling on the left is "on". // if so, then the previous key must be in that subtree. if (odd == 1) { if (isBitSet(nodeIndex-1)) { nodeIndex--; break; } } nodeIndex = nodeIndex/2; } // If we went all the way to the root then there is no previous key, return -1. if (nodeIndex == 1) { return (short)-1; } // follow the rightmost trail of "on" bits to an "on" leaf bit. This bit // represents the previous key in the set. while (nodeIndex < power2) { nodeIndex *= 2; if (isBitSet(nodeIndex+1)) { nodeIndex++; } } return (short)(nodeIndex-power2); } /** * Checks if the set is empty. * @return true if the set is empty. */ public boolean isEmpty() { return numKeys == 0; } /** * Returns the first (lowest) key in the set. */ public short getFirst() { // if the 0 key is in the set, then return it. if(containsKey((short)0)) { return (short)0; } // otherwise return the next key after 0. return getNext((short)0); } /** * Returns the last (highest) key in the set. */ public short getLast() { // if the highest possible key is in the set, return it. if(containsKey((short)(size-1))) { return (short)(size-1); } // otherwise return the next lowest key. return getPrevious((short)(size-1)); } /** * Sets the nth bit on. */ private boolean setBit(int n) { int intIndex = n >> 5; int maskIndex = n & 0x1f; int old = bits[intIndex]; return ((bits[intIndex] |= setMask[maskIndex]) != old); } /** * Sets the nth bit to off. 
*/ private boolean clearBit(int n) { int intIndex = n >> 5; int maskIndex = n & 0x1f; int old = bits[intIndex]; return ((bits[intIndex] &= clearMask[maskIndex]) != old); } /** * Tests if the nth bit is on. */ private boolean isBitSet(int n) { int intIndex = n >> 5; int maskIndex = n & 0x1f; return ((bits[intIndex] & setMask[maskIndex]) != 0); } }
nInts
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import 
com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.EvaluationContext; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.MemoizingEvaluator; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import net.starlark.java.eval.StarlarkSemantics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. 
*/ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator evaluator; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); BlazeDirectories directories = new BlazeDirectories( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories); Map<SkyFunctionName, SkyFunction> skyFunctions = new HashMap<>(); skyFunctions.put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache.NO_CACHE, externalFilesHelper)); skyFunctions.put(SkyFunctions.FILE, new FileFunction(pkgLocator, directories)); skyFunctions.put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction()); skyFunctions.put( SkyFunctions.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); skyFunctions.put(SkyFunctions.IGNORED_SUBDIRECTORIES, IgnoredSubdirectoriesFunction.NOOP); skyFunctions.put( SkyFunctions.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); skyFunctions.put(SkyFunctions.WORKSPACE_NAME, 
new TestWorkspaceNameFunction()); skyFunctions.put( SkyFunctions.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); evaluator = new InMemoryMemoizingEvaluator(skyFunctions, differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String relative) { return RootedPath.toRootedPath( artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative(relative)); } private static RootedPath siblingOf(Artifact artifact, String relative) { PathFragment parent = Preconditions.checkNotNull(artifact.getRootRelativePath().getParentDirectory()); return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative(relative)); } private void createFile(Path path, String... contents) throws Exception { if (!path.getParentDirectory().exists()) { scratch.dir(path.getParentDirectory().getPathString()); } scratch.file(path.getPathString(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact.getPath(), contents); } private RootedPath createFile(RootedPath path, String... 
contents) throws Exception { createFile(path.asPath(), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { EvaluationContext evaluationContext = EvaluationContext.newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return evaluator.evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink(PathFragment.create(from), to.getPath(). [MASK] ()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink(PathFragment.create(from), to.asPath(). [MASK] ()); } private FilesetOutputSymlink symlink(PathFragment from, PathFragment to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory. [MASK] ()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. 
(input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.targetPath(), input.relativeToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); 
assertThat(result.getError(key).getException()) .hasMessageThat() .contains("foo contains a directory"); } @Test public void testFileTraversalForDanglingSymlink() throws Exception { Artifact linkName = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf(linkName, "target.file"), "blah"); linkName.getPath().createSymbolicLink(PathFragment.create("target.file")); linkTarget.asPath().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ linkName, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different values (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class FingerprintTester { private final Map<String, Domain> domains; FingerprintTester(Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@Nullable String useAlternateFor) { Map<String, Object> values = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { values.put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return values; } public void doTest() throws Exception { Fingerprint fp = new Fingerprint(); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedInFingerprint) { assertWithMessage( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { assertWithMessage( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedInFingerprint; Object valueA; Object valueB; Domain(boolean includedInFingerprint, Object valueA, Object valueB) { this.includedInFingerprint = includedInFingerprint; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOfFingerprint(Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain notPartOfFingerprint(Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void testFingerprintOfFileTraversal() throws Exception { new FingerprintTester( ImmutableMap.<String, Domain>builder() .put("ownerLabel", notPartOfFingerprint("//bar")) .put("fileToTraverse", partOfFingerprint("foo/file.a", 
"bar/file.b")) .put("destPath", partOfFingerprint("out1", "out2")) .put("strictFilesetOutput", partOfFingerprint(true, false)) .put("permitDirectories", partOfFingerprint(true, false)) .buildOrThrow()) { @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), PathFragment.create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void testFingerprintOfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new FingerprintTester( ImmutableMap.of( "ownerLabel", notPartOfFingerprint("//bar"), "nestedArtifact", partOfFingerprint(nested1, nested2), "destDir", partOfFingerprint("out1", "out2"), "excludes", partOfFingerprint(ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), PathFragment.create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements SkyFunction { @Nullable @Override public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
asFragment
/* * Copyright (c) 2014, 2024, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package stream; import static jaxp.library.JAXPTestUtilities.USER_DIR; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.InputStream; import java.io.OutputStream; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLOutputFactory; import javax.xml.stream.XMLStreamReader; import javax.xml.stream.XMLStreamWriter; import org.testng.Assert; import org.testng.annotations.Test; /* * @test * @bug 6688002 * @library /javax/xml/jaxp/libs /javax/xml/jaxp/unittest * @run testng/othervm stream.Bug6688002Test * @summary Test single instance of XMLOutputFactory/XMLInputFactory create multiple Writer/Readers in parallel. 
*/ public class Bug6688002Test { private static final XMLOutputFactory outputFactory = XMLOutputFactory.newInstance(); private static final XMLInputFactory inputFactory = XMLInputFactory.newInstance(); private static final int NO_THREADS = 3; @Test public void testMultiThread() throws Exception { Thread[] threads = new Thread[NO_THREADS]; for (int i = 0; i < NO_THREADS; i++) { threads[i] = new Thread(new [MASK] (i)); } for (int i = 0; i < NO_THREADS; i++) { threads[i].start(); } for (int i = 0; i < NO_THREADS; i++) { threads[i].join(); } } public class [MASK] implements Runnable { final String no; [MASK] (int no) { this.no = String.valueOf(no); } public void run() { try { FileOutputStream fos = new FileOutputStream(USER_DIR + no); XMLStreamWriter w = getWriter(fos); // System.out.println("Writer="+w+" Thread="+Thread.currentThread()); w.writeStartDocument(); w.writeStartElement("hello"); for (int j = 0; j < 50; j++) { w.writeStartElement("a" + j); w.writeEndElement(); } w.writeEndElement(); w.writeEndDocument(); w.close(); fos.close(); FileInputStream fis = new FileInputStream(USER_DIR + no); XMLStreamReader r = getReader(fis); while (r.hasNext()) { r.next(); } r.close(); fis.close(); } catch (Exception e) { Assert.fail(e.getMessage()); } } } public static/* synchronized */XMLStreamReader getReader(InputStream is) throws Exception { return inputFactory.createXMLStreamReader(is); // return XMLStreamReaderFactory.create(null, is, true); } public static/* synchronized */XMLStreamWriter getWriter(OutputStream os) throws Exception { return outputFactory.createXMLStreamWriter(os); // return XMLStreamWriterFactory.createXMLStreamWriter(os); } }
MyRunnable
/* * Copyright 2012-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.loader.net.protocol.jar; import java.io.File; import java.net.MalformedURLException; import java.net.URL; import java.util.jar. [MASK] ; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; import org.springframework.boot.loader.net.util.UrlDecoder; import static org.assertj.core.api.Assertions.assertThat; /** * Tests for {@link JarUrl}. 
* * @author Phillip Webb */ class JarUrlTests { @TempDir File temp; File jarFile; String jarFileUrlPath; @BeforeEach void setup() throws MalformedURLException { this.jarFile = new File(this.temp, "my.jar"); this.jarFileUrlPath = this.jarFile.toURI().toURL().toString().substring("file:".length()).replace("!", "%21"); } @Test void createWithFileReturnsUrl() { URL url = JarUrl.create(this.jarFile); assertThat(url).hasToString("jar:file:%s!/".formatted(this.jarFileUrlPath)); } @Test void createWithFileAndEntryReturnsUrl() { [MASK] entry = new [MASK] ("lib.jar"); URL url = JarUrl.create(this.jarFile, entry); assertThat(url).hasToString("jar:nested:%s/!lib.jar!/".formatted(this.jarFileUrlPath)); } @Test void createWithFileAndNullEntryReturnsUrl() { URL url = JarUrl.create(this.jarFile, ( [MASK] ) null); assertThat(url).hasToString("jar:file:%s!/".formatted(this.jarFileUrlPath)); } @Test void createWithFileAndNameReturnsUrl() { URL url = JarUrl.create(this.jarFile, "lib.jar"); assertThat(url).hasToString("jar:nested:%s/!lib.jar!/".formatted(this.jarFileUrlPath)); } @Test void createWithFileAndNullNameReturnsUrl() { URL url = JarUrl.create(this.jarFile, (String) null); assertThat(url).hasToString("jar:file:%s!/".formatted(this.jarFileUrlPath)); } @Test void createWithFileNameAndPathReturnsUrl() { URL url = JarUrl.create(this.jarFile, "lib.jar", "com/example/My.class"); assertThat(url).hasToString("jar:nested:%s/!lib.jar!/com/example/My.class".formatted(this.jarFileUrlPath)); } @Test void createWithReservedCharsInName() throws Exception { String badFolderName = "foo#bar!/baz/!oof"; this.temp = new File(this.temp, badFolderName); setup(); URL url = JarUrl.create(this.jarFile, "lib.jar", "com/example/My.class"); assertThat(url).hasToString("jar:nested:%s/!lib.jar!/com/example/My.class".formatted(this.jarFileUrlPath)); assertThat(UrlDecoder.decode(url.toString())).contains(badFolderName); } }
JarEntry
/* * Copyright 2012-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.loader.net.protocol.jar; import java.io.File; import java.net.MalformedURLException; import java.net.URL; import java.util.jar.JarEntry; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; import org.springframework.boot.loader.net.util.UrlDecoder; import static org.assertj.core.api.Assertions.assertThat; /** * Tests for {@link JarUrl}. * * @author Phillip Webb */ class JarUrlTests { @TempDir File temp; File jarFile; String jarFileUrlPath; @BeforeEach void setup() throws MalformedURLException { this.jarFile = new File(this.temp, "my.jar"); this.jarFileUrlPath = this.jarFile.toURI().toURL().toString().substring("file:".length()).replace("!", "%21"); } @Test void createWithFileReturnsUrl() { URL url = JarUrl.create(this.jarFile); assertThat(url). [MASK] ("jar:file:%s!/".formatted(this.jarFileUrlPath)); } @Test void createWithFileAndEntryReturnsUrl() { JarEntry entry = new JarEntry("lib.jar"); URL url = JarUrl.create(this.jarFile, entry); assertThat(url). [MASK] ("jar:nested:%s/!lib.jar!/".formatted(this.jarFileUrlPath)); } @Test void createWithFileAndNullEntryReturnsUrl() { URL url = JarUrl.create(this.jarFile, (JarEntry) null); assertThat(url). 
[MASK] ("jar:file:%s!/".formatted(this.jarFileUrlPath)); } @Test void createWithFileAndNameReturnsUrl() { URL url = JarUrl.create(this.jarFile, "lib.jar"); assertThat(url). [MASK] ("jar:nested:%s/!lib.jar!/".formatted(this.jarFileUrlPath)); } @Test void createWithFileAndNullNameReturnsUrl() { URL url = JarUrl.create(this.jarFile, (String) null); assertThat(url). [MASK] ("jar:file:%s!/".formatted(this.jarFileUrlPath)); } @Test void createWithFileNameAndPathReturnsUrl() { URL url = JarUrl.create(this.jarFile, "lib.jar", "com/example/My.class"); assertThat(url). [MASK] ("jar:nested:%s/!lib.jar!/com/example/My.class".formatted(this.jarFileUrlPath)); } @Test void createWithReservedCharsInName() throws Exception { String badFolderName = "foo#bar!/baz/!oof"; this.temp = new File(this.temp, badFolderName); setup(); URL url = JarUrl.create(this.jarFile, "lib.jar", "com/example/My.class"); assertThat(url). [MASK] ("jar:nested:%s/!lib.jar!/com/example/My.class".formatted(this.jarFileUrlPath)); assertThat(UrlDecoder.decode(url.toString())).contains(badFolderName); } }
hasToString
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import 
com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.EvaluationContext; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.MemoizingEvaluator; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe. [MASK] ; import com.google.devtools.build.skyframe. [MASK] Exception; import com.google.devtools.build.skyframe. [MASK] Name; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import net.starlark.java.eval.StarlarkSemantics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. 
*/ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator evaluator; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); BlazeDirectories directories = new BlazeDirectories( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories); Map< [MASK] Name, [MASK] > skyFunctions = new HashMap<>(); skyFunctions.put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache.NO_CACHE, externalFilesHelper)); skyFunctions.put( [MASK] s.FILE, new FileFunction(pkgLocator, directories)); skyFunctions.put( [MASK] s.DIRECTORY_LISTING, new DirectoryListingFunction()); skyFunctions.put( [MASK] s.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache.NO_CACHE)); skyFunctions.put( [MASK] s.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache.NO_CACHE)); skyFunctions.put( [MASK] s.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); skyFunctions.put( [MASK] s.IGNORED_SUBDIRECTORIES, IgnoredSubdirectoriesFunction.NOOP); skyFunctions.put( [MASK] s.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); skyFunctions.put( [MASK] s.WORKSPACE_NAME, new TestWorkspaceNameFunction()); 
skyFunctions.put( [MASK] s.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); evaluator = new InMemoryMemoizingEvaluator(skyFunctions, differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String relative) { return RootedPath.toRootedPath( artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative(relative)); } private static RootedPath siblingOf(Artifact artifact, String relative) { PathFragment parent = Preconditions.checkNotNull(artifact.getRootRelativePath().getParentDirectory()); return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative(relative)); } private void createFile(Path path, String... contents) throws Exception { if (!path.getParentDirectory().exists()) { scratch.dir(path.getParentDirectory().getPathString()); } scratch.file(path.getPathString(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact.getPath(), contents); } private RootedPath createFile(RootedPath path, String... 
contents) throws Exception { createFile(path.asPath(), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { EvaluationContext evaluationContext = EvaluationContext.newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return evaluator.evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink(PathFragment.create(from), to.getPath().asFragment()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink(PathFragment.create(from), to.asPath().asFragment()); } private FilesetOutputSymlink symlink(PathFragment from, PathFragment to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory.asFragment()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. 
(input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.targetPath(), input.relativeToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); 
assertThat(result.getError(key).getException()) .hasMessageThat() .contains("foo contains a directory"); } @Test public void testFileTraversalForDanglingSymlink() throws Exception { Artifact linkName = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf(linkName, "target.file"), "blah"); linkName.getPath().createSymbolicLink(PathFragment.create("target.file")); linkTarget.asPath().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ linkName, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different values (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class FingerprintTester { private final Map<String, Domain> domains; FingerprintTester(Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@Nullable String useAlternateFor) { Map<String, Object> values = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { values.put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return values; } public void doTest() throws Exception { Fingerprint fp = new Fingerprint(); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedInFingerprint) { assertWithMessage( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { assertWithMessage( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedInFingerprint; Object valueA; Object valueB; Domain(boolean includedInFingerprint, Object valueA, Object valueB) { this.includedInFingerprint = includedInFingerprint; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOfFingerprint(Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain notPartOfFingerprint(Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void testFingerprintOfFileTraversal() throws Exception { new FingerprintTester( ImmutableMap.<String, Domain>builder() .put("ownerLabel", notPartOfFingerprint("//bar")) .put("fileToTraverse", partOfFingerprint("foo/file.a", 
"bar/file.b")) .put("destPath", partOfFingerprint("out1", "out2")) .put("strictFilesetOutput", partOfFingerprint(true, false)) .put("permitDirectories", partOfFingerprint(true, false)) .buildOrThrow()) { @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), PathFragment.create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void testFingerprintOfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new FingerprintTester( ImmutableMap.of( "ownerLabel", notPartOfFingerprint("//bar"), "nestedArtifact", partOfFingerprint(nested1, nested2), "destDir", partOfFingerprint("out1", "out2"), "excludes", partOfFingerprint(ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), PathFragment.create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements [MASK] { @Nullable @Override public SkyValue compute(SkyKey skyKey, Environment env) throws [MASK] Exception, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
SkyFunction
/* * Copyright (c) 2014, 2024, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package stream; import static jaxp.library.JAXPTestUtilities.USER_DIR; import java.io.FileInputStream; import java.io. [MASK] ; import java.io.InputStream; import java.io.OutputStream; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLOutputFactory; import javax.xml.stream.XMLStreamReader; import javax.xml.stream.XMLStreamWriter; import org.testng.Assert; import org.testng.annotations.Test; /* * @test * @bug 6688002 * @library /javax/xml/jaxp/libs /javax/xml/jaxp/unittest * @run testng/othervm stream.Bug6688002Test * @summary Test single instance of XMLOutputFactory/XMLInputFactory create multiple Writer/Readers in parallel. 
*/ public class Bug6688002Test { private static final XMLOutputFactory outputFactory = XMLOutputFactory.newInstance(); private static final XMLInputFactory inputFactory = XMLInputFactory.newInstance(); private static final int NO_THREADS = 3; @Test public void testMultiThread() throws Exception { Thread[] threads = new Thread[NO_THREADS]; for (int i = 0; i < NO_THREADS; i++) { threads[i] = new Thread(new MyRunnable(i)); } for (int i = 0; i < NO_THREADS; i++) { threads[i].start(); } for (int i = 0; i < NO_THREADS; i++) { threads[i].join(); } } public class MyRunnable implements Runnable { final String no; MyRunnable(int no) { this.no = String.valueOf(no); } public void run() { try { [MASK] fos = new [MASK] (USER_DIR + no); XMLStreamWriter w = getWriter(fos); // System.out.println("Writer="+w+" Thread="+Thread.currentThread()); w.writeStartDocument(); w.writeStartElement("hello"); for (int j = 0; j < 50; j++) { w.writeStartElement("a" + j); w.writeEndElement(); } w.writeEndElement(); w.writeEndDocument(); w.close(); fos.close(); FileInputStream fis = new FileInputStream(USER_DIR + no); XMLStreamReader r = getReader(fis); while (r.hasNext()) { r.next(); } r.close(); fis.close(); } catch (Exception e) { Assert.fail(e.getMessage()); } } } public static/* synchronized */XMLStreamReader getReader(InputStream is) throws Exception { return inputFactory.createXMLStreamReader(is); // return XMLStreamReaderFactory.create(null, is, true); } public static/* synchronized */XMLStreamWriter getWriter(OutputStream os) throws Exception { return outputFactory.createXMLStreamWriter(os); // return XMLStreamWriterFactory.createXMLStreamWriter(os); } }
FileOutputStream
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ package org.elasticsearch.xpack.esql.plugin; import org.elasticsearch.action. [MASK] ; import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.compute.EsqlRefCountingListener; import org.elasticsearch.compute.operator.DriverProfile; import org.elasticsearch.compute.operator.ResponseHeadersCollector; import org.elasticsearch.core.Releasable; import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; import java.util.Collections; import java.util.List; /** * A variant of {@link RefCountingListener} with the following differences: * 1. Automatically cancels sub tasks on failure (via runOnTaskFailure) * 2. Collects driver profiles from sub tasks. * 3. Collects response headers from sub tasks, specifically warnings emitted during compute * 4. Collects failures and returns the most appropriate exception to the caller. 
*/ final class ComputeListener implements Releasable { private final EsqlRefCountingListener refs; private final List<DriverProfile> collectedProfiles; private final ResponseHeadersCollector responseHeaders; private final Runnable runOnFailure; ComputeListener(ThreadPool threadPool, Runnable runOnFailure, [MASK] <List<DriverProfile>> delegate) { this.runOnFailure = runOnFailure; this.responseHeaders = new ResponseHeadersCollector(threadPool.getThreadContext()); this.collectedProfiles = Collections.synchronizedList(new ArrayList<>()); // listener that executes after all the sub-listeners refs (created via acquireCompute) have completed this.refs = new EsqlRefCountingListener(delegate.delegateFailure((l, ignored) -> { responseHeaders.finish(); delegate.onResponse(collectedProfiles.stream().toList()); })); } /** * Acquires a new listener that doesn't collect result */ [MASK] <Void> acquireAvoid() { return refs.acquire().delegateResponse((l, e) -> { try { runOnFailure.run(); } finally { l.onFailure(e); } }); } /** * Acquires a new listener that collects compute result. This listener will also collect warnings emitted during compute */ [MASK] <List<DriverProfile>> acquireCompute() { final [MASK] <Void> delegate = acquireAvoid(); return [MASK] .wrap(profiles -> { responseHeaders.collect(); if (profiles != null && profiles.isEmpty() == false) { collectedProfiles.addAll(profiles); } delegate.onResponse(null); }, e -> { responseHeaders.collect(); delegate.onFailure(e); }); } @Override public void close() { refs.close(); } }
ActionListener
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import 
com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.EvaluationContext; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.MemoizingEvaluator; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import net.starlark.java.eval.StarlarkSemantics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. 
*/ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator evaluator; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); BlazeDirectories directories = new BlazeDirectories( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories); Map<SkyFunctionName, SkyFunction> skyFunctions = new HashMap<>(); skyFunctions.put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache. [MASK] , externalFilesHelper)); skyFunctions.put(SkyFunctions.FILE, new FileFunction(pkgLocator, directories)); skyFunctions.put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction()); skyFunctions.put( SkyFunctions.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache. [MASK] )); skyFunctions.put( SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache. 
[MASK] )); skyFunctions.put( SkyFunctions.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); skyFunctions.put(SkyFunctions.IGNORED_SUBDIRECTORIES, IgnoredSubdirectoriesFunction.NOOP); skyFunctions.put( SkyFunctions.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); skyFunctions.put(SkyFunctions.WORKSPACE_NAME, new TestWorkspaceNameFunction()); skyFunctions.put( SkyFunctions.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); evaluator = new InMemoryMemoizingEvaluator(skyFunctions, differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String relative) { return RootedPath.toRootedPath( artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative(relative)); } private static RootedPath siblingOf(Artifact artifact, String relative) { PathFragment parent = Preconditions.checkNotNull(artifact.getRootRelativePath().getParentDirectory()); return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative(relative)); } private void createFile(Path path, String... 
contents) throws Exception { if (!path.getParentDirectory().exists()) { scratch.dir(path.getParentDirectory().getPathString()); } scratch.file(path.getPathString(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact.getPath(), contents); } private RootedPath createFile(RootedPath path, String... contents) throws Exception { createFile(path.asPath(), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { EvaluationContext evaluationContext = EvaluationContext.newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return evaluator.evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink(PathFragment.create(from), to.getPath().asFragment()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink(PathFragment.create(from), to.asPath().asFragment()); } private FilesetOutputSymlink symlink(PathFragment from, PathFragment to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory.asFragment()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. 
(input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.targetPath(), input.relativeToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); 
assertThat(result.getError(key).getException()) .hasMessageThat() .contains("foo contains a directory"); } @Test public void testFileTraversalForDanglingSymlink() throws Exception { Artifact linkName = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf(linkName, "target.file"), "blah"); linkName.getPath().createSymbolicLink(PathFragment.create("target.file")); linkTarget.asPath().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ linkName, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different values (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class FingerprintTester { private final Map<String, Domain> domains; FingerprintTester(Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@Nullable String useAlternateFor) { Map<String, Object> values = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { values.put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return values; } public void doTest() throws Exception { Fingerprint fp = new Fingerprint(); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedInFingerprint) { assertWithMessage( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { assertWithMessage( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedInFingerprint; Object valueA; Object valueB; Domain(boolean includedInFingerprint, Object valueA, Object valueB) { this.includedInFingerprint = includedInFingerprint; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOfFingerprint(Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain notPartOfFingerprint(Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void testFingerprintOfFileTraversal() throws Exception { new FingerprintTester( ImmutableMap.<String, Domain>builder() .put("ownerLabel", notPartOfFingerprint("//bar")) .put("fileToTraverse", partOfFingerprint("foo/file.a", 
"bar/file.b")) .put("destPath", partOfFingerprint("out1", "out2")) .put("strictFilesetOutput", partOfFingerprint(true, false)) .put("permitDirectories", partOfFingerprint(true, false)) .buildOrThrow()) { @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), PathFragment.create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void testFingerprintOfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new FingerprintTester( ImmutableMap.of( "ownerLabel", notPartOfFingerprint("//bar"), "nestedArtifact", partOfFingerprint(nested1, nested2), "destDir", partOfFingerprint("out1", "out2"), "excludes", partOfFingerprint(ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), PathFragment.create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements SkyFunction { @Nullable @Override public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
NO_CACHE
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com. [MASK] .presto.hudi; import com. [MASK] .presto.common.type.TypeManager; import com. [MASK] .presto.hive.HdfsEnvironment; import com. [MASK] .presto.hive.MetastoreClientConfig; import com. [MASK] .presto.hive.metastore.ExtendedHiveMetastore; import com. [MASK] .presto.hive.metastore.InMemoryCachingHiveMetastore; import com. [MASK] .presto.spi.connector.ConnectorMetadata; import javax.inject.Inject; import static java.util.Objects.requireNonNull; public class HudiMetadataFactory { private final ExtendedHiveMetastore metastore; private final HdfsEnvironment hdfsEnvironment; private final TypeManager typeManager; private final long perTransactionCacheMaximumSize; private final boolean metastoreImpersonationEnabled; private final int metastorePartitionCacheMaxColumnCount; @Inject public HudiMetadataFactory( ExtendedHiveMetastore metastore, HdfsEnvironment hdfsEnvironment, TypeManager typeManager, MetastoreClientConfig metastoreClientConfig) { this.metastore = requireNonNull(metastore, "metastore is null"); this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null"); this.typeManager = requireNonNull(typeManager, "typeManager is null"); this.perTransactionCacheMaximumSize = metastoreClientConfig.getPerTransactionMetastoreCacheMaximumSize(); this.metastoreImpersonationEnabled = metastoreClientConfig.isMetastoreImpersonationEnabled(); this.metastorePartitionCacheMaxColumnCount = 
metastoreClientConfig.getPartitionCacheColumnCountLimit(); } public ConnectorMetadata create() { return new HudiMetadata( InMemoryCachingHiveMetastore.memoizeMetastore(metastore, metastoreImpersonationEnabled, perTransactionCacheMaximumSize, metastorePartitionCacheMaxColumnCount), hdfsEnvironment, typeManager); } }
facebook
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import 
com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.EvaluationContext; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.MemoizingEvaluator; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import net.starlark.java.eval.StarlarkSemantics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. 
*/ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator evaluator; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); BlazeDirectories directories = new BlazeDirectories( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories); Map<SkyFunctionName, SkyFunction> [MASK] = new HashMap<>(); [MASK] .put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache.NO_CACHE, externalFilesHelper)); [MASK] .put(SkyFunctions.FILE, new FileFunction(pkgLocator, directories)); [MASK] .put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction()); [MASK] .put( SkyFunctions.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache.NO_CACHE)); [MASK] .put( SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache.NO_CACHE)); [MASK] .put( SkyFunctions.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); [MASK] .put(SkyFunctions.IGNORED_SUBDIRECTORIES, IgnoredSubdirectoriesFunction.NOOP); [MASK] .put( SkyFunctions.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); [MASK] .put(SkyFunctions.WORKSPACE_NAME, new TestWorkspaceNameFunction()); [MASK] .put( 
SkyFunctions.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); evaluator = new InMemoryMemoizingEvaluator( [MASK] , differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String relative) { return RootedPath.toRootedPath( artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative(relative)); } private static RootedPath siblingOf(Artifact artifact, String relative) { PathFragment parent = Preconditions.checkNotNull(artifact.getRootRelativePath().getParentDirectory()); return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative(relative)); } private void createFile(Path path, String... contents) throws Exception { if (!path.getParentDirectory().exists()) { scratch.dir(path.getParentDirectory().getPathString()); } scratch.file(path.getPathString(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact.getPath(), contents); } private RootedPath createFile(RootedPath path, String... 
contents) throws Exception { createFile(path.asPath(), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { EvaluationContext evaluationContext = EvaluationContext.newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return evaluator.evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink(PathFragment.create(from), to.getPath().asFragment()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink(PathFragment.create(from), to.asPath().asFragment()); } private FilesetOutputSymlink symlink(PathFragment from, PathFragment to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory.asFragment()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. 
(input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.targetPath(), input.relativeToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); 
assertThat(result.getError(key).getException()) .hasMessageThat() .contains("foo contains a directory"); } @Test public void testFileTraversalForDanglingSymlink() throws Exception { Artifact linkName = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf(linkName, "target.file"), "blah"); linkName.getPath().createSymbolicLink(PathFragment.create("target.file")); linkTarget.asPath().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ linkName, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different values (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class FingerprintTester { private final Map<String, Domain> domains; FingerprintTester(Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@Nullable String useAlternateFor) { Map<String, Object> values = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { values.put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return values; } public void doTest() throws Exception { Fingerprint fp = new Fingerprint(); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedInFingerprint) { assertWithMessage( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { assertWithMessage( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedInFingerprint; Object valueA; Object valueB; Domain(boolean includedInFingerprint, Object valueA, Object valueB) { this.includedInFingerprint = includedInFingerprint; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOfFingerprint(Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain notPartOfFingerprint(Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void testFingerprintOfFileTraversal() throws Exception { new FingerprintTester( ImmutableMap.<String, Domain>builder() .put("ownerLabel", notPartOfFingerprint("//bar")) .put("fileToTraverse", partOfFingerprint("foo/file.a", 
"bar/file.b")) .put("destPath", partOfFingerprint("out1", "out2")) .put("strictFilesetOutput", partOfFingerprint(true, false)) .put("permitDirectories", partOfFingerprint(true, false)) .buildOrThrow()) { @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), PathFragment.create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void testFingerprintOfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new FingerprintTester( ImmutableMap.of( "ownerLabel", notPartOfFingerprint("//bar"), "nestedArtifact", partOfFingerprint(nested1, nested2), "destDir", partOfFingerprint("out1", "out2"), "excludes", partOfFingerprint(ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), PathFragment.create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements SkyFunction { @Nullable @Override public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
skyFunctions
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.hudi; import com.facebook.presto.common.type.TypeManager; import com.facebook.presto.hive.HdfsEnvironment; import com.facebook.presto.hive.MetastoreClientConfig; import com.facebook.presto.hive.metastore.ExtendedHiveMetastore; import com.facebook.presto.hive.metastore.InMemoryCachingHiveMetastore; import com.facebook.presto.spi.connector.ConnectorMetadata; import javax.inject.Inject; import static java.util.Objects.requireNonNull; public class HudiMetadataFactory { private final ExtendedHiveMetastore metastore; private final HdfsEnvironment hdfsEnvironment; private final TypeManager typeManager; private final long [MASK] ; private final boolean metastoreImpersonationEnabled; private final int metastorePartitionCacheMaxColumnCount; @Inject public HudiMetadataFactory( ExtendedHiveMetastore metastore, HdfsEnvironment hdfsEnvironment, TypeManager typeManager, MetastoreClientConfig metastoreClientConfig) { this.metastore = requireNonNull(metastore, "metastore is null"); this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null"); this.typeManager = requireNonNull(typeManager, "typeManager is null"); this. 
[MASK] = metastoreClientConfig.getPerTransactionMetastoreCacheMaximumSize(); this.metastoreImpersonationEnabled = metastoreClientConfig.isMetastoreImpersonationEnabled(); this.metastorePartitionCacheMaxColumnCount = metastoreClientConfig.getPartitionCacheColumnCountLimit(); } public ConnectorMetadata create() { return new HudiMetadata( InMemoryCachingHiveMetastore.memoizeMetastore(metastore, metastoreImpersonationEnabled, [MASK] , metastorePartitionCacheMaxColumnCount), hdfsEnvironment, typeManager); } }
perTransactionCacheMaximumSize
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2024 DBeaver Corp and others * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.model.sql; /** * SQL editor constants */ public class SQLConstants { public static final String NULL_VALUE = "NULL"; public static final String STR_QUOTE_SINGLE = "'"; public static final String STR_QUOTE_DOUBLE = "\""; public static final String[][] DOUBLE_QUOTE_STRINGS = {{ STR_QUOTE_DOUBLE, STR_QUOTE_DOUBLE }}; public static final String DEFAULT_STATEMENT_DELIMITER = ";"; public static final String[] DEFAULT_SCRIPT_DELIMITER = { DEFAULT_STATEMENT_DELIMITER }; public static final String STR_QUOTE_APOS = "`"; public static final String ML_COMMENT_START = "/*"; public static final String ML_COMMENT_END = "*/"; public static final String SL_COMMENT = "--"; public static final String ASTERISK = "*"; public static final String QUESTION = "?"; public static final String DOT = "."; public static final String KEYWORD_SELECT = "SELECT"; public static final String KEYWORD_INSERT = "INSERT"; public static final String KEYWORD_UPDATE = "UPDATE"; public static final String KEYWORD_DELETE = "DELETE"; public static final String KEYWORD_MERGE = "MERGE"; public static final String KEYWORD_UPSERT = "UPSERT"; public static final String KEYWORD_TRUNCATE = "TRUNCATE"; public static final String KEYWORD_FROM = "FROM"; public static final String KEYWORD_INTO = "INTO"; public static final String KEYWORD_JOIN = "JOIN"; public static 
final String KEYWORD_CROSS_JOIN = "CROSS JOIN"; public static final String KEYWORD_NATURAL_JOIN = "NATURAL JOIN"; public static final String [MASK] = "WHERE"; public static final String KEYWORD_SET = "SET"; public static final String KEYWORD_ON = "ON"; public static final String KEYWORD_AND = "AND"; public static final String KEYWORD_OR = "OR"; public static final String KEYWORD_BETWEEN = "BETWEEN"; public static final String KEYWORD_IS = "IS"; public static final String KEYWORD_NOT = "NOT"; public static final String KEYWORD_NULL = "NULL"; public static final String KEYWORD_IN = "IN"; public static final String KEYWORD_VALUES = "VALUES"; public static final String KEYWORD_ORDER_BY = "ORDER BY"; public static final String KEYWORD_GROUP_BY = "GROUP BY"; public static final String KEYWORD_HAVING = "HAVING"; public static final String KEYWORD_LIKE = "LIKE"; public static final String KEYWORD_ILIKE = "ILIKE"; public static final String KEYWORD_FUNCTION = "FUNCTION"; public static final String KEYWORD_PROCEDURE = "PROCEDURE"; public static final String KEYWORD_COMMIT = "COMMIT"; public static final String KEYWORD_ROLLBACK = "ROLLBACK"; public static final String KEYWORD_EXPLAIN = "EXPLAIN"; public static final String KEYWORD_CASE = "CASE"; public static final String KEYWORD_QUALIFY = "QUALIFY"; public static final String KEYWORD_AS = "AS"; public static final String KEYWORD_USING = "USING"; public static final String DATA_TYPE_VARCHAR = "varchar"; public static final String DATA_TYPE_BIGINT = "BIGINT"; public static final String DATA_TYPE_BINARY = "BINARY"; public static final String DATA_TYPE_BOOLEAN = "BOOLEAN"; public static final String DATA_TYPE_DOUBLE = "DOUBLE"; public static final String DATA_TYPE_FLOAT = "FLOAT"; public static final String DATA_TYPE_INT = "INT"; public static final String DATA_TYPE_SMALLINT = "SMALLINT"; public static final String DATA_TYPE_STRING = "STRING"; public static final String DATA_TYPE_TINYINT = "TINYINT"; public static final String[] 
QUERY_KEYWORDS = { KEYWORD_SELECT, KEYWORD_INSERT, KEYWORD_UPDATE, KEYWORD_DELETE, KEYWORD_MERGE, KEYWORD_UPSERT, KEYWORD_TRUNCATE }; public static final String[] TABLE_KEYWORDS = { KEYWORD_FROM, KEYWORD_INSERT, KEYWORD_UPDATE, KEYWORD_DELETE, KEYWORD_INTO, "TABLE", "VIEW", KEYWORD_JOIN, KEYWORD_TRUNCATE, KEYWORD_MERGE, }; public static final String[] COLUMN_KEYWORDS = { KEYWORD_SELECT, [MASK] , KEYWORD_SET, KEYWORD_ON, KEYWORD_AND, KEYWORD_OR, "BY", "HAVING" }; public static final String[] DDL_KEYWORDS = { "CREATE", "ALTER", "DROP", }; public static final String[] SQL2003_RESERVED_KEYWORDS = { "ALL", "ALLOCATE", "ALTER", KEYWORD_AND, "ANY", "ARE", "ARRAY", "AS", "ASENSITIVE", "ASYMMETRIC", "AT", "ATOMIC", "AUTHORIZATION", "BEGIN", KEYWORD_BETWEEN, //"BIGINT", DATA_TYPE_BINARY, "BOTH", "BY", "CALL", "CALLED", "CARDINALITY", "CASCADE", "CASCADED", KEYWORD_CASE, "CAST", "CEIL", "CEILING", "CHARACTER", "CHECK", "CLOSE", "COALESCE", "COLLATE", "COLLECT", "COLUMN", KEYWORD_COMMIT, "CONDITION", "CONNECT", "CONSTRAINT", "CONVERT", "CORR", "CORRESPONDING", "COVAR_POP", "COVAR_SAMP", "CREATE", "CROSS", "CUBE", "CUME_DIST", "CURRENT", "CURSOR", "CYCLE", "DAY", "DEALLOCATE", "DEC", "DECLARE", "DEFAULT", KEYWORD_DELETE, "DENSE_RANK", "DEREF", "DESCRIBE", "DETERMINISTIC", "DISCONNECT", "DISTINCT", "DROP", "DYNAMIC", "EACH", "ELEMENT", "ELSE", "END", "END-EXEC", "ESCAPE", "EVERY", "EXCEPT", "EXEC", "EXECUTE", "EXISTS", "EXP", "EXTERNAL", "EXTRACT", "FALSE", "FETCH", "FILTER", "FOR", "FOREIGN", "FREE", "FROM", "FULL", KEYWORD_FUNCTION, "FUSION", "GET", "GLOBAL", "GRANT", "GROUP", "GROUPING", "HAVING", "HOLD", "HOUR", "IDENTITY", "IF", KEYWORD_IN, "INDEX", "INDICATOR", "INNER", "INOUT", "INSENSITIVE", KEYWORD_INSERT, "INTERSECT", "INTERSECTION", "INTERVAL", "INTO", KEYWORD_IS, "JOIN", "LANGUAGE", "LARGE", "LATERAL", "LEADING", "LEFT", "LIKE", "LN", "LOCAL", "MATCH", "MEMBER", KEYWORD_MERGE, "METHOD", "MINUTE", "MOD", "MODIFIES", // "MODULE", // too common for column names "MONTH", 
"MULTISET", "NATIONAL", "NATURAL", //"NCHAR", //"NCLOB", "NEW", "NO", "NONE", "NORMALIZE", KEYWORD_NOT, KEYWORD_NULL, "NULLIF", "NUMERIC", "OF", "OLD", KEYWORD_ON, "ONLY", "OPEN", "OR", "ORDER", "OUT", "OUTER", "OVER", "OVERLAPS", "OVERLAY", "PARAMETER", "PARTITION", "POSITION", "PRECISION", "PREPARE", "PRIMARY", KEYWORD_PROCEDURE, "RANGE", "RANK", "READS", "REAL", "RECURSIVE", "REF", "REFERENCES", "REFERENCING", "RELEASE", "RENAME", "RESULT", "RETURN", "RETURNS", "REVOKE", "RIGHT", KEYWORD_ROLLBACK, "ROLLUP", "ROW", "ROW_NUMBER", "ROWS", "SAVEPOINT", "SCOPE", "SCROLL", "SEARCH", "SECOND", KEYWORD_SELECT, "SENSITIVE", "SESSION_USER", KEYWORD_SET, "SIMILAR", "SMALLINT", "SOME", "SPECIFIC", "SPECIFICTYPE", "SQL", "SQLEXCEPTION", "SQLSTATE", "SQLWARNING", "START", "STATIC", // "STDDEV_POP", // "STDDEV_SAMP", "SUBMULTISET", "SYMMETRIC", "SYSTEM", "SYSTEM_USER", "TABLE", "TABLESAMPLE", "THEN", "TIMEZONE_HOUR", "TIMEZONE_MINUTE", "TO", "TRAILING", "TRANSLATE", "TRANSLATION", "TREAT", "TRIGGER", "TRUE", "UNION", "UNIQUE", "UNKNOWN", "UNNEST", KEYWORD_UPDATE, "USER", "USING", //"VALUE", // too common for column names KEYWORD_VALUES, // "VAR_POP", // "VAR_SAMP", //"VARCHAR", "VARYING", "WHEN", "WHENEVER", [MASK] , "WIDTH_BUCKET", "WINDOW", "WITH", "WITHIN", "WITHOUT", "YEAR", "NULLS", "FIRST", "LAST", "FOLLOWING", "PRECEDING", "UNBOUNDED", "LENGTH", "KEY", "LEVEL", "VIEW", "SEQUENCE", "SCHEMA", "ROLE", "RESTRICT", "ASC", "DESC", // Not actually standard but widely used "LIMIT", // Extended keywords // "A", "ABSOLUTE", "ACTION", // "ADA", "ADD", // "ADMIN", "AFTER", "ALWAYS", // "ASC", "ASSERTION", "ASSIGNMENT", "ATTRIBUTE", "ATTRIBUTES", "BEFORE", // "BERNOULLI", // "BREADTH", // "C", "CASCADE", "CATALOG", // "CATALOG_NAME", "CHAIN", // "CHARACTER_SET_CATALOG", // "CHARACTER_SET_NAME", // "CHARACTER_SET_SCHEMA", "CHARACTERISTICS", "CHARACTERS", // "CLASS_ORIGIN", // "COBOL", "COLLATION", // "COLLATION_CATALOG", // "COLLATION_NAME", // "COLLATION_SCHEMA", // "COLUMN_NAME", 
// "COMMAND_FUNCTION", // "COMMAND_FUNCTION_CODE", "COMMITTED", // "CONDITION_NUMBER", "CONNECTION", // "CONNECTION_NAME", // "CONSTRAINT_CATALOG", // "CONSTRAINT_NAME", // "CONSTRAINT_SCHEMA", "CONSTRAINTS", "CONSTRUCTOR", "CONTAINS", "CONTINUE", "CURSOR_NAME", "DATA", // "DATETIME_INTERVAL_CODE", // "DATETIME_INTERVAL_PRECISION", "DEFAULTS", "DEFERRABLE", "DEFERRED", "DEFINED", "DEFINER", "DEGREE", "DEPTH", "DERIVED", // "DESC", "DESCRIPTOR", "DIAGNOSTICS", "DISPATCH", "DOMAIN", // "DYNAMIC_FUNCTION", // "DYNAMIC_FUNCTION_CODE", "EQUALS", "EXCEPTION", "EXCLUDE", "EXCLUDING", "FINAL", "FIRST", // "FORTRAN", "FOUND", // "G", "GENERAL", "GENERATED", "GO", "GOTO", "GRANTED", "HIERARCHY", "IMMEDIATE", "IMPLEMENTATION", "INCLUDING", "INCREMENT", "INITIALLY", "INPUT", "INSTANCE", "INSTANTIABLE", "INVOKER", "ISOLATION", // "K", // "KEY_MEMBER", "KEY_TYPE", "LAST", "LOCATOR", // "M", "MAP", "MATCHED", "MAXVALUE", // "MESSAGE_LENGTH", // "MESSAGE_OCTET_LENGTH", // "MESSAGE_TEXT", "MINVALUE", "MORE", "MUMPS", // "NAME", // "NAMES", "NESTING", "NEXT", "NORMALIZED", // "NULLABLE", // "NULLS", // "NUMBER", "OBJECT", "OCTETS", "OPTION", "OPTIONS", "ORDERING", "ORDINALITY", "OTHERS", "OUTPUT", "OVERRIDING", "PAD", // "PARAMETER_MODE", // "PARAMETER_NAME", // "PARAMETER_ORDINAL_POSITION", // "PARAMETER_SPECIFIC_CATALOG", // "PARAMETER_SPECIFIC_NAME", // "PARAMETER_SPECIFIC_SCHEMA", "PARTIAL", // "PASCAL", "PATH", "PLACING", // "PLI", "PRESERVE", "PRIOR", "PRIVILEGES", // "PUBLIC", "READ", "RELATIVE", "REPEATABLE", "RESTART", // "RETURNED_CARDINALITY", // "RETURNED_LENGTH", // "RETURNED_OCTET_LENGTH", // "RETURNED_SQLSTATE", "ROUTINE", // "ROUTINE_CATALOG", // "ROUTINE_NAME", // "ROUTINE_SCHEMA", // "ROW_COUNT", "SCALE", // "SCHEMA_NAME", // "SCOPE_CATALOG", // "SCOPE_NAME", // "SCOPE_SCHEMA", "SECTION", "SECURITY", "SELF", "SERIALIZABLE", // "SERVER_NAME", "SESSION", "SETS", // "SIMPLE", "SIZE", "SOURCE", "SPACE", // "SPECIFIC_NAME", // "STATE", // too common for column names 
"STATEMENT", "STRUCTURE", "STYLE", // "SUBCLASS_ORIGIN", // "TABLE_NAME", "TEMPORARY", "TIES", // "TOP_LEVEL_COUNT", "TRANSACTION", // "TRANSACTION_ACTIVE", // "TRANSACTIONS_COMMITTED", // "TRANSACTIONS_ROLLED_BACK", "TRANSFORM", "TRANSFORMS", // "TRIGGER_CATALOG", // "TRIGGER_NAME", // "TRIGGER_SCHEMA", "TYPE", "UNCOMMITTED", "UNDER", "UNNAMED", "USAGE", // "USER_DEFINED_TYPE_CATALOG", // "USER_DEFINED_TYPE_CODE", // "USER_DEFINED_TYPE_NAME", // "USER_DEFINED_TYPE_SCHEMA", "WORK", "WRITE", "ZONE", KEYWORD_QUALIFY }; public static final String[] SQL2003_FUNCTIONS = { "ABS", "AVG", "CHAR_LENGTH", "CHARACTER_LENGTH", "COUNT", "LOCALTIME", "LOCALTIMESTAMP", // "CURRENT_DATE", // "CURRENT_DEFAULT_TRANSFORM_GROUP", // "CURRENT_PATH", // "CURRENT_ROLE", // "CURRENT_TIME", // "CURRENT_TIMESTAMP", // "CURRENT_TRANSFORM_GROUP_FOR_TYPE", // "CURRENT_USER", "FLOOR", "LOWER", "MAX", "MIN", "OCTET_LENGTH", "PERCENT_RANK", "PERCENTILE_CONT", "PERCENTILE_DISC", "POWER", "REGR_AVGX", "REGR_AVGY", "REGR_COUNT", "REGR_INTERCEPT", "REGR_R2", "REGR_SLOPE", "REGR_SXX", "REGR_SXY", "REGR_SYY", "SQRT", "SUBSTRING", "SUM", "TRIM", "UESCAPE", "UPPER", }; public static final String[] SQL_EX_KEYWORDS = { "CHANGE", "MODIFY", }; public static final String[] DEFAULT_TYPES = { DATA_TYPE_BOOLEAN, "CHAR", "VARCHAR", DATA_TYPE_BINARY, "VARBINARY", DATA_TYPE_INT, "INTEGER", DATA_TYPE_SMALLINT, DATA_TYPE_BIGINT, "NUMBER", "NUMERIC", "DECIMAL", DATA_TYPE_FLOAT, DATA_TYPE_DOUBLE, "DATE", "TIME", "TIMESTAMP", "CLOB", "BLOB", }; public static final String BLOCK_BEGIN = "BEGIN"; public static final String BLOCK_END = "END"; /** * Pseudo variables - these are not dynamic parameters */ public static final String[] PSEUDO_VARIABLES = { ":NEW", ":OLD", }; public static final char STRUCT_SEPARATOR = '.'; //$NON-NLS-1$ public static final String CONFIG_COLOR_KEYWORD = "org.jkiss.dbeaver.sql.editor.color.keyword.foreground"; public static final String CONFIG_COLOR_DATATYPE = 
"org.jkiss.dbeaver.sql.editor.color.datatype.foreground"; public static final String CONFIG_COLOR_FUNCTION = "org.jkiss.dbeaver.sql.editor.color.function.foreground"; public static final String CONFIG_COLOR_STRING = "org.jkiss.dbeaver.sql.editor.color.string.foreground"; public static final String CONFIG_COLOR_TABLE = "org.jkiss.dbeaver.sql.editor.color.table.foreground"; public static final String CONFIG_COLOR_TABLE_ALIAS = "org.jkiss.dbeaver.sql.editor.color.table.alias.foreground"; public static final String CONFIG_COLOR_COLUMN = "org.jkiss.dbeaver.sql.editor.color.column.foreground"; public static final String CONFIG_COLOR_COLUMN_DERIVED = "org.jkiss.dbeaver.sql.editor.color.column.derived.foreground"; public static final String CONFIG_COLOR_SCHEMA = "org.jkiss.dbeaver.sql.editor.color.schema.foreground"; public static final String CONFIG_COLOR_COMPOSITE_FIELD = "org.jkiss.dbeaver.sql.editor.color.composite.field.foreground"; public static final String CONFIG_COLOR_SQL_VARIABLE = "org.jkiss.dbeaver.sql.editor.color.sqlVariable.foreground"; public static final String CONFIG_COLOR_SEMANTIC_ERROR = "org.jkiss.dbeaver.sql.editor.color.semanticError.foreground"; public static final String CONFIG_COLOR_NUMBER = "org.jkiss.dbeaver.sql.editor.color.number.foreground"; public static final String CONFIG_COLOR_COMMENT = "org.jkiss.dbeaver.sql.editor.color.comment.foreground"; public static final String CONFIG_COLOR_DELIMITER = "org.jkiss.dbeaver.sql.editor.color.delimiter.foreground"; public static final String CONFIG_COLOR_PARAMETER = "org.jkiss.dbeaver.sql.editor.color.parameter.foreground"; public static final String CONFIG_COLOR_COMMAND = "org.jkiss.dbeaver.sql.editor.color.command.foreground"; public static final String CONFIG_COLOR_TEXT = "org.jkiss.dbeaver.sql.editor.color.text.foreground"; public static final String CONFIG_COLOR_BACKGROUND = "org.jkiss.dbeaver.sql.editor.color.text.background"; public static final String CONFIG_COLOR_DISABLED = 
"org.jkiss.dbeaver.sql.editor.color.disabled.background"; public static final char DEFAULT_PARAMETER_MARK = '?'; public static final char DEFAULT_PARAMETER_PREFIX = ':'; public static final String DEFAULT_IDENTIFIER_QUOTE = "\""; public static final String DEFAULT_LIKE_ESCAPE = "\\"; public static final String KEYWORD_PATTERN_CHARS = "\\*\\"; public static final String DEFAULT_CONTROL_COMMAND_PREFIX = "@"; public final static char[] BRACKETS = {'{', '}', '(', ')', '[', ']', '<', '>'}; public static final String COLUMN_ASTERISK = "*"; }
KEYWORD_WHERE
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ghidra.util.datastruct; import java.io.Serializable; import java.util.Arrays; /** * The BitTree class maintains a set of ordered keys between the values of * 0 and N. It can quickly (O(log(n))) add keys, remove keys, find the next key * greater than some value , and find the prev key less than some value. It can * determine if a key is in the set in O(1) time. This implementation has been * limited to short keys so that it can implement the ShortKeySet interface. */ public class BitTree implements ShortKeySet, Serializable { private final static long serialVersionUID = 1; private int size; // The maximum number of keys in the set. Keys range from 0 to size-1 private int power2; // The next power of 2 that is greater than size. private int[] bits; // Array of bits used to represent a tree of binary values. A bit at // position N will have a left child at 2*N and a right child at 2*N+1. // Its parent position will be at N/2. // A bit is on if any bits in its subtree are on. Leaf bits correspond // directly to keys and are on if the key is in the set. private int numKeys; // The current number of keys in the set. // masks for seting and clearing bits within an 32 bit integer. 
private static final int[] setMask = { 0x00000001,0x00000002,0x00000004,0x00000008, 0x00000010,0x00000020,0x00000040,0x00000080, 0x00000100,0x00000200,0x00000400,0x00000800, 0x00001000,0x00002000,0x00004000,0x00008000, 0x00010000,0x00020000,0x00040000,0x00080000, 0x00100000,0x00200000,0x00400000,0x00800000, 0x01000000,0x02000000,0x04000000,0x08000000, 0x10000000,0x20000000,0x40000000,0x80000000 }; private static final int[] clearMask={ 0xfffffffe,0xfffffffd,0xfffffffb,0xfffffff7, 0xffffffef,0xffffffdf,0xffffffbf,0xffffff7f, 0xfffffeff,0xfffffdff,0xfffffbff,0xfffff7ff, 0xffffefff,0xffffdfff,0xffffbfff,0xffff7fff, 0xfffeffff,0xfffdffff,0xfffbffff,0xfff7ffff, 0xffefffff,0xffdfffff,0xffbfffff,0xff7fffff, 0xfeffffff,0xfdffffff,0xfbffffff,0xf7ffffff, 0xefffffff,0xdfffffff,0xbfffffff,0x7fffffff }; /** * The BitTree constructor takes the maximum key value. The legal * keys for this set range from 0 to maxKey. * @param maxKey the maximum key that will ever be put into this BitTree. */ public BitTree(short maxKey) { this(maxKey,false); } /** * The BitTree constructor takes the maximum key value. The legal * keys for this set range from 0 to maxKey. * @param maxKey the maximum key value. * @param isFull if true, then the set is initilized to contain all legal keys. */ public BitTree(short maxKey, boolean isFull) { this.size = maxKey+1; // find the next power of 2 greater than or equal to n. power2 = 2; int sz = maxKey+1; while (sz > 1) { sz /= 2; power2 *= 2; } // The number of bits need to store the tree is 2 times the number of keys. // Since we are storing the bits in 32 bit integers we need N/16 integers // to store the bits. int nInts = power2/16; // must have at least 1 if (nInts < 1) { nInts = 1; } bits = new int[nInts]; if (isFull) { Arrays.fill(bits,0xffffffff); numKeys = this.size; } } /** * Removes all keys from the set. */ public void removeAll() { Arrays.fill(bits,0); numKeys = 0; } /** * Returns the number of keys currently in the set. 
*/ public int size() { return numKeys; } /** * Adds a key to the set. * @param key to be added. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public void put(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // The first "power2" number of bits are used for internal tree nodes. The // leaf nodes start at index "power2". int nodeIndex = power2+key; // set the leaf bit on to indicate that the key is in the set. // if the bit is already on (The key is already in the set), then just return. if (!setBit(nodeIndex)) { return; } // increment the number of keys in the set. numKeys++; // go up the tree setting each parent bit to "on" while(nodeIndex != 1) { // compute parent index. nodeIndex /= 2; // if any parent bit is already on, then all its parents are already on, // so were done. if (!setBit(nodeIndex)) { return; } } } /** * Removes the key from the set. * @param key The key to remove. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public boolean remove(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // compute the leaf node index. int nodeIndex = power2+key; // clear the leaf bit to indicate that the key is not in the set. // if it is already "off", then we don't have to do anything if (!clearBit(nodeIndex)) { return false; } // decrement the number of keys in the set numKeys--; // traverse up the tree, clearing any parent nodes if all its child // nodes are "off". while(nodeIndex != 1) { nodeIndex /= 2; if (!isBitSet(nodeIndex)) { return true; } if (isBitSet(nodeIndex*2) || isBitSet(nodeIndex*2+1)) { return true; } clearBit(nodeIndex); } return true; } /** * Determines if a given key is in the set. * @param key the key to check if it is in this set. * @return true if the key is in the set. 
*/ public boolean containsKey(short key) { if ((key < 0) || (key >= size)) { return false; } return isBitSet(power2+key); } /** * finds the next key that is in the set that is greater than the given key. * @param key from which to search forward. * @return the next key greater than the given key or -1 if there is no key * greater than the given key. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public short getNext(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // compute leaf node. int nodeIndex = key + power2; // while we are not at the root, search upward until we find a right // sibling that is "on". while(nodeIndex != 1) { // see if we are odd (i.e. the right child) int odd = nodeIndex % 2; // if we are the left child see if my sibling on the right is on. // if so, then the next key must be in that subtree. if (odd == 0) { if (isBitSet(nodeIndex+1)) { // we found a right sibling that is "on", set nodeIndex to // that node. nodeIndex++; break; } } nodeIndex = nodeIndex/2; } // if we made it all the way up to the root node, then there is no key // greater than, so return -1; if (nodeIndex == 1) { return (short)-1; } // now that we found a right sibling that is "on", // follow the leftmost trail of "on" bits to an "on" leaf bit. This bit // represents the next key in the set. while (nodeIndex < power2) { nodeIndex *= 2; // if the left child is not on, then the right child must be "on". if (!isBitSet(nodeIndex)) { nodeIndex++; } } short nextKey = (short)(nodeIndex-power2); if (nextKey >= size) { nextKey = -1; } return nextKey; } /** * Finds the next key that is in the set that is less than the given key. * @param key the key to search before. * @return the next key less than the given key or -1 if there is no key * less than the given key. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. 
*/ public short getPrevious(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // find the leaf node for the given key. int nodeIndex = key + power2; // while we are not at the root, search upward until we find a left // sibling that is "on". while(nodeIndex != 1) { // check if we are a right node. int odd = nodeIndex % 2; // if we are the right child see if my sibling on the left is "on". // if so, then the previous key must be in that subtree. if (odd == 1) { if (isBitSet(nodeIndex-1)) { nodeIndex--; break; } } nodeIndex = nodeIndex/2; } // If we went all the way to the root then there is no previous key, return -1. if (nodeIndex == 1) { return (short)-1; } // follow the rightmost trail of "on" bits to an "on" leaf bit. This bit // represents the previous key in the set. while (nodeIndex < power2) { nodeIndex *= 2; if (isBitSet(nodeIndex+1)) { nodeIndex++; } } return (short)(nodeIndex-power2); } /** * Checks if the set is empty. * @return true if the set is empty. */ public boolean isEmpty() { return numKeys == 0; } /** * Returns the first (lowest) key in the set. */ public short getFirst() { // if the 0 key is in the set, then return it. if(containsKey((short)0)) { return (short)0; } // otherwise return the next key after 0. return getNext((short)0); } /** * Returns the last (highest) key in the set. */ public short getLast() { // if the highest possible key is in the set, return it. if(containsKey((short)(size-1))) { return (short)(size-1); } // otherwise return the next lowest key. return getPrevious((short)(size-1)); } /** * Sets the nth bit on. */ private boolean setBit(int n) { int intIndex = n >> 5; int [MASK] = n & 0x1f; int old = bits[intIndex]; return ((bits[intIndex] |= setMask[ [MASK] ]) != old); } /** * Sets the nth bit to off. 
*/ private boolean clearBit(int n) { int intIndex = n >> 5; int [MASK] = n & 0x1f; int old = bits[intIndex]; return ((bits[intIndex] &= clearMask[ [MASK] ]) != old); } /** * Tests if the nth bit is on. */ private boolean isBitSet(int n) { int intIndex = n >> 5; int [MASK] = n & 0x1f; return ((bits[intIndex] & setMask[ [MASK] ]) != 0); } }
maskIndex
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ package org.elasticsearch.xpack.esql.plugin; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.compute.EsqlRefCountingListener; import org.elasticsearch.compute.operator. [MASK] ; import org.elasticsearch.compute.operator.ResponseHeadersCollector; import org.elasticsearch.core.Releasable; import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; import java.util.Collections; import java.util.List; /** * A variant of {@link RefCountingListener} with the following differences: * 1. Automatically cancels sub tasks on failure (via runOnTaskFailure) * 2. Collects driver profiles from sub tasks. * 3. Collects response headers from sub tasks, specifically warnings emitted during compute * 4. Collects failures and returns the most appropriate exception to the caller. 
*/ final class ComputeListener implements Releasable { private final EsqlRefCountingListener refs; private final List< [MASK] > collectedProfiles; private final ResponseHeadersCollector responseHeaders; private final Runnable runOnFailure; ComputeListener(ThreadPool threadPool, Runnable runOnFailure, ActionListener<List< [MASK] >> delegate) { this.runOnFailure = runOnFailure; this.responseHeaders = new ResponseHeadersCollector(threadPool.getThreadContext()); this.collectedProfiles = Collections.synchronizedList(new ArrayList<>()); // listener that executes after all the sub-listeners refs (created via acquireCompute) have completed this.refs = new EsqlRefCountingListener(delegate.delegateFailure((l, ignored) -> { responseHeaders.finish(); delegate.onResponse(collectedProfiles.stream().toList()); })); } /** * Acquires a new listener that doesn't collect result */ ActionListener<Void> acquireAvoid() { return refs.acquire().delegateResponse((l, e) -> { try { runOnFailure.run(); } finally { l.onFailure(e); } }); } /** * Acquires a new listener that collects compute result. This listener will also collect warnings emitted during compute */ ActionListener<List< [MASK] >> acquireCompute() { final ActionListener<Void> delegate = acquireAvoid(); return ActionListener.wrap(profiles -> { responseHeaders.collect(); if (profiles != null && profiles.isEmpty() == false) { collectedProfiles.addAll(profiles); } delegate.onResponse(null); }, e -> { responseHeaders.collect(); delegate.onFailure(e); }); } @Override public void close() { refs.close(); } }
DriverProfile
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.io.network.api.serialization; import org.apache.flink.core.memory.MemorySegment; import org.apache.flink.runtime.io.network.buffer.Buffer; import org.apache.flink.testutils.junit.utils.TempDirUtils; import org.apache.flink.util. [MASK] ; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; import java.io.File; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; import java.util.Random; import static org.apache.flink.core.memory.MemorySegmentFactory.wrap; import static org.apache.flink.runtime.io.network.api.serialization.SpillingAdaptiveSpanningRecordDeserializer.LENGTH_BYTES; import static org.assertj.core.api.Assertions.assertThat; /** {@link SpanningWrapper} test. 
*/ class SpanningWrapperTest { private static final Random random = new Random(); @TempDir private Path folder; @Test void testLargeUnconsumedSegment() throws Exception { int recordLen = 100; int firstChunk = (int) (recordLen * .9); int spillingThreshold = (int) (firstChunk * .9); byte[] record1 = recordBytes(recordLen); byte[] record2 = recordBytes(recordLen * 2); File canNotEecutableFile = TempDirUtils.newFolder(folder); canNotEecutableFile.setExecutable(false); // Always pick 'canNotEecutableFile' first as the Spilling Channel TmpDir. Thus trigger an // IOException. SpanningWrapper spanningWrapper = new SpanningWrapper( new String[] { TempDirUtils.newFolder(folder).getAbsolutePath(), canNotEecutableFile.getAbsolutePath() + File.separator + "pathdonotexit" }, spillingThreshold, recordLen); spanningWrapper.transferFrom(wrapNonSpanning(record1, firstChunk), recordLen); spanningWrapper.addNextChunkFromMemorySegment( wrap(record1), firstChunk, recordLen - firstChunk + LENGTH_BYTES); spanningWrapper.addNextChunkFromMemorySegment(wrap(record2), 0, record2.length); [MASK] <Buffer> unconsumedSegment = spanningWrapper.getUnconsumedSegment(); spanningWrapper .getInputView() .readFully(new byte[recordLen], 0, recordLen); // read out from file spanningWrapper.transferLeftOverTo(new NonSpanningWrapper()); // clear any leftover spanningWrapper.transferFrom( wrapNonSpanning(recordBytes(recordLen), recordLen), recordLen); // overwrite with new data canNotEecutableFile.setExecutable(true); assertThat(concat(record1, record2)).isEqualTo(toByteArray(unconsumedSegment)); } private byte[] recordBytes(int recordLen) { byte[] inputData = randomBytes(recordLen + LENGTH_BYTES); for (int i = 0; i < Integer.BYTES; i++) { inputData[Integer.BYTES - i - 1] = (byte) (recordLen >>> i * 8); } return inputData; } private NonSpanningWrapper wrapNonSpanning(byte[] bytes, int len) { NonSpanningWrapper nonSpanningWrapper = new NonSpanningWrapper(); MemorySegment segment = wrap(bytes); 
nonSpanningWrapper.initializeFromMemorySegment(segment, 0, len); nonSpanningWrapper .readInt(); // emulate read length performed in getNextRecord to move position return nonSpanningWrapper; } private byte[] toByteArray( [MASK] <Buffer> unconsumed) { final List<Buffer> buffers = new ArrayList<>(); try { unconsumed.forEachRemaining(buffers::add); byte[] result = new byte[buffers.stream().mapToInt(Buffer::readableBytes).sum()]; int offset = 0; for (Buffer buffer : buffers) { int len = buffer.readableBytes(); buffer.getNioBuffer(0, len).get(result, offset, len); offset += len; } return result; } finally { buffers.forEach(Buffer::recycleBuffer); } } private byte[] randomBytes(int length) { byte[] inputData = new byte[length]; random.nextBytes(inputData); return inputData; } private byte[] concat(byte[] input1, byte[] input2) { byte[] expected = new byte[input1.length + input2.length]; System.arraycopy(input1, 0, expected, 0, input1.length); System.arraycopy(input2, 0, expected, input1.length, input2.length); return expected; } }
CloseableIterator
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.io.network.api.serialization; import org.apache.flink.core.memory.MemorySegment; import org.apache.flink.runtime.io.network. [MASK] .Buffer; import org.apache.flink.testutils.junit.utils.TempDirUtils; import org.apache.flink.util.CloseableIterator; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; import java.io.File; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; import java.util.Random; import static org.apache.flink.core.memory.MemorySegmentFactory.wrap; import static org.apache.flink.runtime.io.network.api.serialization.SpillingAdaptiveSpanningRecordDeserializer.LENGTH_BYTES; import static org.assertj.core.api.Assertions.assertThat; /** {@link SpanningWrapper} test. 
*/ class SpanningWrapperTest { private static final Random random = new Random(); @TempDir private Path folder; @Test void testLargeUnconsumedSegment() throws Exception { int recordLen = 100; int firstChunk = (int) (recordLen * .9); int spillingThreshold = (int) (firstChunk * .9); byte[] record1 = recordBytes(recordLen); byte[] record2 = recordBytes(recordLen * 2); File canNotEecutableFile = TempDirUtils.newFolder(folder); canNotEecutableFile.setExecutable(false); // Always pick 'canNotEecutableFile' first as the Spilling Channel TmpDir. Thus trigger an // IOException. SpanningWrapper spanningWrapper = new SpanningWrapper( new String[] { TempDirUtils.newFolder(folder).getAbsolutePath(), canNotEecutableFile.getAbsolutePath() + File.separator + "pathdonotexit" }, spillingThreshold, recordLen); spanningWrapper.transferFrom(wrapNonSpanning(record1, firstChunk), recordLen); spanningWrapper.addNextChunkFromMemorySegment( wrap(record1), firstChunk, recordLen - firstChunk + LENGTH_BYTES); spanningWrapper.addNextChunkFromMemorySegment(wrap(record2), 0, record2.length); CloseableIterator<Buffer> unconsumedSegment = spanningWrapper.getUnconsumedSegment(); spanningWrapper .getInputView() .readFully(new byte[recordLen], 0, recordLen); // read out from file spanningWrapper.transferLeftOverTo(new NonSpanningWrapper()); // clear any leftover spanningWrapper.transferFrom( wrapNonSpanning(recordBytes(recordLen), recordLen), recordLen); // overwrite with new data canNotEecutableFile.setExecutable(true); assertThat(concat(record1, record2)).isEqualTo(toByteArray(unconsumedSegment)); } private byte[] recordBytes(int recordLen) { byte[] inputData = randomBytes(recordLen + LENGTH_BYTES); for (int i = 0; i < Integer.BYTES; i++) { inputData[Integer.BYTES - i - 1] = (byte) (recordLen >>> i * 8); } return inputData; } private NonSpanningWrapper wrapNonSpanning(byte[] bytes, int len) { NonSpanningWrapper nonSpanningWrapper = new NonSpanningWrapper(); MemorySegment segment = wrap(bytes); 
nonSpanningWrapper.initializeFromMemorySegment(segment, 0, len); nonSpanningWrapper .readInt(); // emulate read length performed in getNextRecord to move position return nonSpanningWrapper; } private byte[] toByteArray(CloseableIterator<Buffer> unconsumed) { final List<Buffer> [MASK] s = new ArrayList<>(); try { unconsumed.forEachRemaining( [MASK] s::add); byte[] result = new byte[ [MASK] s.stream().mapToInt(Buffer::readableBytes).sum()]; int offset = 0; for (Buffer [MASK] : [MASK] s) { int len = [MASK] .readableBytes(); [MASK] .getNioBuffer(0, len).get(result, offset, len); offset += len; } return result; } finally { [MASK] s.forEach(Buffer::recycleBuffer); } } private byte[] randomBytes(int length) { byte[] inputData = new byte[length]; random.nextBytes(inputData); return inputData; } private byte[] concat(byte[] input1, byte[] input2) { byte[] expected = new byte[input1.length + input2.length]; System.arraycopy(input1, 0, expected, 0, input1.length); System.arraycopy(input2, 0, expected, input1.length, input2.length); return expected; } }
buffer
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import 
com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.EvaluationContext; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.MemoizingEvaluator; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import net.starlark.java.eval.StarlarkSemantics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. 
*/ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator evaluator; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); BlazeDirectories directories = new BlazeDirectories( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories); Map<SkyFunctionName, SkyFunction> skyFunctions = new HashMap<>(); skyFunctions.put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache.NO_CACHE, externalFilesHelper)); skyFunctions.put(SkyFunctions.FILE, new FileFunction(pkgLocator, directories)); skyFunctions.put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction()); skyFunctions.put( SkyFunctions.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); skyFunctions.put(SkyFunctions.IGNORED_SUBDIRECTORIES, IgnoredSubdirectoriesFunction.NOOP); skyFunctions.put( SkyFunctions.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); skyFunctions.put(SkyFunctions.WORKSPACE_NAME, 
new TestWorkspaceNameFunction()); skyFunctions.put( SkyFunctions.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); evaluator = new InMemoryMemoizingEvaluator(skyFunctions, differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String relative) { return RootedPath.toRootedPath( artifact. [MASK] (). [MASK] (), artifact. [MASK] RelativePath().getRelative(relative)); } private static RootedPath siblingOf(Artifact artifact, String relative) { PathFragment parent = Preconditions.checkNotNull(artifact. [MASK] RelativePath().getParentDirectory()); return RootedPath.toRootedPath(artifact. [MASK] (). [MASK] (), parent.getRelative(relative)); } private void createFile(Path path, String... contents) throws Exception { if (!path.getParentDirectory().exists()) { scratch.dir(path.getParentDirectory().getPathString()); } scratch.file(path.getPathString(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact.getPath(), contents); } private RootedPath createFile(RootedPath path, String... 
contents) throws Exception { createFile(path.asPath(), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { EvaluationContext evaluationContext = EvaluationContext.newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return evaluator.evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink(PathFragment.create(from), to.getPath().asFragment()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink(PathFragment.create(from), to.asPath().asFragment()); } private FilesetOutputSymlink symlink(PathFragment from, PathFragment to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory.asFragment()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. 
(input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.targetPath(), input.relativeToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); 
assertThat(result.getError(key).getException()) .hasMessageThat() .contains("foo contains a directory"); } @Test public void testFileTraversalForDanglingSymlink() throws Exception { Artifact linkName = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf(linkName, "target.file"), "blah"); linkName.getPath().createSymbolicLink(PathFragment.create("target.file")); linkTarget.asPath().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ linkName, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different values (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class FingerprintTester { private final Map<String, Domain> domains; FingerprintTester(Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@Nullable String useAlternateFor) { Map<String, Object> values = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { values.put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return values; } public void doTest() throws Exception { Fingerprint fp = new Fingerprint(); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedInFingerprint) { assertWithMessage( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { assertWithMessage( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedInFingerprint; Object valueA; Object valueB; Domain(boolean includedInFingerprint, Object valueA, Object valueB) { this.includedInFingerprint = includedInFingerprint; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOfFingerprint(Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain notPartOfFingerprint(Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void testFingerprintOfFileTraversal() throws Exception { new FingerprintTester( ImmutableMap.<String, Domain>builder() .put("ownerLabel", notPartOfFingerprint("//bar")) .put("fileToTraverse", partOfFingerprint("foo/file.a", 
"bar/file.b")) .put("destPath", partOfFingerprint("out1", "out2")) .put("strictFilesetOutput", partOfFingerprint(true, false)) .put("permitDirectories", partOfFingerprint(true, false)) .buildOrThrow()) { @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), PathFragment.create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void testFingerprintOfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new FingerprintTester( ImmutableMap.of( "ownerLabel", notPartOfFingerprint("//bar"), "nestedArtifact", partOfFingerprint(nested1, nested2), "destDir", partOfFingerprint("out1", "out2"), "excludes", partOfFingerprint(ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), PathFragment.create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements SkyFunction { @Nullable @Override public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
getRoot
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import 
com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util. [MASK] ; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.EvaluationContext; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.MemoizingEvaluator; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import net.starlark.java.eval.StarlarkSemantics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. 
*/ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator evaluator; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); BlazeDirectories directories = new BlazeDirectories( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories); Map<SkyFunctionName, SkyFunction> skyFunctions = new HashMap<>(); skyFunctions.put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache.NO_CACHE, externalFilesHelper)); skyFunctions.put(SkyFunctions.FILE, new FileFunction(pkgLocator, directories)); skyFunctions.put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction()); skyFunctions.put( SkyFunctions.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); skyFunctions.put(SkyFunctions.IGNORED_SUBDIRECTORIES, IgnoredSubdirectoriesFunction.NOOP); skyFunctions.put( SkyFunctions.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); skyFunctions.put(SkyFunctions.WORKSPACE_NAME, 
new TestWorkspaceNameFunction()); skyFunctions.put( SkyFunctions.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); evaluator = new InMemoryMemoizingEvaluator(skyFunctions, differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String relative) { return RootedPath.toRootedPath( artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative(relative)); } private static RootedPath siblingOf(Artifact artifact, String relative) { PathFragment parent = Preconditions.checkNotNull(artifact.getRootRelativePath().getParentDirectory()); return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative(relative)); } private void createFile(Path path, String... contents) throws Exception { if (!path.getParentDirectory().exists()) { scratch.dir(path.getParentDirectory().getPathString()); } scratch.file(path.getPathString(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact.getPath(), contents); } private RootedPath createFile(RootedPath path, String... 
contents) throws Exception { createFile(path.asPath(), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { EvaluationContext evaluationContext = EvaluationContext.newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return evaluator.evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink(PathFragment.create(from), to.getPath().asFragment()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink(PathFragment.create(from), to.asPath().asFragment()); } private FilesetOutputSymlink symlink(PathFragment from, PathFragment to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory.asFragment()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. 
(input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.targetPath(), input.relativeToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); 
assertThat(result.getError(key).getException()) .hasMessageThat() .contains("foo contains a directory"); } @Test public void testFileTraversalForDanglingSymlink() throws Exception { Artifact linkName = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf(linkName, "target.file"), "blah"); linkName.getPath().createSymbolicLink(PathFragment.create("target.file")); linkTarget.asPath().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ linkName, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different values (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class [MASK] Tester { private final Map<String, Domain> domains; [MASK] Tester(Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@Nullable String useAlternateFor) { Map<String, Object> values = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { values.put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return values; } public void doTest() throws Exception { [MASK] fp = new [MASK] (); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedIn [MASK] ) { assertWithMessage( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { assertWithMessage( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedIn [MASK] ; Object valueA; Object valueB; Domain(boolean includedIn [MASK] , Object valueA, Object valueB) { this.includedIn [MASK] = includedIn [MASK] ; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOf [MASK] (Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain notPartOf [MASK] (Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void test [MASK] OfFileTraversal() throws Exception { new [MASK] Tester( ImmutableMap.<String, Domain>builder() .put("ownerLabel", notPartOf [MASK] ("//bar")) .put("fileToTraverse", partOf [MASK] ("foo/file.a", "bar/file.b")) .put("destPath", partOf [MASK] 
("out1", "out2")) .put("strictFilesetOutput", partOf [MASK] (true, false)) .put("permitDirectories", partOf [MASK] (true, false)) .buildOrThrow()) { @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), PathFragment.create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void test [MASK] OfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new [MASK] Tester( ImmutableMap.of( "ownerLabel", notPartOf [MASK] ("//bar"), "nestedArtifact", partOf [MASK] (nested1, nested2), "destDir", partOf [MASK] ("out1", "out2"), "excludes", partOf [MASK] (ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), PathFragment.create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements SkyFunction { @Nullable @Override public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
Fingerprint
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ package org.elasticsearch.xpack.esql.plugin; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.compute.EsqlRefCountingListener; import org.elasticsearch.compute.operator.DriverProfile; import org.elasticsearch.compute.operator.ResponseHeadersCollector; import org.elasticsearch.core.Releasable; import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; import java.util.Collections; import java.util.List; /** * A variant of {@link RefCountingListener} with the following differences: * 1. Automatically cancels sub tasks on failure (via runOnTaskFailure) * 2. Collects driver profiles from sub tasks. * 3. Collects response headers from sub tasks, specifically warnings emitted during compute * 4. Collects failures and returns the most appropriate exception to the caller. */ final class ComputeListener implements Releasable { private final EsqlRefCountingListener refs; private final List<DriverProfile> [MASK] ; private final ResponseHeadersCollector responseHeaders; private final Runnable runOnFailure; ComputeListener(ThreadPool threadPool, Runnable runOnFailure, ActionListener<List<DriverProfile>> delegate) { this.runOnFailure = runOnFailure; this.responseHeaders = new ResponseHeadersCollector(threadPool.getThreadContext()); this. 
[MASK] = Collections.synchronizedList(new ArrayList<>()); // listener that executes after all the sub-listeners refs (created via acquireCompute) have completed this.refs = new EsqlRefCountingListener(delegate.delegateFailure((l, ignored) -> { responseHeaders.finish(); delegate.onResponse( [MASK] .stream().toList()); })); } /** * Acquires a new listener that doesn't collect result */ ActionListener<Void> acquireAvoid() { return refs.acquire().delegateResponse((l, e) -> { try { runOnFailure.run(); } finally { l.onFailure(e); } }); } /** * Acquires a new listener that collects compute result. This listener will also collect warnings emitted during compute */ ActionListener<List<DriverProfile>> acquireCompute() { final ActionListener<Void> delegate = acquireAvoid(); return ActionListener.wrap(profiles -> { responseHeaders.collect(); if (profiles != null && profiles.isEmpty() == false) { [MASK] .addAll(profiles); } delegate.onResponse(null); }, e -> { responseHeaders.collect(); delegate.onFailure(e); }); } @Override public void close() { refs.close(); } }
collectedProfiles
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import 
com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.EvaluationContext; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.MemoizingEvaluator; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import net.starlark.java.eval.StarlarkSemantics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. 
*/ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator evaluator; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); BlazeDirectories directories = new BlazeDirectories( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories); Map<SkyFunctionName, SkyFunction> skyFunctions = new HashMap<>(); skyFunctions.put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache.NO_CACHE, externalFilesHelper)); skyFunctions.put(SkyFunctions.FILE, new FileFunction(pkgLocator, directories)); skyFunctions.put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction()); skyFunctions.put( SkyFunctions.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); skyFunctions.put(SkyFunctions.IGNORED_SUBDIRECTORIES, IgnoredSubdirectoriesFunction.NOOP); skyFunctions.put( SkyFunctions.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); skyFunctions.put(SkyFunctions.WORKSPACE_NAME, 
new TestWorkspaceNameFunction()); skyFunctions.put( SkyFunctions.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); evaluator = new InMemoryMemoizingEvaluator(skyFunctions, differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String relative) { return RootedPath.toRootedPath( artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative(relative)); } private static RootedPath siblingOf(Artifact artifact, String relative) { PathFragment parent = Preconditions.checkNotNull(artifact.getRootRelativePath().getParentDirectory()); return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative(relative)); } private void createFile(Path path, String... contents) throws Exception { if (!path.getParentDirectory().exists()) { scratch.dir(path.getParentDirectory(). [MASK] String()); } scratch.file(path. [MASK] String(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact. [MASK] (), contents); } private RootedPath createFile(RootedPath path, String... 
contents) throws Exception { createFile(path.asPath(), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { EvaluationContext evaluationContext = EvaluationContext.newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return evaluator.evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink(PathFragment.create(from), to. [MASK] ().asFragment()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink(PathFragment.create(from), to.asPath().asFragment()); } private FilesetOutputSymlink symlink(PathFragment from, PathFragment to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory.asFragment()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. 
(input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.tar [MASK] (), input.relativeToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); 
assertThat(result.getError(key).getException()) .hasMessageThat() .contains("foo contains a directory"); } @Test public void testFileTraversalForDanglingSymlink() throws Exception { Artifact linkName = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf(linkName, "target.file"), "blah"); linkName. [MASK] ().createSymbolicLink(PathFragment.create("target.file")); linkTarget.asPath().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ linkName, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different values (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class FingerprintTester { private final Map<String, Domain> domains; FingerprintTester(Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@Nullable String useAlternateFor) { Map<String, Object> values = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { values.put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return values; } public void doTest() throws Exception { Fingerprint fp = new Fingerprint(); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedInFingerprint) { assertWithMessage( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { assertWithMessage( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedInFingerprint; Object valueA; Object valueB; Domain(boolean includedInFingerprint, Object valueA, Object valueB) { this.includedInFingerprint = includedInFingerprint; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOfFingerprint(Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain notPartOfFingerprint(Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void testFingerprintOfFileTraversal() throws Exception { new FingerprintTester( ImmutableMap.<String, Domain>builder() .put("ownerLabel", notPartOfFingerprint("//bar")) .put("fileToTraverse", partOfFingerprint("foo/file.a", 
"bar/file.b")) .put("destPath", partOfFingerprint("out1", "out2")) .put("strictFilesetOutput", partOfFingerprint(true, false)) .put("permitDirectories", partOfFingerprint(true, false)) .buildOrThrow()) { @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), PathFragment.create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void testFingerprintOfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new FingerprintTester( ImmutableMap.of( "ownerLabel", notPartOfFingerprint("//bar"), "nestedArtifact", partOfFingerprint(nested1, nested2), "destDir", partOfFingerprint("out1", "out2"), "excludes", partOfFingerprint(ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), PathFragment.create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements SkyFunction { @Nullable @Override public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
getPath
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import 
com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.EvaluationContext; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.MemoizingEvaluator; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import net.starlark.java.eval.StarlarkSemantics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. 
*/ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator evaluator; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); BlazeDirectories directories = new BlazeDirectories( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories); Map<SkyFunctionName, SkyFunction> skyFunctions = new HashMap<>(); skyFunctions.put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache.NO_CACHE, externalFilesHelper)); skyFunctions.put(SkyFunctions.FILE, new FileFunction(pkgLocator, directories)); skyFunctions.put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction()); skyFunctions.put( SkyFunctions.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); skyFunctions.put(SkyFunctions.IGNORED_SUBDIRECTORIES, IgnoredSubdirectoriesFunction.NOOP); skyFunctions.put( SkyFunctions.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); skyFunctions.put(SkyFunctions.WORKSPACE_NAME, 
new TestWorkspaceNameFunction()); skyFunctions.put( SkyFunctions.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); evaluator = new InMemoryMemoizingEvaluator(skyFunctions, differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String [MASK] ) { return RootedPath.toRootedPath( artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative( [MASK] )); } private static RootedPath siblingOf(Artifact artifact, String [MASK] ) { PathFragment parent = Preconditions.checkNotNull(artifact.getRootRelativePath().getParentDirectory()); return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative( [MASK] )); } private void createFile(Path path, String... contents) throws Exception { if (!path.getParentDirectory().exists()) { scratch.dir(path.getParentDirectory().getPathString()); } scratch.file(path.getPathString(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact.getPath(), contents); } private RootedPath createFile(RootedPath path, String... 
contents) throws Exception { createFile(path.asPath(), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { EvaluationContext evaluationContext = EvaluationContext.newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return evaluator.evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink(PathFragment.create(from), to.getPath().asFragment()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink(PathFragment.create(from), to.asPath().asFragment()); } private FilesetOutputSymlink symlink(PathFragment from, PathFragment to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory.asFragment()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. (input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.targetPath(), input. 
[MASK] ToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); assertThat(result.getError(key).getException()) .hasMessageThat() .contains("foo contains a directory"); } @Test public void 
testFileTraversalForDanglingSymlink() throws Exception { Artifact linkName = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf(linkName, "target.file"), "blah"); linkName.getPath().createSymbolicLink(PathFragment.create("target.file")); linkTarget.asPath().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ linkName, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different values (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class FingerprintTester { private final Map<String, Domain> domains; FingerprintTester(Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@Nullable String useAlternateFor) { Map<String, Object> values = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { values.put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return values; } public void doTest() throws Exception { Fingerprint fp = new Fingerprint(); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedInFingerprint) { assertWithMessage( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { assertWithMessage( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedInFingerprint; Object valueA; Object valueB; Domain(boolean includedInFingerprint, Object valueA, Object valueB) { this.includedInFingerprint = includedInFingerprint; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOfFingerprint(Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain notPartOfFingerprint(Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void testFingerprintOfFileTraversal() throws Exception { new FingerprintTester( ImmutableMap.<String, Domain>builder() .put("ownerLabel", notPartOfFingerprint("//bar")) .put("fileToTraverse", partOfFingerprint("foo/file.a", 
"bar/file.b")) .put("destPath", partOfFingerprint("out1", "out2")) .put("strictFilesetOutput", partOfFingerprint(true, false)) .put("permitDirectories", partOfFingerprint(true, false)) .buildOrThrow()) { @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), PathFragment.create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void testFingerprintOfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new FingerprintTester( ImmutableMap.of( "ownerLabel", notPartOfFingerprint("//bar"), "nestedArtifact", partOfFingerprint(nested1, nested2), "destDir", partOfFingerprint("out1", "out2"), "excludes", partOfFingerprint(ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), PathFragment.create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements SkyFunction { @Nullable @Override public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
relative
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.io.network.api.serialization; import org.apache.flink.core.memory.MemorySegment; import org.apache.flink.runtime.io.network.buffer.Buffer; import org.apache.flink.testutils.junit.utils.TempDirUtils; import org.apache.flink.util.CloseableIterator; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; import java.io.File; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; import java.util.Random; import static org.apache.flink.core.memory.MemorySegmentFactory.wrap; import static org.apache.flink.runtime.io.network.api.serialization.SpillingAdaptiveSpanningRecordDeserializer.LENGTH_BYTES; import static org.assertj.core.api.Assertions.assertThat; /** {@link SpanningWrapper} test. 
*/ class SpanningWrapperTest { private static final Random random = new Random(); @TempDir private Path folder; @Test void testLargeUnconsumedSegment() throws Exception { int recordLen = 100; int [MASK] = (int) (recordLen * .9); int spillingThreshold = (int) ( [MASK] * .9); byte[] record1 = recordBytes(recordLen); byte[] record2 = recordBytes(recordLen * 2); File canNotEecutableFile = TempDirUtils.newFolder(folder); canNotEecutableFile.setExecutable(false); // Always pick 'canNotEecutableFile' first as the Spilling Channel TmpDir. Thus trigger an // IOException. SpanningWrapper spanningWrapper = new SpanningWrapper( new String[] { TempDirUtils.newFolder(folder).getAbsolutePath(), canNotEecutableFile.getAbsolutePath() + File.separator + "pathdonotexit" }, spillingThreshold, recordLen); spanningWrapper.transferFrom(wrapNonSpanning(record1, [MASK] ), recordLen); spanningWrapper.addNextChunkFromMemorySegment( wrap(record1), [MASK] , recordLen - [MASK] + LENGTH_BYTES); spanningWrapper.addNextChunkFromMemorySegment(wrap(record2), 0, record2.length); CloseableIterator<Buffer> unconsumedSegment = spanningWrapper.getUnconsumedSegment(); spanningWrapper .getInputView() .readFully(new byte[recordLen], 0, recordLen); // read out from file spanningWrapper.transferLeftOverTo(new NonSpanningWrapper()); // clear any leftover spanningWrapper.transferFrom( wrapNonSpanning(recordBytes(recordLen), recordLen), recordLen); // overwrite with new data canNotEecutableFile.setExecutable(true); assertThat(concat(record1, record2)).isEqualTo(toByteArray(unconsumedSegment)); } private byte[] recordBytes(int recordLen) { byte[] inputData = randomBytes(recordLen + LENGTH_BYTES); for (int i = 0; i < Integer.BYTES; i++) { inputData[Integer.BYTES - i - 1] = (byte) (recordLen >>> i * 8); } return inputData; } private NonSpanningWrapper wrapNonSpanning(byte[] bytes, int len) { NonSpanningWrapper nonSpanningWrapper = new NonSpanningWrapper(); MemorySegment segment = wrap(bytes); 
nonSpanningWrapper.initializeFromMemorySegment(segment, 0, len); nonSpanningWrapper .readInt(); // emulate read length performed in getNextRecord to move position return nonSpanningWrapper; } private byte[] toByteArray(CloseableIterator<Buffer> unconsumed) { final List<Buffer> buffers = new ArrayList<>(); try { unconsumed.forEachRemaining(buffers::add); byte[] result = new byte[buffers.stream().mapToInt(Buffer::readableBytes).sum()]; int offset = 0; for (Buffer buffer : buffers) { int len = buffer.readableBytes(); buffer.getNioBuffer(0, len).get(result, offset, len); offset += len; } return result; } finally { buffers.forEach(Buffer::recycleBuffer); } } private byte[] randomBytes(int length) { byte[] inputData = new byte[length]; random.nextBytes(inputData); return inputData; } private byte[] concat(byte[] input1, byte[] input2) { byte[] expected = new byte[input1.length + input2.length]; System.arraycopy(input1, 0, expected, 0, input1.length); System.arraycopy(input2, 0, expected, input1.length, input2.length); return expected; } }
firstChunk
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.qbao.cat.plugin.common; import java.util. [MASK] ; import org.aspectj.lang.ProceedingJoinPoint; import org.aspectj.lang.annotation.Around; import org.aspectj.lang.annotation.Aspect; import org.aspectj.lang.annotation.Pointcut; import com.dianping.cat.Cat; import com.dianping.cat.message.Transaction; import com.qbao.cat.plugin.DefaultPluginTemplate; @Aspect public abstract class CommonPluginTemplate extends DefaultPluginTemplate { @Override @Pointcut public void scope() {} @Override @Around(POINTCUT_NAME) public Object doAround(ProceedingJoinPoint pjp) throws Throwable { return super.doAround(pjp); } @Override protected Transaction beginLog(ProceedingJoinPoint pjp) { StringBuilder type = new StringBuilder(); String packageStr = pjp.getSignature().getDeclaringType().getPackage().getName(); [MASK] st = new [MASK] (packageStr, "."); for(int i=0;i<2;i++){ type.append(st.nextToken()); type.append("."); } type.append("Method"); Transaction transaction = Cat.newTransaction(type.toString(),pjp.getSignature().toString()); return transaction; } @Override protected void endLog(Transaction transaction, Object retVal, Object... params) {} }
StringTokenizer
package com.baeldung.bufferedreader; import org.junit.Test; import java.io.BufferedReader; import java.io.IOException; import java.io. [MASK] ; import java.nio.file.Files; import java.nio.file.Paths; import static org.junit.Assert.*; public class BufferedReaderUnitTest { private static final String FILE_PATH = "src/main/resources/input.txt"; @Test public void givenBufferedReader_whenSkipUnderscores_thenOk() throws IOException { StringBuilder result = new StringBuilder(); try (BufferedReader reader = new BufferedReader(new [MASK] ("1__2__3__4__5"))) { int value; while((value = reader.read()) != -1) { result.append((char) value); reader.skip(2L); } } assertEquals("12345", result.toString()); } @Test public void givenBufferedReader_whenSkipsWhitespacesAtBeginning_thenOk() throws IOException { String result; try (BufferedReader reader = new BufferedReader(new [MASK] (" Lorem ipsum dolor sit amet."))) { do { reader.mark(1); } while(Character.isWhitespace(reader.read())); reader.reset(); result = reader.readLine(); } assertEquals("Lorem ipsum dolor sit amet.", result); } @Test public void whenCreatesNewBufferedReader_thenOk() throws IOException { try(BufferedReader reader = Files.newBufferedReader(Paths.get(FILE_PATH))) { assertNotNull(reader); assertTrue(reader.ready()); } } }
StringReader
/* * Copyright (c) 2014, 2024, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package stream; import static jaxp.library.JAXPTestUtilities.USER_DIR; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.InputStream; import java.io.OutputStream; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLOutputFactory; import javax.xml.stream. [MASK] ; import javax.xml.stream.XMLStreamWriter; import org.testng.Assert; import org.testng.annotations.Test; /* * @test * @bug 6688002 * @library /javax/xml/jaxp/libs /javax/xml/jaxp/unittest * @run testng/othervm stream.Bug6688002Test * @summary Test single instance of XMLOutputFactory/XMLInputFactory create multiple Writer/Readers in parallel. 
*/ public class Bug6688002Test { private static final XMLOutputFactory outputFactory = XMLOutputFactory.newInstance(); private static final XMLInputFactory inputFactory = XMLInputFactory.newInstance(); private static final int NO_THREADS = 3; @Test public void testMultiThread() throws Exception { Thread[] threads = new Thread[NO_THREADS]; for (int i = 0; i < NO_THREADS; i++) { threads[i] = new Thread(new MyRunnable(i)); } for (int i = 0; i < NO_THREADS; i++) { threads[i].start(); } for (int i = 0; i < NO_THREADS; i++) { threads[i].join(); } } public class MyRunnable implements Runnable { final String no; MyRunnable(int no) { this.no = String.valueOf(no); } public void run() { try { FileOutputStream fos = new FileOutputStream(USER_DIR + no); XMLStreamWriter w = getWriter(fos); // System.out.println("Writer="+w+" Thread="+Thread.currentThread()); w.writeStartDocument(); w.writeStartElement("hello"); for (int j = 0; j < 50; j++) { w.writeStartElement("a" + j); w.writeEndElement(); } w.writeEndElement(); w.writeEndDocument(); w.close(); fos.close(); FileInputStream fis = new FileInputStream(USER_DIR + no); [MASK] r = getReader(fis); while (r.hasNext()) { r.next(); } r.close(); fis.close(); } catch (Exception e) { Assert.fail(e.getMessage()); } } } public static/* synchronized */ [MASK] getReader(InputStream is) throws Exception { return inputFactory.create [MASK] (is); // return [MASK] Factory.create(null, is, true); } public static/* synchronized */XMLStreamWriter getWriter(OutputStream os) throws Exception { return outputFactory.createXMLStreamWriter(os); // return XMLStreamWriterFactory.createXMLStreamWriter(os); } }
XMLStreamReader
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import 
com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.EvaluationContext; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.MemoizingEvaluator; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import net.starlark.java.eval.StarlarkSemantics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. 
*/ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator evaluator; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); BlazeDirectories directories = new BlazeDirectories( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories); Map<SkyFunctionName, SkyFunction> skyFunctions = new HashMap<>(); skyFunctions.put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache.NO_CACHE, externalFilesHelper)); skyFunctions.put(SkyFunctions.FILE, new FileFunction(pkgLocator, directories)); skyFunctions.put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction()); skyFunctions.put( SkyFunctions.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); skyFunctions.put(SkyFunctions.IGNORED_SUBDIRECTORIES, IgnoredSubdirectoriesFunction.NOOP); skyFunctions.put( SkyFunctions.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); skyFunctions.put(SkyFunctions.WORKSPACE_NAME, 
new TestWorkspaceNameFunction()); skyFunctions.put( SkyFunctions.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); evaluator = new InMemoryMemoizingEvaluator(skyFunctions, differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String relative) { return RootedPath.toRootedPath( artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative(relative)); } private static RootedPath siblingOf(Artifact artifact, String relative) { PathFragment parent = Preconditions.checkNotNull(artifact.getRootRelativePath().getParentDirectory()); return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative(relative)); } private void createFile(Path path, String... contents) throws Exception { if (!path.getParentDirectory().exists()) { scratch.dir(path.getParentDirectory().getPathString()); } scratch.file(path.getPathString(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact.getPath(), contents); } private RootedPath createFile(RootedPath path, String... 
contents) throws Exception { createFile(path.asPath(), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { EvaluationContext evaluationContext = EvaluationContext.newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return evaluator.evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink(PathFragment.create(from), to.getPath().asFragment()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink(PathFragment.create(from), to.asPath().asFragment()); } private FilesetOutputSymlink symlink(PathFragment from, PathFragment to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory.asFragment()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. 
(input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.targetPath(), input.relativeToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); 
assertThat(result.getError(key).getException()) .hasMessageThat() .contains("foo contains a directory"); } @Test public void testFileTraversalForDanglingSymlink() throws Exception { Artifact linkName = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf(linkName, "target.file"), "blah"); linkName.getPath().createSymbolicLink(PathFragment.create("target.file")); linkTarget.asPath().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ linkName, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different values (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class FingerprintTester { private final Map<String, Domain> domains; FingerprintTester(Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@Nullable String useAlternateFor) { Map<String, Object> values = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { values.put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return values; } public void doTest() throws Exception { Fingerprint fp = new Fingerprint(); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedInFingerprint) { assertWithMessage( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { assertWithMessage( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedInFingerprint; Object valueA; Object valueB; Domain(boolean includedInFingerprint, Object valueA, Object valueB) { this.includedInFingerprint = includedInFingerprint; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOfFingerprint(Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain [MASK] (Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void testFingerprintOfFileTraversal() throws Exception { new FingerprintTester( ImmutableMap.<String, Domain>builder() .put("ownerLabel", [MASK] ("//bar")) .put("fileToTraverse", partOfFingerprint("foo/file.a", "bar/file.b")) 
.put("destPath", partOfFingerprint("out1", "out2")) .put("strictFilesetOutput", partOfFingerprint(true, false)) .put("permitDirectories", partOfFingerprint(true, false)) .buildOrThrow()) { @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), PathFragment.create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void testFingerprintOfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new FingerprintTester( ImmutableMap.of( "ownerLabel", [MASK] ("//bar"), "nestedArtifact", partOfFingerprint(nested1, nested2), "destDir", partOfFingerprint("out1", "out2"), "excludes", partOfFingerprint(ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), PathFragment.create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements SkyFunction { @Nullable @Override public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
notPartOfFingerprint
package com.taobao.arthas.grpcweb.grpc.server.httpServer; import io.netty.channel.ChannelInitializer; import io.netty.channel.ChannelPipeline; import io.netty.channel.socket.SocketChannel; import io.netty.handler.codec.http.HttpObjectAggregator; import io.netty.handler.codec.http.HttpServerCodec; import io.netty.handler.stream.ChunkedWriteHandler; public class NettyHttpInitializer extends ChannelInitializer<SocketChannel> { private final String STATIC_LOCATION; public NettyHttpInitializer(String staticLocation) { this.STATIC_LOCATION = staticLocation; } @Override public void initChannel(SocketChannel ch) throws Exception { ChannelPipeline [MASK] = ch. [MASK] (); //将请求和应答消息编码或解码为HTTP消息 [MASK] .addLast(new HttpServerCodec()); //将HTTP消息的多个部分组合成一条完整的HTTP消息 [MASK] .addLast(new HttpObjectAggregator(64 * 1024)); [MASK] .addLast(new ChunkedWriteHandler()); [MASK] .addLast(new NettyHttpStaticFileHandler(this.STATIC_LOCATION)); } }
pipeline
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.io.network.api.serialization; import org.apache.flink.core.memory.MemorySegment; import org.apache.flink.runtime.io.network.buffer.Buffer; import org.apache.flink.testutils.junit.utils. [MASK] ; import org.apache.flink.util.CloseableIterator; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; import java.io.File; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; import java.util.Random; import static org.apache.flink.core.memory.MemorySegmentFactory.wrap; import static org.apache.flink.runtime.io.network.api.serialization.SpillingAdaptiveSpanningRecordDeserializer.LENGTH_BYTES; import static org.assertj.core.api.Assertions.assertThat; /** {@link SpanningWrapper} test. 
*/ class SpanningWrapperTest { private static final Random random = new Random(); @TempDir private Path folder; @Test void testLargeUnconsumedSegment() throws Exception { int recordLen = 100; int firstChunk = (int) (recordLen * .9); int spillingThreshold = (int) (firstChunk * .9); byte[] record1 = recordBytes(recordLen); byte[] record2 = recordBytes(recordLen * 2); File canNotEecutableFile = [MASK] .newFolder(folder); canNotEecutableFile.setExecutable(false); // Always pick 'canNotEecutableFile' first as the Spilling Channel TmpDir. Thus trigger an // IOException. SpanningWrapper spanningWrapper = new SpanningWrapper( new String[] { [MASK] .newFolder(folder).getAbsolutePath(), canNotEecutableFile.getAbsolutePath() + File.separator + "pathdonotexit" }, spillingThreshold, recordLen); spanningWrapper.transferFrom(wrapNonSpanning(record1, firstChunk), recordLen); spanningWrapper.addNextChunkFromMemorySegment( wrap(record1), firstChunk, recordLen - firstChunk + LENGTH_BYTES); spanningWrapper.addNextChunkFromMemorySegment(wrap(record2), 0, record2.length); CloseableIterator<Buffer> unconsumedSegment = spanningWrapper.getUnconsumedSegment(); spanningWrapper .getInputView() .readFully(new byte[recordLen], 0, recordLen); // read out from file spanningWrapper.transferLeftOverTo(new NonSpanningWrapper()); // clear any leftover spanningWrapper.transferFrom( wrapNonSpanning(recordBytes(recordLen), recordLen), recordLen); // overwrite with new data canNotEecutableFile.setExecutable(true); assertThat(concat(record1, record2)).isEqualTo(toByteArray(unconsumedSegment)); } private byte[] recordBytes(int recordLen) { byte[] inputData = randomBytes(recordLen + LENGTH_BYTES); for (int i = 0; i < Integer.BYTES; i++) { inputData[Integer.BYTES - i - 1] = (byte) (recordLen >>> i * 8); } return inputData; } private NonSpanningWrapper wrapNonSpanning(byte[] bytes, int len) { NonSpanningWrapper nonSpanningWrapper = new NonSpanningWrapper(); MemorySegment segment = wrap(bytes); 
nonSpanningWrapper.initializeFromMemorySegment(segment, 0, len); nonSpanningWrapper .readInt(); // emulate read length performed in getNextRecord to move position return nonSpanningWrapper; } private byte[] toByteArray(CloseableIterator<Buffer> unconsumed) { final List<Buffer> buffers = new ArrayList<>(); try { unconsumed.forEachRemaining(buffers::add); byte[] result = new byte[buffers.stream().mapToInt(Buffer::readableBytes).sum()]; int offset = 0; for (Buffer buffer : buffers) { int len = buffer.readableBytes(); buffer.getNioBuffer(0, len).get(result, offset, len); offset += len; } return result; } finally { buffers.forEach(Buffer::recycleBuffer); } } private byte[] randomBytes(int length) { byte[] inputData = new byte[length]; random.nextBytes(inputData); return inputData; } private byte[] concat(byte[] input1, byte[] input2) { byte[] expected = new byte[input1.length + input2.length]; System.arraycopy(input1, 0, expected, 0, input1.length); System.arraycopy(input2, 0, expected, input1.length, input2.length); return expected; } }
TempDirUtils
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ghidra.util.datastruct; import java.io.Serializable; import java.util.Arrays; /** * The BitTree class maintains a set of ordered keys between the values of * 0 and N. It can quickly (O(log(n))) add keys, remove keys, find the next key * greater than some value , and find the prev key less than some value. It can * determine if a key is in the set in O(1) time. This implementation has been * limited to short keys so that it can implement the ShortKeySet interface. */ public class BitTree implements ShortKeySet, Serializable { private final static long serialVersionUID = 1; private int size; // The maximum number of keys in the set. Keys range from 0 to size-1 private int power2; // The next power of 2 that is greater than size. private int[] bits; // Array of bits used to represent a tree of binary values. A bit at // position N will have a left child at 2*N and a right child at 2*N+1. // Its parent position will be at N/2. // A bit is on if any bits in its subtree are on. Leaf bits correspond // directly to keys and are on if the key is in the set. private int numKeys; // The current number of keys in the set. // masks for seting and clearing bits within an 32 bit integer. 
private static final int[] [MASK] = { 0x00000001,0x00000002,0x00000004,0x00000008, 0x00000010,0x00000020,0x00000040,0x00000080, 0x00000100,0x00000200,0x00000400,0x00000800, 0x00001000,0x00002000,0x00004000,0x00008000, 0x00010000,0x00020000,0x00040000,0x00080000, 0x00100000,0x00200000,0x00400000,0x00800000, 0x01000000,0x02000000,0x04000000,0x08000000, 0x10000000,0x20000000,0x40000000,0x80000000 }; private static final int[] clearMask={ 0xfffffffe,0xfffffffd,0xfffffffb,0xfffffff7, 0xffffffef,0xffffffdf,0xffffffbf,0xffffff7f, 0xfffffeff,0xfffffdff,0xfffffbff,0xfffff7ff, 0xffffefff,0xffffdfff,0xffffbfff,0xffff7fff, 0xfffeffff,0xfffdffff,0xfffbffff,0xfff7ffff, 0xffefffff,0xffdfffff,0xffbfffff,0xff7fffff, 0xfeffffff,0xfdffffff,0xfbffffff,0xf7ffffff, 0xefffffff,0xdfffffff,0xbfffffff,0x7fffffff }; /** * The BitTree constructor takes the maximum key value. The legal * keys for this set range from 0 to maxKey. * @param maxKey the maximum key that will ever be put into this BitTree. */ public BitTree(short maxKey) { this(maxKey,false); } /** * The BitTree constructor takes the maximum key value. The legal * keys for this set range from 0 to maxKey. * @param maxKey the maximum key value. * @param isFull if true, then the set is initilized to contain all legal keys. */ public BitTree(short maxKey, boolean isFull) { this.size = maxKey+1; // find the next power of 2 greater than or equal to n. power2 = 2; int sz = maxKey+1; while (sz > 1) { sz /= 2; power2 *= 2; } // The number of bits need to store the tree is 2 times the number of keys. // Since we are storing the bits in 32 bit integers we need N/16 integers // to store the bits. int nInts = power2/16; // must have at least 1 if (nInts < 1) { nInts = 1; } bits = new int[nInts]; if (isFull) { Arrays.fill(bits,0xffffffff); numKeys = this.size; } } /** * Removes all keys from the set. */ public void removeAll() { Arrays.fill(bits,0); numKeys = 0; } /** * Returns the number of keys currently in the set. 
*/ public int size() { return numKeys; } /** * Adds a key to the set. * @param key to be added. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public void put(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // The first "power2" number of bits are used for internal tree nodes. The // leaf nodes start at index "power2". int nodeIndex = power2+key; // set the leaf bit on to indicate that the key is in the set. // if the bit is already on (The key is already in the set), then just return. if (!setBit(nodeIndex)) { return; } // increment the number of keys in the set. numKeys++; // go up the tree setting each parent bit to "on" while(nodeIndex != 1) { // compute parent index. nodeIndex /= 2; // if any parent bit is already on, then all its parents are already on, // so were done. if (!setBit(nodeIndex)) { return; } } } /** * Removes the key from the set. * @param key The key to remove. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public boolean remove(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // compute the leaf node index. int nodeIndex = power2+key; // clear the leaf bit to indicate that the key is not in the set. // if it is already "off", then we don't have to do anything if (!clearBit(nodeIndex)) { return false; } // decrement the number of keys in the set numKeys--; // traverse up the tree, clearing any parent nodes if all its child // nodes are "off". while(nodeIndex != 1) { nodeIndex /= 2; if (!isBitSet(nodeIndex)) { return true; } if (isBitSet(nodeIndex*2) || isBitSet(nodeIndex*2+1)) { return true; } clearBit(nodeIndex); } return true; } /** * Determines if a given key is in the set. * @param key the key to check if it is in this set. * @return true if the key is in the set. 
*/ public boolean containsKey(short key) { if ((key < 0) || (key >= size)) { return false; } return isBitSet(power2+key); } /** * finds the next key that is in the set that is greater than the given key. * @param key from which to search forward. * @return the next key greater than the given key or -1 if there is no key * greater than the given key. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public short getNext(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // compute leaf node. int nodeIndex = key + power2; // while we are not at the root, search upward until we find a right // sibling that is "on". while(nodeIndex != 1) { // see if we are odd (i.e. the right child) int odd = nodeIndex % 2; // if we are the left child see if my sibling on the right is on. // if so, then the next key must be in that subtree. if (odd == 0) { if (isBitSet(nodeIndex+1)) { // we found a right sibling that is "on", set nodeIndex to // that node. nodeIndex++; break; } } nodeIndex = nodeIndex/2; } // if we made it all the way up to the root node, then there is no key // greater than, so return -1; if (nodeIndex == 1) { return (short)-1; } // now that we found a right sibling that is "on", // follow the leftmost trail of "on" bits to an "on" leaf bit. This bit // represents the next key in the set. while (nodeIndex < power2) { nodeIndex *= 2; // if the left child is not on, then the right child must be "on". if (!isBitSet(nodeIndex)) { nodeIndex++; } } short nextKey = (short)(nodeIndex-power2); if (nextKey >= size) { nextKey = -1; } return nextKey; } /** * Finds the next key that is in the set that is less than the given key. * @param key the key to search before. * @return the next key less than the given key or -1 if there is no key * less than the given key. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. 
*/ public short getPrevious(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // find the leaf node for the given key. int nodeIndex = key + power2; // while we are not at the root, search upward until we find a left // sibling that is "on". while(nodeIndex != 1) { // check if we are a right node. int odd = nodeIndex % 2; // if we are the right child see if my sibling on the left is "on". // if so, then the previous key must be in that subtree. if (odd == 1) { if (isBitSet(nodeIndex-1)) { nodeIndex--; break; } } nodeIndex = nodeIndex/2; } // If we went all the way to the root then there is no previous key, return -1. if (nodeIndex == 1) { return (short)-1; } // follow the rightmost trail of "on" bits to an "on" leaf bit. This bit // represents the previous key in the set. while (nodeIndex < power2) { nodeIndex *= 2; if (isBitSet(nodeIndex+1)) { nodeIndex++; } } return (short)(nodeIndex-power2); } /** * Checks if the set is empty. * @return true if the set is empty. */ public boolean isEmpty() { return numKeys == 0; } /** * Returns the first (lowest) key in the set. */ public short getFirst() { // if the 0 key is in the set, then return it. if(containsKey((short)0)) { return (short)0; } // otherwise return the next key after 0. return getNext((short)0); } /** * Returns the last (highest) key in the set. */ public short getLast() { // if the highest possible key is in the set, return it. if(containsKey((short)(size-1))) { return (short)(size-1); } // otherwise return the next lowest key. return getPrevious((short)(size-1)); } /** * Sets the nth bit on. */ private boolean setBit(int n) { int intIndex = n >> 5; int maskIndex = n & 0x1f; int old = bits[intIndex]; return ((bits[intIndex] |= [MASK] [maskIndex]) != old); } /** * Sets the nth bit to off. 
*/ private boolean clearBit(int n) { int intIndex = n >> 5; int maskIndex = n & 0x1f; int old = bits[intIndex]; return ((bits[intIndex] &= clearMask[maskIndex]) != old); } /** * Tests if the nth bit is on. */ private boolean isBitSet(int n) { int intIndex = n >> 5; int maskIndex = n & 0x1f; return ((bits[intIndex] & [MASK] [maskIndex]) != 0); } }
setMask
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.qbao.cat.plugin.common; import java.util.StringTokenizer; import org.aspectj.lang.ProceedingJoinPoint; import org.aspectj.lang.annotation.Around; import org.aspectj.lang.annotation.Aspect; import org.aspectj.lang.annotation.Pointcut; import com.dianping.cat.Cat; import com.dianping.cat.message.Transaction; import com.qbao.cat.plugin.DefaultPluginTemplate; @Aspect public abstract class CommonPluginTemplate extends DefaultPluginTemplate { @ [MASK] @Pointcut public void scope() {} @ [MASK] @Around(POINTCUT_NAME) public Object doAround(ProceedingJoinPoint pjp) throws Throwable { return super.doAround(pjp); } @ [MASK] protected Transaction beginLog(ProceedingJoinPoint pjp) { StringBuilder type = new StringBuilder(); String packageStr = pjp.getSignature().getDeclaringType().getPackage().getName(); StringTokenizer st = new StringTokenizer(packageStr, "."); for(int i=0;i<2;i++){ type.append(st.nextToken()); type.append("."); } type.append("Method"); Transaction transaction = Cat.newTransaction(type.toString(),pjp.getSignature().toString()); return transaction; } @ [MASK] protected void endLog(Transaction transaction, Object retVal, Object... params) {} }
Override
/* * Copyright (c) 2014, 2024, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package stream; import static jaxp.library.JAXPTestUtilities.USER_DIR; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.InputStream; import java.io.OutputStream; import javax.xml.stream.XMLInputFactory; import javax.xml.stream. [MASK] ; import javax.xml.stream.XMLStreamReader; import javax.xml.stream.XMLStreamWriter; import org.testng.Assert; import org.testng.annotations.Test; /* * @test * @bug 6688002 * @library /javax/xml/jaxp/libs /javax/xml/jaxp/unittest * @run testng/othervm stream.Bug6688002Test * @summary Test single instance of [MASK] /XMLInputFactory create multiple Writer/Readers in parallel. 
*/ public class Bug6688002Test { private static final [MASK] outputFactory = [MASK] .newInstance(); private static final XMLInputFactory inputFactory = XMLInputFactory.newInstance(); private static final int NO_THREADS = 3; @Test public void testMultiThread() throws Exception { Thread[] threads = new Thread[NO_THREADS]; for (int i = 0; i < NO_THREADS; i++) { threads[i] = new Thread(new MyRunnable(i)); } for (int i = 0; i < NO_THREADS; i++) { threads[i].start(); } for (int i = 0; i < NO_THREADS; i++) { threads[i].join(); } } public class MyRunnable implements Runnable { final String no; MyRunnable(int no) { this.no = String.valueOf(no); } public void run() { try { FileOutputStream fos = new FileOutputStream(USER_DIR + no); XMLStreamWriter w = getWriter(fos); // System.out.println("Writer="+w+" Thread="+Thread.currentThread()); w.writeStartDocument(); w.writeStartElement("hello"); for (int j = 0; j < 50; j++) { w.writeStartElement("a" + j); w.writeEndElement(); } w.writeEndElement(); w.writeEndDocument(); w.close(); fos.close(); FileInputStream fis = new FileInputStream(USER_DIR + no); XMLStreamReader r = getReader(fis); while (r.hasNext()) { r.next(); } r.close(); fis.close(); } catch (Exception e) { Assert.fail(e.getMessage()); } } } public static/* synchronized */XMLStreamReader getReader(InputStream is) throws Exception { return inputFactory.createXMLStreamReader(is); // return XMLStreamReaderFactory.create(null, is, true); } public static/* synchronized */XMLStreamWriter getWriter(OutputStream os) throws Exception { return outputFactory.createXMLStreamWriter(os); // return XMLStreamWriterFactory.createXMLStreamWriter(os); } }
XMLOutputFactory
/* * Copyright 2002-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.beans.factory.xml; import org.jspecify.annotations.Nullable; import org.w3c.dom.Attr; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.springframework.beans.MutablePropertyValues; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.BeanDefinitionHolder; import org.springframework.beans.factory.config.RuntimeBeanReference; import org.springframework.core.Conventions; /** * Simple {@code NamespaceHandler} implementation that maps custom attributes * directly through to bean properties. An important point to note is that this * {@code NamespaceHandler} does not have a corresponding schema since there * is no way to know in advance all possible attribute names. * * <p>An example of the usage of this {@code NamespaceHandler} is shown below: * * <pre class="code"> * &lt;bean id=&quot;rob&quot; class=&quot;..TestBean&quot; p:name=&quot;Rob Harrop&quot; p:spouse-ref=&quot;sally&quot;/&gt;</pre> * * Here the '{@code p:name}' corresponds directly to the '{@code name}' * property on class '{@code TestBean}'. The '{@code p:spouse-ref}' * attributes corresponds to the '{@code spouse}' property and, rather * than being the concrete value, it contains the name of the bean that will * be injected into that property. 
* * @author Rob Harrop * @author Juergen Hoeller * @since 2.0 */ public class SimplePropertyNamespaceHandler implements NamespaceHandler { private static final String REF_SUFFIX = "-ref"; @ [MASK] public void init() { } @ [MASK] public @Nullable BeanDefinition parse(Element element, ParserContext parserContext) { parserContext.getReaderContext().error( "Class [" + getClass().getName() + "] does not support custom elements.", element); return null; } @ [MASK] public BeanDefinitionHolder decorate(Node node, BeanDefinitionHolder definition, ParserContext parserContext) { if (node instanceof Attr attr) { String propertyName = parserContext.getDelegate().getLocalName(attr); String propertyValue = attr.getValue(); MutablePropertyValues pvs = definition.getBeanDefinition().getPropertyValues(); if (pvs.contains(propertyName)) { parserContext.getReaderContext().error("Property '" + propertyName + "' is already defined using " + "both <property> and inline syntax. Only one approach may be used per property.", attr); } if (propertyName.endsWith(REF_SUFFIX)) { propertyName = propertyName.substring(0, propertyName.length() - REF_SUFFIX.length()); pvs.add(Conventions.attributeNameToPropertyName(propertyName), new RuntimeBeanReference(propertyValue)); } else { pvs.add(Conventions.attributeNameToPropertyName(propertyName), propertyValue); } } return definition; } }
Override
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ghidra.util.datastruct; import java.io.Serializable; import java.util.Arrays; /** * The BitTree class maintains a set of ordered keys between the values of * 0 and N. It can quickly (O(log(n))) add keys, remove keys, find the next key * greater than some value , and find the prev key less than some value. It can * determine if a key is in the set in O(1) time. This implementation has been * limited to short keys so that it can implement the ShortKeySet interface. */ public class BitTree implements ShortKeySet, Serializable { private final static long serialVersionUID = 1; private int size; // The maximum number of keys in the set. Keys range from 0 to size-1 private int power2; // The next power of 2 that is greater than size. private int[] bits; // Array of bits used to represent a tree of binary values. A bit at // position N will have a left child at 2*N and a right child at 2*N+1. // Its parent position will be at N/2. // A bit is on if any bits in its subtree are on. Leaf bits correspond // directly to keys and are on if the key is in the set. private int [MASK] ; // The current number of keys in the set. // masks for seting and clearing bits within an 32 bit integer. 
private static final int[] setMask = { 0x00000001,0x00000002,0x00000004,0x00000008, 0x00000010,0x00000020,0x00000040,0x00000080, 0x00000100,0x00000200,0x00000400,0x00000800, 0x00001000,0x00002000,0x00004000,0x00008000, 0x00010000,0x00020000,0x00040000,0x00080000, 0x00100000,0x00200000,0x00400000,0x00800000, 0x01000000,0x02000000,0x04000000,0x08000000, 0x10000000,0x20000000,0x40000000,0x80000000 }; private static final int[] clearMask={ 0xfffffffe,0xfffffffd,0xfffffffb,0xfffffff7, 0xffffffef,0xffffffdf,0xffffffbf,0xffffff7f, 0xfffffeff,0xfffffdff,0xfffffbff,0xfffff7ff, 0xffffefff,0xffffdfff,0xffffbfff,0xffff7fff, 0xfffeffff,0xfffdffff,0xfffbffff,0xfff7ffff, 0xffefffff,0xffdfffff,0xffbfffff,0xff7fffff, 0xfeffffff,0xfdffffff,0xfbffffff,0xf7ffffff, 0xefffffff,0xdfffffff,0xbfffffff,0x7fffffff }; /** * The BitTree constructor takes the maximum key value. The legal * keys for this set range from 0 to maxKey. * @param maxKey the maximum key that will ever be put into this BitTree. */ public BitTree(short maxKey) { this(maxKey,false); } /** * The BitTree constructor takes the maximum key value. The legal * keys for this set range from 0 to maxKey. * @param maxKey the maximum key value. * @param isFull if true, then the set is initilized to contain all legal keys. */ public BitTree(short maxKey, boolean isFull) { this.size = maxKey+1; // find the next power of 2 greater than or equal to n. power2 = 2; int sz = maxKey+1; while (sz > 1) { sz /= 2; power2 *= 2; } // The number of bits need to store the tree is 2 times the number of keys. // Since we are storing the bits in 32 bit integers we need N/16 integers // to store the bits. int nInts = power2/16; // must have at least 1 if (nInts < 1) { nInts = 1; } bits = new int[nInts]; if (isFull) { Arrays.fill(bits,0xffffffff); [MASK] = this.size; } } /** * Removes all keys from the set. */ public void removeAll() { Arrays.fill(bits,0); [MASK] = 0; } /** * Returns the number of keys currently in the set. 
*/ public int size() { return [MASK] ; } /** * Adds a key to the set. * @param key to be added. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public void put(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // The first "power2" number of bits are used for internal tree nodes. The // leaf nodes start at index "power2". int nodeIndex = power2+key; // set the leaf bit on to indicate that the key is in the set. // if the bit is already on (The key is already in the set), then just return. if (!setBit(nodeIndex)) { return; } // increment the number of keys in the set. [MASK] ++; // go up the tree setting each parent bit to "on" while(nodeIndex != 1) { // compute parent index. nodeIndex /= 2; // if any parent bit is already on, then all its parents are already on, // so were done. if (!setBit(nodeIndex)) { return; } } } /** * Removes the key from the set. * @param key The key to remove. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public boolean remove(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // compute the leaf node index. int nodeIndex = power2+key; // clear the leaf bit to indicate that the key is not in the set. // if it is already "off", then we don't have to do anything if (!clearBit(nodeIndex)) { return false; } // decrement the number of keys in the set [MASK] --; // traverse up the tree, clearing any parent nodes if all its child // nodes are "off". while(nodeIndex != 1) { nodeIndex /= 2; if (!isBitSet(nodeIndex)) { return true; } if (isBitSet(nodeIndex*2) || isBitSet(nodeIndex*2+1)) { return true; } clearBit(nodeIndex); } return true; } /** * Determines if a given key is in the set. * @param key the key to check if it is in this set. * @return true if the key is in the set. 
*/ public boolean containsKey(short key) { if ((key < 0) || (key >= size)) { return false; } return isBitSet(power2+key); } /** * finds the next key that is in the set that is greater than the given key. * @param key from which to search forward. * @return the next key greater than the given key or -1 if there is no key * greater than the given key. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. */ public short getNext(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // compute leaf node. int nodeIndex = key + power2; // while we are not at the root, search upward until we find a right // sibling that is "on". while(nodeIndex != 1) { // see if we are odd (i.e. the right child) int odd = nodeIndex % 2; // if we are the left child see if my sibling on the right is on. // if so, then the next key must be in that subtree. if (odd == 0) { if (isBitSet(nodeIndex+1)) { // we found a right sibling that is "on", set nodeIndex to // that node. nodeIndex++; break; } } nodeIndex = nodeIndex/2; } // if we made it all the way up to the root node, then there is no key // greater than, so return -1; if (nodeIndex == 1) { return (short)-1; } // now that we found a right sibling that is "on", // follow the leftmost trail of "on" bits to an "on" leaf bit. This bit // represents the next key in the set. while (nodeIndex < power2) { nodeIndex *= 2; // if the left child is not on, then the right child must be "on". if (!isBitSet(nodeIndex)) { nodeIndex++; } } short nextKey = (short)(nodeIndex-power2); if (nextKey >= size) { nextKey = -1; } return nextKey; } /** * Finds the next key that is in the set that is less than the given key. * @param key the key to search before. * @return the next key less than the given key or -1 if there is no key * less than the given key. * @exception IndexOutOfBoundsException if the given key is not * in the range [0, size-1]. 
*/ public short getPrevious(short key) { if ((key < 0) || (key >= size)) { throw new IndexOutOfBoundsException(); } // find the leaf node for the given key. int nodeIndex = key + power2; // while we are not at the root, search upward until we find a left // sibling that is "on". while(nodeIndex != 1) { // check if we are a right node. int odd = nodeIndex % 2; // if we are the right child see if my sibling on the left is "on". // if so, then the previous key must be in that subtree. if (odd == 1) { if (isBitSet(nodeIndex-1)) { nodeIndex--; break; } } nodeIndex = nodeIndex/2; } // If we went all the way to the root then there is no previous key, return -1. if (nodeIndex == 1) { return (short)-1; } // follow the rightmost trail of "on" bits to an "on" leaf bit. This bit // represents the previous key in the set. while (nodeIndex < power2) { nodeIndex *= 2; if (isBitSet(nodeIndex+1)) { nodeIndex++; } } return (short)(nodeIndex-power2); } /** * Checks if the set is empty. * @return true if the set is empty. */ public boolean isEmpty() { return [MASK] == 0; } /** * Returns the first (lowest) key in the set. */ public short getFirst() { // if the 0 key is in the set, then return it. if(containsKey((short)0)) { return (short)0; } // otherwise return the next key after 0. return getNext((short)0); } /** * Returns the last (highest) key in the set. */ public short getLast() { // if the highest possible key is in the set, return it. if(containsKey((short)(size-1))) { return (short)(size-1); } // otherwise return the next lowest key. return getPrevious((short)(size-1)); } /** * Sets the nth bit on. */ private boolean setBit(int n) { int intIndex = n >> 5; int maskIndex = n & 0x1f; int old = bits[intIndex]; return ((bits[intIndex] |= setMask[maskIndex]) != old); } /** * Sets the nth bit to off. 
*/ private boolean clearBit(int n) { int intIndex = n >> 5; int maskIndex = n & 0x1f; int old = bits[intIndex]; return ((bits[intIndex] &= clearMask[maskIndex]) != old); } /** * Tests if the nth bit is on. */ private boolean isBitSet(int n) { int intIndex = n >> 5; int maskIndex = n & 0x1f; return ((bits[intIndex] & setMask[maskIndex]) != 0); } }
numKeys
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.FilesetOutputSymlink; import com.google.devtools.build.lib.actions.FilesetTraversalParams; import com.google.devtools.build.lib.actions.FilesetTraversalParamsFactory; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ServerDirectories; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.NullEventHandler; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import 
com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.FileStateKey; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.SyscallCache; import com.google.devtools.build.skyframe.EvaluationContext; import com.google.devtools.build.skyframe.EvaluationResult; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.MemoizingEvaluator; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import javax.annotation.Nullable; import net.starlark.java.eval.StarlarkSemantics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link FilesetEntryFunction}. 
*/ @RunWith(JUnit4.class) public final class FilesetEntryFunctionTest extends FoundationTestCase { private MemoizingEvaluator evaluator; private RecordingDifferencer differencer; @Before public void setUp() throws Exception { AtomicReference<PathPackageLocator> pkgLocator = new AtomicReference<>( new PathPackageLocator( outputBase, ImmutableList.of(Root.fromPath(rootDirectory)), BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY)); BlazeDirectories directories = new BlazeDirectories( new ServerDirectories(outputBase, outputBase, outputBase), rootDirectory, /* defaultSystemJavabase= */ null, TestConstants.PRODUCT_NAME); ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting( pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories); Map<SkyFunctionName, SkyFunction> skyFunctions = new HashMap<>(); skyFunctions.put( FileStateKey.FILE_STATE, new FileStateFunction( Suppliers.ofInstance(new TimestampGranularityMonitor(BlazeClock.instance())), SyscallCache.NO_CACHE, externalFilesHelper)); skyFunctions.put(SkyFunctions.FILE, new FileFunction(pkgLocator, directories)); skyFunctions.put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction()); skyFunctions.put( SkyFunctions.DIRECTORY_LISTING_STATE, new DirectoryListingStateFunction(externalFilesHelper, SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction(SyscallCache.NO_CACHE)); skyFunctions.put( SkyFunctions.PACKAGE_LOOKUP, new PackageLookupFunction( new AtomicReference<>(ImmutableSet.of()), CrossRepositoryLabelViolationStrategy.ERROR, BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY, BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); skyFunctions.put(SkyFunctions.IGNORED_SUBDIRECTORIES, IgnoredSubdirectoriesFunction.NOOP); skyFunctions.put( SkyFunctions.FILESET_ENTRY, new FilesetEntryFunction((unused) -> rootDirectory)); skyFunctions.put(SkyFunctions.WORKSPACE_NAME, 
new TestWorkspaceNameFunction()); skyFunctions.put( SkyFunctions.LOCAL_REPOSITORY_LOOKUP, new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER)); differencer = new SequencedRecordingDifferencer(); evaluator = new InMemoryMemoizingEvaluator(skyFunctions, differencer); PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID()); PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get()); PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT); } private Artifact getSourceArtifact(String path) { return ActionsTestUtil.createArtifact( ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path); } private Artifact createSourceArtifact(String path) throws Exception { Artifact result = getSourceArtifact(path); createFile(result, "foo"); return result; } private static RootedPath childOf(Artifact artifact, String relative) { return RootedPath.toRootedPath( artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative(relative)); } private static RootedPath siblingOf(Artifact artifact, String relative) { PathFragment parent = Preconditions.checkNotNull(artifact.getRootRelativePath().getParentDirectory()); return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative(relative)); } private void createFile(Path path, String... contents) throws Exception { if (!path.getParentDirectory().exists()) { scratch.dir(path.getParentDirectory().getPathString()); } scratch.file(path.getPathString(), contents); } private void createFile(Artifact artifact, String... contents) throws Exception { createFile(artifact.getPath(), contents); } private RootedPath createFile(RootedPath path, String... 
contents) throws Exception { createFile(path.asPath(), contents); return path; } private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception { EvaluationContext evaluationContext = EvaluationContext.newBuilder() .setKeepGoing(false) .setParallelism(SkyframeExecutor.DEFAULT_THREAD_COUNT) .setEventHandler(NullEventHandler.INSTANCE) .build(); return evaluator.evaluate(ImmutableList.of(key), evaluationContext); } private FilesetEntryValue evalFilesetTraversal(FilesetTraversalParams params) throws Exception { SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isFalse(); return result.get(key); } private FilesetOutputSymlink symlink(String from, Artifact to) { return symlink(PathFragment.create(from), to.getPath().asFragment()); } private FilesetOutputSymlink symlink(String from, RootedPath to) { return symlink(PathFragment.create(from), to.asPath().asFragment()); } private FilesetOutputSymlink symlink(PathFragment from, PathFragment to) { return FilesetOutputSymlink.createForTesting(from, to, rootDirectory.asFragment()); } private void assertSymlinksCreatedInOrder( FilesetTraversalParams request, FilesetOutputSymlink... expectedSymlinks) throws Exception { Collection<FilesetOutputSymlink> actual = Collections2.transform( evalFilesetTraversal(request).getSymlinks(), // Strip the metadata from the actual results. 
(input) -> FilesetOutputSymlink.createAlreadyRelativizedForTesting( input.name(), input.targetPath(), input.relativeToExecRoot())); assertThat(actual).containsExactlyElementsIn(expectedSymlinks).inOrder(); } private static Label label(String label) throws Exception { return Label.parseCanonical(label); } @Test public void testFileTraversalForFile() throws Exception { Artifact file = createSourceArtifact("foo/file.real"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ file, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params, symlink("output-name", file)); } @Test public void testFileTraversalForDirectory() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); RootedPath fileA = createFile(childOf(dir, "file.a"), "hello"); RootedPath fileB = createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ true); assertSymlinksCreatedInOrder( params, symlink("output-name/file.a", fileA), symlink("output-name/sub/file.b", fileB)); } @Test public void testFileTraversalForDisallowedDirectoryThrows() throws Exception { Artifact dir = getSourceArtifact("foo/dir_real"); createFile(childOf(dir, "file.a"), "hello"); createFile(childOf(dir, "sub/file.b"), "world"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ dir, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); SkyKey key = FilesetEntryKey.key(params); EvaluationResult<FilesetEntryValue> result = eval(key); assertThat(result.hasError()).isTrue(); 
assertThat(result.getError(key).getException()) .hasMessageThat() .contains("foo contains a directory"); } @Test public void testFileTraversalForDanglingSymlink() throws Exception { Artifact [MASK] = getSourceArtifact("foo/dangling.sym"); RootedPath linkTarget = createFile(siblingOf( [MASK] , "target.file"), "blah"); [MASK] .getPath().createSymbolicLink(PathFragment.create("target.file")); linkTarget.asPath().delete(); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ [MASK] , PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } @Test public void testFileTraversalForNonExistentFile() throws Exception { Artifact path = getSourceArtifact("foo/non-existent"); FilesetTraversalParams params = FilesetTraversalParamsFactory.fileTraversal( /* ownerLabel= */ label("//foo"), /* fileToTraverse= */ path, PathFragment.create("output-name"), /* strictFilesetOutput= */ false, /* permitDirectories= */ false); assertSymlinksCreatedInOrder(params); // expect empty results } /** * Tests that the fingerprint is a function of all arguments of the factory method. * * <p>Implementations must provide: * <ul> * <li>two different values (a domain) for each argument of the factory method and whether or not * it is expected to influence the fingerprint * <li>a way to instantiate {@link FilesetTraversalParams} with a given set of arguments from the * specified domains * </ul> * * <p>The tests will instantiate pairs of {@link FilesetTraversalParams} objects with only a given * attribute differing, and observe whether the fingerprints differ (if they are expected to) or * are the same (otherwise). 
*/ private abstract static class FingerprintTester { private final Map<String, Domain> domains; FingerprintTester(Map<String, Domain> domains) { this.domains = domains; } abstract FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception; private Map<String, ?> getDefaultArgs() { return getKwArgs(null); } private Map<String, ?> getKwArgs(@Nullable String useAlternateFor) { Map<String, Object> values = new HashMap<>(); for (Map.Entry<String, Domain> d : domains.entrySet()) { values.put( d.getKey(), d.getKey().equals(useAlternateFor) ? d.getValue().valueA : d.getValue().valueB); } return values; } public void doTest() throws Exception { Fingerprint fp = new Fingerprint(); create(getDefaultArgs()).fingerprint(fp); String primary = fp.hexDigestAndReset(); for (String argName : domains.keySet()) { create(getKwArgs(argName)).fingerprint(fp); String secondary = fp.hexDigestAndReset(); if (domains.get(argName).includedInFingerprint) { assertWithMessage( "Argument '" + argName + "' was expected to be included in the" + " fingerprint, but wasn't") .that(primary) .isNotEqualTo(secondary); } else { assertWithMessage( "Argument '" + argName + "' was expected not to be included in the" + " fingerprint, but was") .that(primary) .isEqualTo(secondary); } } } } private static final class Domain { boolean includedInFingerprint; Object valueA; Object valueB; Domain(boolean includedInFingerprint, Object valueA, Object valueB) { this.includedInFingerprint = includedInFingerprint; this.valueA = valueA; this.valueB = valueB; } } private static Domain partOfFingerprint(Object valueA, Object valueB) { return new Domain(true, valueA, valueB); } private static Domain notPartOfFingerprint(Object valueB) { return new Domain(false, "//foo", valueB); } @Test public void testFingerprintOfFileTraversal() throws Exception { new FingerprintTester( ImmutableMap.<String, Domain>builder() .put("ownerLabel", notPartOfFingerprint("//bar")) .put("fileToTraverse", partOfFingerprint("foo/file.a", 
"bar/file.b")) .put("destPath", partOfFingerprint("out1", "out2")) .put("strictFilesetOutput", partOfFingerprint(true, false)) .put("permitDirectories", partOfFingerprint(true, false)) .buildOrThrow()) { @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.fileTraversal( label((String) kwArgs.get("ownerLabel")), getSourceArtifact((String) kwArgs.get("fileToTraverse")), PathFragment.create((String) kwArgs.get("destPath")), (Boolean) kwArgs.get("strictFilesetOutput"), (Boolean) kwArgs.get("permitDirectories")); } }.doTest(); } @Test public void testFingerprintOfNestedTraversal() throws Exception { Artifact nested1 = getSourceArtifact("a/b"); Artifact nested2 = getSourceArtifact("a/c"); new FingerprintTester( ImmutableMap.of( "ownerLabel", notPartOfFingerprint("//bar"), "nestedArtifact", partOfFingerprint(nested1, nested2), "destDir", partOfFingerprint("out1", "out2"), "excludes", partOfFingerprint(ImmutableSet.<String>of(), ImmutableSet.of("x")))) { @SuppressWarnings("unchecked") @Override FilesetTraversalParams create(Map<String, ?> kwArgs) throws Exception { return FilesetTraversalParamsFactory.nestedTraversal( label((String) kwArgs.get("ownerLabel")), (Artifact) kwArgs.get("nestedArtifact"), PathFragment.create((String) kwArgs.get("destDir")), (Set<String>) kwArgs.get("excludes")); } }.doTest(); } private static class TestWorkspaceNameFunction implements SkyFunction { @Nullable @Override public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException, InterruptedException { return WorkspaceNameValue.withName("workspace"); } } }
linkName
package com.alibaba.druid.bvt.sql.mysql.createTable;

import com.alibaba.druid.sql.MysqlTest;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlCreateTableStatement;
import com.alibaba.druid.sql.dialect.mysql.parser.MySqlStatementParser;

import java.util.List;

/**
 * Round-trip test for a MySQL CREATE TABLE that combines RANGE partitioning over
 * UNIX_TIMESTAMP with a trailing "dbpartition by hash(...) dbpartitions N" clause.
 * Checks both the default (upper-case) and the lower-case formatter output.
 */
public class MySqlCreateTableTest139 extends MysqlTest {
    public void test_0() throws Exception {
        String sql = " CREATE TABLE IF NOT EXISTS simiao_alter_partition2 (id int, id2 int, name varchar(30), time timestamp NOT NULL, PRIMARY KEY (id, time), KEY idx_id_time USING BTREE (id, time)) PARTITION BY RANGE (UNIX_TIMESTAMP(time))( PARTITION p0 VALUES LESS THAN (UNIX_TIMESTAMP('2013-01-01 00:00:00')), PARTITION p1 VALUES LESS THAN (UNIX_TIMESTAMP('2013-02-01 00:00:00')), PARTITION p2 VALUES LESS THAN (UNIX_TIMESTAMP('2013-03-01 00:00:00')), PARTITION p3 VALUES LESS THAN (UNIX_TIMESTAMP('2013-04-01 00:00:00')), PARTITION p4 VALUES LESS THAN (UNIX_TIMESTAMP('2013-05-01 00:00:00')), PARTITION p5 VALUES LESS THAN (UNIX_TIMESTAMP('2013-06-01 00:00:00')), PARTITION p6 VALUES LESS THAN (UNIX_TIMESTAMP('2013-07-01 00:00:00')), PARTITION p7 VALUES LESS THAN (UNIX_TIMESTAMP('2013-08-01 00:00:00')), PARTITION p8 VALUES LESS THAN (UNIX_TIMESTAMP('2013-09-01 00:00:00')), PARTITION p10 VALUES LESS THAN (UNIX_TIMESTAMP('2013-10-01 00:00:00')), PARTITION p11 VALUES LESS THAN (UNIX_TIMESTAMP('2013-11-01 00:00:00')), PARTITION p12 VALUES LESS THAN (UNIX_TIMESTAMP('2013-12-01 00:00:00')), PARTITION p13 VALUES LESS THAN (MAXVALUE) ) dbpartition by hash(id) dbpartitions 4;";

        // Was a "[MASK]" placeholder: the statement-level parser is the intended type here.
        MySqlStatementParser parser = new MySqlStatementParser(sql);
        List<SQLStatement> statementList = parser.parseStatementList();
        MySqlCreateTableStatement stmt = (MySqlCreateTableStatement) statementList.get(0);
        assertEquals(1, statementList.size());

        // Default formatter: upper-case keywords, "(MAXVALUE)" normalized to bare MAXVALUE.
        assertEquals("CREATE TABLE IF NOT EXISTS simiao_alter_partition2 (\n" +
                "\tid int,\n" +
                "\tid2 int,\n" +
                "\tname varchar(30),\n" +
                "\ttime timestamp NOT NULL,\n" +
                "\tPRIMARY KEY (id, time),\n" +
                "\tKEY idx_id_time USING BTREE (id, time)\n" +
                ")\n" +
                "PARTITION BY RANGE (UNIX_TIMESTAMP(time)) (\n" +
                "\tPARTITION p0 VALUES LESS THAN (UNIX_TIMESTAMP('2013-01-01 00:00:00')),\n" +
                "\tPARTITION p1 VALUES LESS THAN (UNIX_TIMESTAMP('2013-02-01 00:00:00')),\n" +
                "\tPARTITION p2 VALUES LESS THAN (UNIX_TIMESTAMP('2013-03-01 00:00:00')),\n" +
                "\tPARTITION p3 VALUES LESS THAN (UNIX_TIMESTAMP('2013-04-01 00:00:00')),\n" +
                "\tPARTITION p4 VALUES LESS THAN (UNIX_TIMESTAMP('2013-05-01 00:00:00')),\n" +
                "\tPARTITION p5 VALUES LESS THAN (UNIX_TIMESTAMP('2013-06-01 00:00:00')),\n" +
                "\tPARTITION p6 VALUES LESS THAN (UNIX_TIMESTAMP('2013-07-01 00:00:00')),\n" +
                "\tPARTITION p7 VALUES LESS THAN (UNIX_TIMESTAMP('2013-08-01 00:00:00')),\n" +
                "\tPARTITION p8 VALUES LESS THAN (UNIX_TIMESTAMP('2013-09-01 00:00:00')),\n" +
                "\tPARTITION p10 VALUES LESS THAN (UNIX_TIMESTAMP('2013-10-01 00:00:00')),\n" +
                "\tPARTITION p11 VALUES LESS THAN (UNIX_TIMESTAMP('2013-11-01 00:00:00')),\n" +
                "\tPARTITION p12 VALUES LESS THAN (UNIX_TIMESTAMP('2013-12-01 00:00:00')),\n" +
                "\tPARTITION p13 VALUES LESS THAN MAXVALUE\n" +
                ")\n" +
                "DBPARTITION BY hash(id) DBPARTITIONS 4;", stmt.toString());

        // Lower-case formatter output of the same statement.
        assertEquals("create table if not exists simiao_alter_partition2 (\n" +
                "\tid int,\n" +
                "\tid2 int,\n" +
                "\tname varchar(30),\n" +
                "\ttime timestamp not null,\n" +
                "\tprimary key (id, time),\n" +
                "\tkey idx_id_time using BTREE (id, time)\n" +
                ")\n" +
                "partition by range (UNIX_TIMESTAMP(time)) (\n" +
                "\tpartition p0 values less than (UNIX_TIMESTAMP('2013-01-01 00:00:00')),\n" +
                "\tpartition p1 values less than (UNIX_TIMESTAMP('2013-02-01 00:00:00')),\n" +
                "\tpartition p2 values less than (UNIX_TIMESTAMP('2013-03-01 00:00:00')),\n" +
                "\tpartition p3 values less than (UNIX_TIMESTAMP('2013-04-01 00:00:00')),\n" +
                "\tpartition p4 values less than (UNIX_TIMESTAMP('2013-05-01 00:00:00')),\n" +
                "\tpartition p5 values less than (UNIX_TIMESTAMP('2013-06-01 00:00:00')),\n" +
                "\tpartition p6 values less than (UNIX_TIMESTAMP('2013-07-01 00:00:00')),\n" +
                "\tpartition p7 values less than (UNIX_TIMESTAMP('2013-08-01 00:00:00')),\n" +
                "\tpartition p8 values less than (UNIX_TIMESTAMP('2013-09-01 00:00:00')),\n" +
                "\tpartition p10 values less than (UNIX_TIMESTAMP('2013-10-01 00:00:00')),\n" +
                "\tpartition p11 values less than (UNIX_TIMESTAMP('2013-11-01 00:00:00')),\n" +
                "\tpartition p12 values less than (UNIX_TIMESTAMP('2013-12-01 00:00:00')),\n" +
                "\tpartition p13 values less than maxvalue\n" +
                ")\n" +
                "dbpartition by hash(id) dbpartitions 4;", stmt.toLowerCaseString());
    }
}
MySqlStatementParser
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.hudi;

import com.facebook.presto.common.type.TypeManager;
import com.facebook.presto.hive.HdfsEnvironment;
import com.facebook.presto.hive.MetastoreClientConfig;
import com.facebook.presto.hive.metastore.ExtendedHiveMetastore;
import com.facebook.presto.hive.metastore.InMemoryCachingHiveMetastore;
import com.facebook.presto.spi.connector.ConnectorMetadata;

import javax.inject.Inject;

import static java.util.Objects.requireNonNull;

/**
 * Factory for {@link HudiMetadata} instances. Each created instance wraps the shared
 * metastore in a per-transaction memoizing cache sized from {@link MetastoreClientConfig}.
 */
public class HudiMetadataFactory
{
    private final ExtendedHiveMetastore metastore;
    // Was a "[MASK]" placeholder: the HdfsEnvironment dependency threaded through to HudiMetadata.
    private final HdfsEnvironment hdfsEnvironment;
    private final TypeManager typeManager;
    private final long perTransactionCacheMaximumSize;
    private final boolean metastoreImpersonationEnabled;
    private final int metastorePartitionCacheMaxColumnCount;

    @Inject
    public HudiMetadataFactory(
            ExtendedHiveMetastore metastore,
            HdfsEnvironment hdfsEnvironment,
            TypeManager typeManager,
            MetastoreClientConfig metastoreClientConfig)
    {
        this.metastore = requireNonNull(metastore, "metastore is null");
        this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null");
        this.typeManager = requireNonNull(typeManager, "typeManager is null");
        // Cache tuning knobs are copied out of the config so create() needs no config reference.
        this.perTransactionCacheMaximumSize = metastoreClientConfig.getPerTransactionMetastoreCacheMaximumSize();
        this.metastoreImpersonationEnabled = metastoreClientConfig.isMetastoreImpersonationEnabled();
        this.metastorePartitionCacheMaxColumnCount = metastoreClientConfig.getPartitionCacheColumnCountLimit();
    }

    /** Creates a new metadata instance backed by a memoizing (per-transaction) metastore cache. */
    public ConnectorMetadata create()
    {
        return new HudiMetadata(
                InMemoryCachingHiveMetastore.memoizeMetastore(metastore, metastoreImpersonationEnabled, perTransactionCacheMaximumSize, metastorePartitionCacheMaxColumnCount),
                hdfsEnvironment,
                typeManager);
    }
}
hdfsEnvironment
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.esql.plugin;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.RefCountingListener;
import org.elasticsearch.compute.EsqlRefCountingListener;
import org.elasticsearch.compute.operator.DriverProfile;
import org.elasticsearch.compute.operator.ResponseHeadersCollector;
import org.elasticsearch.core.Releasable;
import org.elasticsearch.threadpool.ThreadPool;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * A variant of {@link RefCountingListener} with the following differences:
 * 1. Automatically cancels sub tasks on failure (via runOnTaskFailure)
 * 2. Collects driver profiles from sub tasks.
 * 3. Collects response headers from sub tasks, specifically warnings emitted during compute
 * 4. Collects failures and returns the most appropriate exception to the caller.
 */
final class ComputeListener implements Releasable {
    private final EsqlRefCountingListener refs;
    private final List<DriverProfile> collectedProfiles;
    private final ResponseHeadersCollector responseHeaders;
    // Was a "[MASK]" placeholder: callback invoked when any sub-listener fails.
    private final Runnable runOnFailure;

    ComputeListener(ThreadPool threadPool, Runnable runOnFailure, ActionListener<List<DriverProfile>> delegate) {
        this.runOnFailure = runOnFailure;
        this.responseHeaders = new ResponseHeadersCollector(threadPool.getThreadContext());
        this.collectedProfiles = Collections.synchronizedList(new ArrayList<>());
        // listener that executes after all the sub-listeners refs (created via acquireCompute) have completed
        this.refs = new EsqlRefCountingListener(delegate.delegateFailure((l, ignored) -> {
            responseHeaders.finish();
            delegate.onResponse(collectedProfiles.stream().toList());
        }));
    }

    /**
     * Acquires a new listener that doesn't collect result
     */
    ActionListener<Void> acquireAvoid() {
        return refs.acquire().delegateResponse((l, e) -> {
            try {
                runOnFailure.run();
            } finally {
                l.onFailure(e);
            }
        });
    }

    /**
     * Acquires a new listener that collects compute result. This listener will also collect warnings emitted during compute
     */
    ActionListener<List<DriverProfile>> acquireCompute() {
        final ActionListener<Void> delegate = acquireAvoid();
        return ActionListener.wrap(profiles -> {
            responseHeaders.collect();
            if (profiles != null && profiles.isEmpty() == false) {
                collectedProfiles.addAll(profiles);
            }
            delegate.onResponse(null);
        }, e -> {
            responseHeaders.collect();
            delegate.onFailure(e);
        });
    }

    @Override
    public void close() {
        refs.close();
    }
}
runOnFailure
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.esql.plugin;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.RefCountingListener;
import org.elasticsearch.compute.EsqlRefCountingListener;
import org.elasticsearch.compute.operator.DriverProfile;
import org.elasticsearch.compute.operator.ResponseHeadersCollector;
import org.elasticsearch.core.Releasable;
import org.elasticsearch.threadpool.ThreadPool;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * A variant of {@link RefCountingListener} with the following differences:
 * 1. Automatically cancels sub tasks on failure (via runOnTaskFailure)
 * 2. Collects driver profiles from sub tasks.
 * 3. Collects response headers from sub tasks, specifically warnings emitted during compute
 * 4. Collects failures and returns the most appropriate exception to the caller.
 */
final class ComputeListener implements Releasable {
    private final EsqlRefCountingListener refs;
    private final List<DriverProfile> collectedProfiles;
    // Was a "[MASK]" placeholder: the import and field type are ResponseHeadersCollector,
    // the only *Collector used with threadPool.getThreadContext() below.
    private final ResponseHeadersCollector responseHeaders;
    private final Runnable runOnFailure;

    ComputeListener(ThreadPool threadPool, Runnable runOnFailure, ActionListener<List<DriverProfile>> delegate) {
        this.runOnFailure = runOnFailure;
        this.responseHeaders = new ResponseHeadersCollector(threadPool.getThreadContext());
        this.collectedProfiles = Collections.synchronizedList(new ArrayList<>());
        // listener that executes after all the sub-listeners refs (created via acquireCompute) have completed
        this.refs = new EsqlRefCountingListener(delegate.delegateFailure((l, ignored) -> {
            responseHeaders.finish();
            delegate.onResponse(collectedProfiles.stream().toList());
        }));
    }

    /**
     * Acquires a new listener that doesn't collect result
     */
    ActionListener<Void> acquireAvoid() {
        return refs.acquire().delegateResponse((l, e) -> {
            try {
                runOnFailure.run();
            } finally {
                l.onFailure(e);
            }
        });
    }

    /**
     * Acquires a new listener that collects compute result. This listener will also collect warnings emitted during compute
     */
    ActionListener<List<DriverProfile>> acquireCompute() {
        final ActionListener<Void> delegate = acquireAvoid();
        return ActionListener.wrap(profiles -> {
            responseHeaders.collect();
            if (profiles != null && profiles.isEmpty() == false) {
                collectedProfiles.addAll(profiles);
            }
            delegate.onResponse(null);
        }, e -> {
            responseHeaders.collect();
            delegate.onFailure(e);
        });
    }

    @Override
    public void close() {
        refs.close();
    }
}
ResponseHeadersCollector