language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
junit-team__junit5
|
junit-vintage-engine/src/testFixtures/java/org/junit/vintage/engine/samples/junit4/AbstractJUnit4TestCase.java
|
{
"start": 405,
"end": 475
}
|
class ____ {
@Test
public void theTest() {
}
}
|
AbstractJUnit4TestCase
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/InputWriter.java
|
{
"start": 1010,
"end": 1500
}
|
class ____<K, V> {
/**
* Initializes the InputWriter. This method has to be called before calling
* any of the other methods.
*/
public void initialize(PipeMapRed pipeMapRed) throws IOException {
// nothing here yet, but that might change in the future
}
/**
* Writes an input key.
*/
public abstract void writeKey(K key) throws IOException;
/**
* Writes an input value.
*/
public abstract void writeValue(V value) throws IOException;
}
|
InputWriter
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/SecuredConfigFileAccessPermission.java
|
{
"start": 1120,
"end": 1279
}
|
class ____ extends BasicPermission {
public SecuredConfigFileAccessPermission(String path) {
super(path, "");
}
}
|
SecuredConfigFileAccessPermission
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/refaster/DescriptionBasedDiffTest.java
|
{
"start": 5698,
"end": 6412
}
|
class ____ {",
" public static void main(String[] args) {",
" System.out.println(\"baz\");",
" }",
"}")
.inOrder();
}
@Test
public void applyDifferences_addsImportAndSorts_whenAddingNewImport() {
DescriptionBasedDiff diff = createDescriptionBasedDiff();
diff.onDescribed(
dummyDescription(SuggestedFix.builder().addImport("com.google.foo.Bar").build()));
diff.applyDifferences(sourceFile);
assertThat(sourceFile.getLines())
.containsExactly(
"package foo.bar;",
"import com.foo.Bar;",
"import com.google.foo.Bar;",
"import org.bar.Baz;",
"",
"
|
Foo
|
java
|
spring-projects__spring-boot
|
loader/spring-boot-loader-tools/src/test/java/org/springframework/boot/loader/tools/AbstractPackagerTests.java
|
{
"start": 5501,
"end": 33059
}
|
class ____ the following candidates [a.b.C, a.b.D]");
}
@Test
void noMainClass() throws Exception {
this.testJarFile.addClass("a/b/C.class", ClassWithoutMainMethod.class);
P packager = createPackager(this.testJarFile.getFile());
assertThatIllegalStateException().isThrownBy(() -> execute(packager, NO_LIBRARIES))
.withMessageContaining("Unable to find main class");
}
@Test
void noMainClassAndLayoutIsNone() throws Exception {
this.testJarFile.addClass("a/b/C.class", ClassWithMainMethod.class);
P packager = createPackager();
packager.setLayout(new Layouts.None());
execute(packager, NO_LIBRARIES);
Manifest actualManifest = getPackagedManifest();
assertThat(actualManifest).isNotNull();
assertThat(actualManifest.getMainAttributes().getValue("Main-Class")).isEqualTo("a.b.C");
assertThat(hasPackagedLauncherClasses()).isFalse();
}
@Test
void noMainClassAndLayoutIsNoneWithNoMain() throws Exception {
this.testJarFile.addClass("a/b/C.class", ClassWithoutMainMethod.class);
P packager = createPackager();
packager.setLayout(new Layouts.None());
execute(packager, NO_LIBRARIES);
Manifest actualManifest = getPackagedManifest();
assertThat(actualManifest).isNotNull();
assertThat(actualManifest.getMainAttributes().getValue("Main-Class")).isNull();
assertThat(hasPackagedLauncherClasses()).isFalse();
}
@Test
@SuppressWarnings("NullAway") // Test null check
void nullLibraries() throws Exception {
this.testJarFile.addClass("a/b/C.class", ClassWithMainMethod.class);
P packager = createPackager();
assertThatIllegalArgumentException().isThrownBy(() -> execute(packager, null))
.withMessageContaining("'libraries' must not be null");
}
@Test
void libraries() throws Exception {
TestJarFile libJar = new TestJarFile(this.tempDir);
libJar.addClass("a/b/C.class", ClassWithoutMainMethod.class, JAN_1_1985);
File libJarFile = libJar.getFile();
File libJarFileToUnpack = libJar.getFile();
File libNonJarFile = new File(this.tempDir, "non-lib.jar");
FileCopyUtils.copy(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8 }, libNonJarFile);
this.testJarFile.addClass("a/b/C.class", ClassWithMainMethod.class);
this.testJarFile.addFile("BOOT-INF/lib/" + libJarFileToUnpack.getName(), libJarFileToUnpack);
libJarFile.setLastModified(JAN_1_1980);
P packager = createPackager();
execute(packager, (callback) -> {
callback.library(newLibrary(libJarFile, LibraryScope.COMPILE, false));
callback.library(newLibrary(libJarFileToUnpack, LibraryScope.COMPILE, true));
callback.library(newLibrary(libNonJarFile, LibraryScope.COMPILE, false));
});
assertThat(hasPackagedEntry("BOOT-INF/lib/" + libJarFile.getName())).isTrue();
assertThat(hasPackagedEntry("BOOT-INF/lib/" + libJarFileToUnpack.getName())).isTrue();
assertThat(hasPackagedEntry("BOOT-INF/lib/" + libNonJarFile.getName())).isFalse();
ZipEntry entry = getPackagedEntry("BOOT-INF/lib/" + libJarFile.getName());
assertThat(entry).isNotNull();
assertThat(entry.getTime()).isEqualTo(JAN_1_1985);
entry = getPackagedEntry("BOOT-INF/lib/" + libJarFileToUnpack.getName());
assertThat(entry).isNotNull();
assertThat(entry.getComment()).isEqualTo("UNPACK");
}
@Test
void classPathIndex() throws Exception {
TestJarFile libJar1 = new TestJarFile(this.tempDir);
libJar1.addClass("a/b/C.class", ClassWithoutMainMethod.class, JAN_1_1985);
File libJarFile1 = libJar1.getFile();
TestJarFile libJar2 = new TestJarFile(this.tempDir);
libJar2.addClass("a/b/C.class", ClassWithoutMainMethod.class, JAN_1_1985);
File libJarFile2 = libJar2.getFile();
TestJarFile libJar3 = new TestJarFile(this.tempDir);
libJar3.addClass("a/b/C.class", ClassWithoutMainMethod.class, JAN_1_1985);
File libJarFile3 = libJar3.getFile();
this.testJarFile.addClass("a/b/C.class", ClassWithMainMethod.class);
File file = this.testJarFile.getFile();
P packager = createPackager(file);
packager.setIncludeRelevantJarModeJars(false);
execute(packager, (callback) -> {
callback.library(newLibrary(libJarFile1, LibraryScope.COMPILE, false));
callback.library(newLibrary(libJarFile2, LibraryScope.COMPILE, false));
callback.library(newLibrary(libJarFile3, LibraryScope.COMPILE, false));
});
assertThat(hasPackagedEntry("BOOT-INF/classpath.idx")).isTrue();
String index = getPackagedEntryContent("BOOT-INF/classpath.idx");
assertThat(index).isNotNull();
String[] libraries = index.split("\\r?\\n");
List<String> expected = Stream.of(libJarFile1, libJarFile2, libJarFile3)
.map((jar) -> "- \"BOOT-INF/lib/" + jar.getName() + "\"")
.toList();
assertThat(Arrays.asList(libraries)).containsExactlyElementsOf(expected);
}
@Test
void layersIndex() throws Exception {
TestJarFile libJar1 = new TestJarFile(this.tempDir);
libJar1.addClass("a/b/C.class", ClassWithoutMainMethod.class, JAN_1_1985);
File libJarFile1 = libJar1.getFile();
TestJarFile libJar2 = new TestJarFile(this.tempDir);
libJar2.addClass("a/b/C.class", ClassWithoutMainMethod.class, JAN_1_1985);
File libJarFile2 = libJar2.getFile();
TestJarFile libJar3 = new TestJarFile(this.tempDir);
libJar3.addClass("a/b/C.class", ClassWithoutMainMethod.class, JAN_1_1985);
File libJarFile3 = libJar3.getFile();
this.testJarFile.addClass("a/b/C.class", ClassWithMainMethod.class);
P packager = createPackager();
TestLayers layers = new TestLayers();
layers.addLibrary(libJarFile1, "0001");
layers.addLibrary(libJarFile2, "0002");
layers.addLibrary(libJarFile3, "0003");
packager.setLayers(layers);
packager.setIncludeRelevantJarModeJars(false);
execute(packager, (callback) -> {
callback.library(newLibrary(libJarFile1, LibraryScope.COMPILE, false));
callback.library(newLibrary(libJarFile2, LibraryScope.COMPILE, false));
callback.library(newLibrary(libJarFile3, LibraryScope.COMPILE, false));
});
assertThat(hasPackagedEntry("BOOT-INF/classpath.idx")).isTrue();
String classpathIndex = getPackagedEntryContent("BOOT-INF/classpath.idx");
assertThat(classpathIndex).isNotNull();
List<String> expectedClasspathIndex = Stream.of(libJarFile1, libJarFile2, libJarFile3)
.map((file) -> "- \"BOOT-INF/lib/" + file.getName() + "\"")
.toList();
assertThat(Arrays.asList(classpathIndex.split("\\n"))).containsExactlyElementsOf(expectedClasspathIndex);
assertThat(hasPackagedEntry("BOOT-INF/layers.idx")).isTrue();
String layersIndex = getPackagedEntryContent("BOOT-INF/layers.idx");
assertThat(layersIndex).isNotNull();
List<String> expectedLayers = new ArrayList<>();
expectedLayers.add("- 'default':");
expectedLayers.add(" - 'BOOT-INF/classes/'");
expectedLayers.add(" - 'BOOT-INF/classpath.idx'");
expectedLayers.add(" - 'BOOT-INF/layers.idx'");
expectedLayers.add(" - 'META-INF/'");
expectedLayers.add(" - 'org/'");
expectedLayers.add("- '0001':");
expectedLayers.add(" - 'BOOT-INF/lib/" + libJarFile1.getName() + "'");
expectedLayers.add("- '0002':");
expectedLayers.add(" - 'BOOT-INF/lib/" + libJarFile2.getName() + "'");
expectedLayers.add("- '0003':");
expectedLayers.add(" - 'BOOT-INF/lib/" + libJarFile3.getName() + "'");
assertThat(layersIndex.split("\\n"))
.containsExactly(expectedLayers.stream().map((s) -> s.replace('\'', '"')).toArray(String[]::new));
}
@Test
void layersEnabledAddJarModeJar() throws Exception {
this.testJarFile.addClass("a/b/C.class", ClassWithMainMethod.class);
P packager = createPackager();
TestLayers layers = new TestLayers();
packager.setLayers(layers);
execute(packager, Libraries.NONE);
assertThat(hasPackagedEntry("BOOT-INF/classpath.idx")).isTrue();
String classpathIndex = getPackagedEntryContent("BOOT-INF/classpath.idx");
assertThat(classpathIndex).isNotNull();
assertThat(Arrays.asList(classpathIndex.split("\\n")))
.containsExactly("- \"BOOT-INF/lib/spring-boot-jarmode-tools.jar\"");
assertThat(hasPackagedEntry("BOOT-INF/layers.idx")).isTrue();
String layersIndex = getPackagedEntryContent("BOOT-INF/layers.idx");
assertThat(layersIndex).isNotNull();
List<String> expectedLayers = new ArrayList<>();
expectedLayers.add("- 'default':");
expectedLayers.add(" - 'BOOT-INF/'");
expectedLayers.add(" - 'META-INF/'");
expectedLayers.add(" - 'org/'");
assertThat(layersIndex.split("\\n"))
.containsExactly(expectedLayers.stream().map((s) -> s.replace('\'', '"')).toArray(String[]::new));
}
@Test
void duplicateLibraries() throws Exception {
TestJarFile libJar = new TestJarFile(this.tempDir);
libJar.addClass("a/b/C.class", ClassWithoutMainMethod.class);
File libJarFile = libJar.getFile();
this.testJarFile.addClass("a/b/C.class", ClassWithMainMethod.class);
P packager = createPackager();
assertThatIllegalStateException().isThrownBy(() -> execute(packager, (callback) -> {
callback.library(newLibrary(libJarFile, LibraryScope.COMPILE, false));
callback.library(newLibrary(libJarFile, LibraryScope.COMPILE, false));
})).withMessageContaining("Duplicate library");
}
@Test
void customLayout() throws Exception {
TestJarFile libJar = new TestJarFile(this.tempDir);
libJar.addClass("a/b/C.class", ClassWithoutMainMethod.class);
File libJarFile = libJar.getFile();
this.testJarFile.addClass("a/b/C.class", ClassWithMainMethod.class);
P packager = createPackager();
Layout layout = mock(Layout.class);
LibraryScope scope = mock(LibraryScope.class);
given(layout.getLauncherClassName()).willReturn("testLauncher");
given(layout.getLibraryLocation(anyString(), eq(scope))).willReturn("test/");
given(layout.getLibraryLocation(anyString(), eq(LibraryScope.COMPILE))).willReturn("test-lib/");
packager.setLayout(layout);
execute(packager, (callback) -> callback.library(newLibrary(libJarFile, scope, false)));
assertThat(hasPackagedEntry("test/" + libJarFile.getName())).isTrue();
Manifest manifest = getPackagedManifest();
assertThat(manifest).isNotNull();
assertThat(manifest.getMainAttributes().getValue("Spring-Boot-Lib")).isEqualTo("test-lib/");
assertThat(manifest.getMainAttributes().getValue("Main-Class")).isEqualTo("testLauncher");
}
@Test
void customLayoutNoBootLib() throws Exception {
TestJarFile libJar = new TestJarFile(this.tempDir);
libJar.addClass("a/b/C.class", ClassWithoutMainMethod.class);
File libJarFile = libJar.getFile();
this.testJarFile.addClass("a/b/C.class", ClassWithMainMethod.class);
P packager = createPackager();
Layout layout = mock(Layout.class);
LibraryScope scope = mock(LibraryScope.class);
given(layout.getLauncherClassName()).willReturn("testLauncher");
packager.setLayout(layout);
execute(packager, (callback) -> callback.library(newLibrary(libJarFile, scope, false)));
Manifest manifest = getPackagedManifest();
assertThat(manifest).isNotNull();
assertThat(manifest.getMainAttributes().getValue("Spring-Boot-Lib")).isNull();
assertThat(manifest.getMainAttributes().getValue("Main-Class")).isEqualTo("testLauncher");
}
@Test
void springBootVersion() throws Exception {
this.testJarFile.addClass("a/b/C.class", ClassWithMainMethod.class);
P packager = createPackager();
execute(packager, NO_LIBRARIES);
Manifest actualManifest = getPackagedManifest();
assertThat(actualManifest).isNotNull();
assertThat(actualManifest.getMainAttributes()).containsKey(new Attributes.Name("Spring-Boot-Version"));
}
@Test
void executableJarLayoutAttributes() throws Exception {
this.testJarFile.addClass("a/b/C.class", ClassWithMainMethod.class);
P packager = createPackager();
execute(packager, NO_LIBRARIES);
Manifest actualManifest = getPackagedManifest();
assertThat(actualManifest).isNotNull();
assertThat(actualManifest.getMainAttributes()).containsEntry(new Attributes.Name("Spring-Boot-Lib"),
"BOOT-INF/lib/");
assertThat(actualManifest.getMainAttributes()).containsEntry(new Attributes.Name("Spring-Boot-Classes"),
"BOOT-INF/classes/");
}
@Test
void executableWarLayoutAttributes() throws Exception {
this.testJarFile.addClass("WEB-INF/classes/a/b/C.class", ClassWithMainMethod.class);
P packager = createPackager(this.testJarFile.getFile("war"));
execute(packager, NO_LIBRARIES);
Manifest actualManifest = getPackagedManifest();
assertThat(actualManifest).isNotNull();
assertThat(actualManifest.getMainAttributes()).containsEntry(new Attributes.Name("Spring-Boot-Lib"),
"WEB-INF/lib/");
assertThat(actualManifest.getMainAttributes()).containsEntry(new Attributes.Name("Spring-Boot-Classes"),
"WEB-INF/classes/");
}
@Test
@SuppressWarnings("NullAway") // Test null check
void nullCustomLayout() throws Exception {
this.testJarFile.addClass("a/b/C.class", ClassWithoutMainMethod.class);
Packager packager = createPackager();
assertThatIllegalArgumentException().isThrownBy(() -> packager.setLayout(null))
.withMessageContaining("Layout must not be null");
}
@Test
void dontRecompressZips() throws Exception {
TestJarFile nested = new TestJarFile(this.tempDir);
nested.addClass("a/b/C.class", ClassWithoutMainMethod.class);
File nestedFile = nested.getFile();
this.testJarFile.addFile("test/nested.jar", nestedFile);
this.testJarFile.addClass("A.class", ClassWithMainMethod.class);
P packager = createPackager();
execute(packager, (callback) -> callback.library(newLibrary(nestedFile, LibraryScope.COMPILE, false)));
ZipEntry entry = getPackagedEntry("BOOT-INF/lib/" + nestedFile.getName());
assertThat(entry).isNotNull();
assertThat(entry.getMethod()).isZero();
entry = getPackagedEntry("BOOT-INF/classes/test/nested.jar");
assertThat(entry).isNotNull();
assertThat(entry.getMethod()).isZero();
}
@Test
void unpackLibrariesTakePrecedenceOverExistingSourceEntries() throws Exception {
TestJarFile nested = new TestJarFile(this.tempDir);
nested.addClass("a/b/C.class", ClassWithoutMainMethod.class);
File nestedFile = nested.getFile();
String name = "BOOT-INF/lib/" + nestedFile.getName();
this.testJarFile.addFile(name, nested.getFile());
this.testJarFile.addClass("A.class", ClassWithMainMethod.class);
P packager = createPackager();
execute(packager, (callback) -> callback.library(newLibrary(nestedFile, LibraryScope.COMPILE, true)));
ZipEntry entry = getPackagedEntry(name);
assertThat(entry).isNotNull();
assertThat(entry.getComment()).isEqualTo("UNPACK");
}
@Test
void existingSourceEntriesTakePrecedenceOverStandardLibraries() throws Exception {
TestJarFile nested = new TestJarFile(this.tempDir);
nested.addClass("a/b/C.class", ClassWithoutMainMethod.class);
File nestedFile = nested.getFile();
this.testJarFile.addFile("BOOT-INF/lib/" + nestedFile.getName(), nested.getFile());
this.testJarFile.addClass("A.class", ClassWithMainMethod.class);
P packager = createPackager();
long sourceLength = nestedFile.length();
execute(packager, (callback) -> {
nestedFile.delete();
File toZip = new File(this.tempDir, "to-zip");
toZip.createNewFile();
ZipUtil.packEntry(toZip, nestedFile);
callback.library(newLibrary(nestedFile, LibraryScope.COMPILE, false));
});
ZipEntry entry = getPackagedEntry("BOOT-INF/lib/" + nestedFile.getName());
assertThat(entry).isNotNull();
assertThat(entry.getSize()).isEqualTo(sourceLength);
}
@Test
void metaInfIndexListIsRemovedFromRepackagedJar() throws Exception {
this.testJarFile.addClass("A.class", ClassWithMainMethod.class);
File indexList = new File(this.tempDir, "INDEX.LIST");
indexList.createNewFile();
this.testJarFile.addFile("META-INF/INDEX.LIST", indexList);
P packager = createPackager();
execute(packager, NO_LIBRARIES);
assertThat(getPackagedEntry("META-INF/INDEX.LIST")).isNull();
}
@Test
void customLayoutFactoryWithoutLayout() throws Exception {
this.testJarFile.addClass("a/b/C.class", ClassWithMainMethod.class);
P packager = createPackager();
packager.setLayoutFactory(new TestLayoutFactory());
execute(packager, NO_LIBRARIES);
assertThat(getPackagedEntry("test")).isNotNull();
}
@Test
void customLayoutFactoryWithLayout() throws Exception {
this.testJarFile.addClass("a/b/C.class", ClassWithMainMethod.class);
P packager = createPackager();
packager.setLayoutFactory(new TestLayoutFactory());
packager.setLayout(new Layouts.Jar());
execute(packager, NO_LIBRARIES);
assertThat(getPackagedEntry("test")).isNull();
}
@Test
void metaInfAopXmlIsMovedBeneathBootInfClassesWhenRepackaged() throws Exception {
this.testJarFile.addClass("A.class", ClassWithMainMethod.class);
File aopXml = new File(this.tempDir, "aop.xml");
aopXml.createNewFile();
this.testJarFile.addFile("META-INF/aop.xml", aopXml);
P packager = createPackager();
execute(packager, NO_LIBRARIES);
assertThat(getPackagedEntry("META-INF/aop.xml")).isNull();
assertThat(getPackagedEntry("BOOT-INF/classes/META-INF/aop.xml")).isNotNull();
}
@Test
void metaInfServicesFilesAreMovedBeneathBootInfClassesWhenRepackaged() throws Exception {
this.testJarFile.addClass("A.class", ClassWithMainMethod.class);
File service = new File(this.tempDir, "com.example.Service");
service.createNewFile();
this.testJarFile.addFile("META-INF/services/com.example.Service", service);
P packager = createPackager();
execute(packager, NO_LIBRARIES);
assertThat(getPackagedEntry("META-INF/services/com.example.Service")).isNull();
assertThat(getPackagedEntry("BOOT-INF/classes/META-INF/services/com.example.Service")).isNotNull();
}
@Test
void allEntriesUseUnixPlatformAndUtf8NameEncoding() throws IOException {
this.testJarFile.addClass("A.class", ClassWithMainMethod.class);
P packager = createPackager();
execute(packager, NO_LIBRARIES);
for (ZipArchiveEntry entry : getAllPackagedEntries()) {
assertThat(entry.getPlatform()).isEqualTo(ZipArchiveEntry.PLATFORM_UNIX);
assertThat(entry.getGeneralPurposeBit().usesUTF8ForNames()).isTrue();
}
}
@Test
void loaderIsWrittenFirstThenApplicationClassesThenLibraries() throws IOException {
this.testJarFile.addClass("com/example/Application.class", ClassWithMainMethod.class);
File libraryOne = createLibraryJar();
File libraryTwo = createLibraryJar();
File libraryThree = createLibraryJar();
P packager = createPackager();
execute(packager, (callback) -> {
callback.library(newLibrary(libraryOne, LibraryScope.COMPILE, false));
callback.library(newLibrary(libraryTwo, LibraryScope.COMPILE, true));
callback.library(newLibrary(libraryThree, LibraryScope.COMPILE, false));
});
assertThat(getPackagedEntryNames()).containsSubsequence("org/springframework/boot/loader/",
"BOOT-INF/classes/com/example/Application.class", "BOOT-INF/lib/" + libraryOne.getName(),
"BOOT-INF/lib/" + libraryTwo.getName(), "BOOT-INF/lib/" + libraryThree.getName());
}
@Test
void existingEntryThatMatchesUnpackLibraryIsMarkedForUnpack() throws IOException {
File library = createLibraryJar();
this.testJarFile.addClass("WEB-INF/classes/com/example/Application.class", ClassWithMainMethod.class);
this.testJarFile.addFile("WEB-INF/lib/" + library.getName(), library);
P packager = createPackager(this.testJarFile.getFile("war"));
packager.setLayout(new Layouts.War());
execute(packager, (callback) -> callback.library(newLibrary(library, LibraryScope.COMPILE, true)));
assertThat(getPackagedEntryNames()).containsSubsequence("org/springframework/boot/loader/",
"WEB-INF/classes/com/example/Application.class", "WEB-INF/lib/" + library.getName());
ZipEntry unpackLibrary = getPackagedEntry("WEB-INF/lib/" + library.getName());
assertThat(unpackLibrary).isNotNull();
assertThat(unpackLibrary.getComment()).isEqualTo("UNPACK");
}
@Test
void layoutCanOmitLibraries() throws IOException {
TestJarFile libJar = new TestJarFile(this.tempDir);
libJar.addClass("a/b/C.class", ClassWithoutMainMethod.class);
File libJarFile = libJar.getFile();
this.testJarFile.addClass("a/b/C.class", ClassWithMainMethod.class);
P packager = createPackager();
Layout layout = mock(Layout.class);
LibraryScope scope = mock(LibraryScope.class);
packager.setLayout(layout);
execute(packager, (callback) -> callback.library(newLibrary(libJarFile, scope, false)));
assertThat(getPackagedEntryNames()).containsExactly("META-INF/", "META-INF/MANIFEST.MF", "a/", "a/b/",
"a/b/C.class");
}
@Test
void jarThatUsesCustomCompressionConfigurationCanBeRepackaged() throws IOException {
File source = new File(this.tempDir, "source.jar");
ZipOutputStream output = new ZipOutputStream(new FileOutputStream(source)) {
{
this.def = new Deflater(Deflater.NO_COMPRESSION, true);
}
};
byte[] data = new byte[1024 * 1024];
new Random().nextBytes(data);
ZipEntry entry = new ZipEntry("entry.dat");
output.putNextEntry(entry);
output.write(data);
output.closeEntry();
output.close();
P packager = createPackager(source);
packager.setMainClass("com.example.Main");
execute(packager, NO_LIBRARIES);
}
@Test
void moduleInfoClassRemainsInRootOfJarWhenRepackaged() throws Exception {
this.testJarFile.addClass("A.class", ClassWithMainMethod.class);
this.testJarFile.addClass("module-info.class", ClassWithoutMainMethod.class);
P packager = createPackager();
execute(packager, NO_LIBRARIES);
assertThat(getPackagedEntry("module-info.class")).isNotNull();
assertThat(getPackagedEntry("BOOT-INF/classes/module-info.class")).isNull();
}
@Test
void kotlinModuleMetadataMovesBeneathBootInfClassesWhenRepackaged() throws Exception {
this.testJarFile.addClass("A.class", ClassWithMainMethod.class);
File kotlinModule = new File(this.tempDir, "test.kotlin_module");
kotlinModule.createNewFile();
this.testJarFile.addFile("META-INF/test.kotlin_module", kotlinModule);
P packager = createPackager();
execute(packager, NO_LIBRARIES);
assertThat(getPackagedEntry("META-INF/test.kotlin_module")).isNull();
assertThat(getPackagedEntry("BOOT-INF/classes/META-INF/test.kotlin_module")).isNotNull();
}
@Test
void entryFiltering() throws Exception {
File webLibrary = createLibraryJar();
File libraryOne = createLibraryJar();
File libraryTwo = createLibraryJar();
this.testJarFile.addClass("WEB-INF/classes/com/example/Application.class", ClassWithMainMethod.class);
this.testJarFile.addFile("WEB-INF/lib/" + webLibrary.getName(), webLibrary);
P packager = createPackager(this.testJarFile.getFile("war"));
packager.setIncludeRelevantJarModeJars(false);
packager.setLayout(new Layouts.War());
execute(packager, (callback) -> {
callback.library(newLibrary(webLibrary, LibraryScope.COMPILE, false, false));
callback.library(newLibrary(libraryOne, LibraryScope.COMPILE, false, false));
callback.library(newLibrary(libraryTwo, LibraryScope.COMPILE, false, true));
});
Collection<String> packagedEntryNames = getPackagedEntryNames();
packagedEntryNames.removeIf((name) -> !name.endsWith(".jar"));
assertThat(packagedEntryNames).containsExactly("WEB-INF/lib/" + libraryTwo.getName());
}
@Test
void nativeImageArgFileWithExcludesIsWritten() throws Exception {
this.testJarFile.addClass("com/example/Application.class", ClassWithMainMethod.class);
File libraryOne = createLibraryJar();
File libraryTwo = createLibraryJar();
File libraryThree = createLibraryJar();
File libraryFour = createLibraryJar();
this.testJarFile.addFile("META-INF/native-image/com.example.one/lib-one/123/reachability-metadata.properties",
new ByteArrayInputStream("override=true\n".getBytes(StandardCharsets.ISO_8859_1)));
this.testJarFile.addFile("META-INF/native-image/com.example.two/lib-two/123/reachability-metadata.properties",
new ByteArrayInputStream("override=true\n".getBytes(StandardCharsets.ISO_8859_1)));
this.testJarFile.addFile(
"META-INF/native-image/com.example.three/lib-three/123/reachability-metadata.properties",
new ByteArrayInputStream("other=test\n".getBytes(StandardCharsets.ISO_8859_1)));
P packager = createPackager(this.testJarFile.getFile());
execute(packager, (callback) -> {
callback.library(new Library(null, libraryOne, LibraryScope.COMPILE,
LibraryCoordinates.of("com.example.one", "lib-one", "123"), false, false, true));
callback.library(new Library(null, libraryTwo, LibraryScope.COMPILE,
LibraryCoordinates.of("com.example.two", "lib-two", "123"), false, false, true));
callback.library(new Library(null, libraryThree, LibraryScope.COMPILE,
LibraryCoordinates.of("com.example.three", "lib-three", "123"), false, false, true));
callback.library(new Library(null, libraryFour, LibraryScope.COMPILE,
LibraryCoordinates.of("com.example.four", "lib-four", "123"), false, false, true));
});
List<String> expected = new ArrayList<>();
expected.add("--exclude-config");
expected.add("\\Q" + libraryOne.getName() + "\\E");
expected.add("^/META-INF/native-image/.*");
expected.add("--exclude-config");
expected.add("\\Q" + libraryTwo.getName() + "\\E");
expected.add("^/META-INF/native-image/.*");
assertThat(getPackagedEntryContent("META-INF/native-image/argfile"))
.isEqualTo(String.join("\n", expected) + "\n");
}
@Test
void sbomManifestEntriesAreWritten() throws IOException {
this.testJarFile.addClass("com/example/Application.class", ClassWithMainMethod.class);
this.testJarFile.addFile("META-INF/sbom/application.cdx.json", new ByteArrayInputStream(new byte[0]));
P packager = createPackager(this.testJarFile.getFile());
execute(packager, NO_LIBRARIES);
Manifest manifest = getPackagedManifest();
assertThat(manifest).isNotNull();
assertThat(manifest.getMainAttributes().getValue("Sbom-Format")).isEqualTo("CycloneDX");
assertThat(manifest.getMainAttributes().getValue("Sbom-Location"))
.isEqualTo("META-INF/sbom/application.cdx.json");
}
private File createLibraryJar() throws IOException {
TestJarFile library = new TestJarFile(this.tempDir);
library.addClass("com/example/library/Library.class", ClassWithoutMainMethod.class);
return library.getFile();
}
protected Library newLibrary(File file, LibraryScope scope, boolean unpackRequired) {
return new Library(null, file, scope, null, unpackRequired, false, true);
}
private Library newLibrary(File file, LibraryScope scope, boolean unpackRequired, boolean included) {
return new Library(null, file, scope, null, unpackRequired, false, included);
}
protected final P createPackager() {
return createPackager(this.testJarFile.getFile());
}
protected abstract P createPackager(File source);
protected abstract void execute(P packager, Libraries libraries) throws IOException;
protected Collection<String> getPackagedEntryNames() throws IOException {
return getAllPackagedEntries().stream()
.map(ZipArchiveEntry::getName)
.collect(Collectors.toCollection(ArrayList::new));
}
protected boolean hasPackagedLauncherClasses() throws IOException {
return hasPackagedEntry("org/springframework/boot/")
&& hasPackagedEntry("org/springframework/boot/loader/launch/JarLauncher.class");
}
protected boolean hasPackagedEntry(String name) throws IOException {
return getPackagedEntry(name) != null;
}
protected @Nullable ZipEntry getPackagedEntry(String name) throws IOException {
return getAllPackagedEntries().stream()
.filter((entry) -> name.equals(entry.getName()))
.findFirst()
.orElse(null);
}
protected abstract Collection<ZipArchiveEntry> getAllPackagedEntries() throws IOException;
protected abstract @Nullable Manifest getPackagedManifest() throws IOException;
protected abstract @Nullable String getPackagedEntryContent(String name) throws IOException;
static
|
from
|
java
|
elastic__elasticsearch
|
modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDatabaseProvider.java
|
{
"start": 663,
"end": 1745
}
|
interface ____ {
/**
* Determines if the given database name corresponds to an expired database. Expired databases will not be loaded.
* <br/><br/>
* Verifying database expiration is left to each provider implementation to determine. A return value of <code>false</code> does not
* preclude the possibility of a provider returning <code>true</code> in the future.
*
* @param projectId projectId to look for database.
* @param name the name of the database to provide.
* @return <code>false</code> IFF the requested database file is expired,
* <code>true</code> for all other cases (including unknown file name, file missing, wrong database type, etc).
*/
Boolean isValid(ProjectId projectId, String name);
/**
* @param projectId projectId to look for database.
* @param name the name of the database to provide.
* @return a ready-to-use database instance, or <code>null</code> if no database could be loaded.
*/
IpDatabase getDatabase(ProjectId projectId, String name);
}
|
IpDatabaseProvider
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/common/io/stream/ReleasableBytesStreamOutput.java
|
{
"start": 1193,
"end": 1850
}
|
class ____ extends BytesStreamOutput implements Releasable {
public ReleasableBytesStreamOutput(BigArrays bigarrays) {
this(PageCacheRecycler.PAGE_SIZE_IN_BYTES, bigarrays);
}
public ReleasableBytesStreamOutput(int expectedSize, BigArrays bigArrays) {
super(expectedSize, bigArrays);
}
@Override
public void close() {
Releasables.close(bytes);
}
@Override
public void reset() {
assert false;
// not supported, close and create a new instance instead
throw new UnsupportedOperationException("must not reuse a pooled bytes backed stream");
}
}
|
ReleasableBytesStreamOutput
|
java
|
google__dagger
|
javatests/dagger/hilt/android/processor/internal/GeneratorsTest.java
|
{
"start": 12252,
"end": 13034
}
|
class ____ extends Hilt_MyView {",
" public MyView(String[] strs, int i, Context context, AttributeSet attrs) {",
" super(strs, i, context, attrs);",
" }",
"}");
HiltCompilerTests.hiltCompiler(baseView, myView).compile(subject -> subject.hasErrorCount(0));
}
// This is a regression test for https://github.com/google/dagger/issues/3296
@Test
public void isRestrictedApiConstructorWithTypeParameterTest() {
Source baseView =
HiltCompilerTests.javaSource(
"test.BaseView",
"package test;",
"",
"import android.content.Context;",
"import android.util.AttributeSet;",
"import android.view.View;",
"",
"public
|
MyView
|
java
|
apache__camel
|
components/camel-ai/camel-pinecone/src/main/java/org/apache/camel/component/pinecone/PineconeVectorDbComponent.java
|
{
"start": 1127,
"end": 2212
}
|
class ____ extends DefaultComponent {
@Metadata
private PineconeVectorDbConfiguration configuration;
public PineconeVectorDbComponent() {
this(null);
}
public PineconeVectorDbComponent(CamelContext context) {
super(context);
this.configuration = new PineconeVectorDbConfiguration();
}
public PineconeVectorDbConfiguration getConfiguration() {
return configuration;
}
/**
* The configuration;
*/
public void setConfiguration(PineconeVectorDbConfiguration configuration) {
this.configuration = configuration;
}
@Override
protected Endpoint createEndpoint(
String uri,
String remaining,
Map<String, Object> parameters)
throws Exception {
PineconeVectorDbConfiguration configuration = this.configuration.copy();
PineconeVectorDbEndpoint endpoint = new PineconeVectorDbEndpoint(uri, this, remaining, configuration);
setProperties(endpoint, parameters);
return endpoint;
}
}
|
PineconeVectorDbComponent
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableConcatMapEagerPublisher.java
|
{
"start": 1157,
"end": 2048
}
|
class ____<T, R> extends Flowable<R> {
final Publisher<T> source;
final Function<? super T, ? extends Publisher<? extends R>> mapper;
final int maxConcurrency;
final int prefetch;
final ErrorMode errorMode;
public FlowableConcatMapEagerPublisher(Publisher<T> source,
Function<? super T, ? extends Publisher<? extends R>> mapper,
int maxConcurrency,
int prefetch,
ErrorMode errorMode) {
this.source = source;
this.mapper = mapper;
this.maxConcurrency = maxConcurrency;
this.prefetch = prefetch;
this.errorMode = errorMode;
}
@Override
protected void subscribeActual(Subscriber<? super R> s) {
source.subscribe(new ConcatMapEagerDelayErrorSubscriber<>(
s, mapper, maxConcurrency, prefetch, errorMode));
}
}
|
FlowableConcatMapEagerPublisher
|
java
|
apache__flink
|
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Model.java
|
{
"start": 2399,
"end": 5642
}
|
interface ____ {
/**
* Returns the resolved input schema of this model.
*
* <p>The input schema describes the structure and data types of the input columns that the
* model expects for inference operations.
*
* @return the resolved input schema.
*/
ResolvedSchema getResolvedInputSchema();
/**
* Returns the resolved output schema of this model.
*
* <p>The output schema describes the structure and data types of the output columns that the
* model produces during inference operations.
*
* @return the resolved output schema.
*/
ResolvedSchema getResolvedOutputSchema();
/**
* Performs prediction on the given table using specified input columns.
*
* <p>This method applies the model to the input data to generate predictions. The input columns
* must match the model's expected input schema.
*
* <p>Example:
*
* <pre>{@code
* Table predictions = model.predict(inputTable, ColumnList.of("feature1", "feature2"));
* }</pre>
*
* @param table the input table containing data for prediction
* @param inputColumns the columns from the input table to use as model input
* @return a table containing the input data along with prediction results
*/
Table predict(Table table, ColumnList inputColumns);
/**
* Performs prediction on the given table using specified input columns with runtime options.
*
* <p>This method applies the model to the input data to generate predictions with additional
* runtime configuration options such as max-concurrent-operations, timeout, and execution mode
* settings.
*
* <p>For Common runtime options, see {@link MLPredictRuntimeConfigOptions}.
*
* <p>Example:
*
* <pre>{@code
* Map<String, String> options = Map.of("max-concurrent-operations", "100", "timeout", "30s", "async", "true");
* Table predictions = model.predict(inputTable,
* ColumnList.of("feature1", "feature2"), options);
* }</pre>
*
* @param table the input table containing data for prediction
* @param inputColumns the columns from the input table to use as model input
* @param options runtime options for configuring the prediction operation
* @return a table containing the input data along with prediction results
*/
Table predict(Table table, ColumnList inputColumns, Map<String, String> options);
/**
* Converts this model object into a named argument.
*
* <p>This method is intended for use in function calls that accept model arguments,
* particularly in process table functions (PTFs) or other operations that work with models.
*
* <p>Example:
*
* <pre>{@code
* env.fromCall(
* "ML_PREDICT",
* inputTable.asArgument("INPUT"),
* model.asArgument("MODEL"),
* Expressions.descriptor(ColumnList.of("feature1", "feature2")).asArgument("ARGS")
* )
* }</pre>
*
* @param name the name to assign to this model argument
* @return an expression that can be passed to functions expecting model arguments
*/
ApiExpression asArgument(String name);
}
|
Model
|
java
|
apache__flink
|
flink-core/src/test/java/org/apache/flink/api/common/typeutils/base/array/IntPrimitiveArrayComparatorTest.java
|
{
"start": 993,
"end": 1638
}
|
class ____ extends PrimitiveArrayComparatorTestBase<int[]> {
public IntPrimitiveArrayComparatorTest() {
super(PrimitiveArrayTypeInfo.INT_PRIMITIVE_ARRAY_TYPE_INFO);
}
@Override
protected void deepEquals(String message, int[] should, int[] is) {
assertThat(is).as(message).isEqualTo(should);
}
@Override
protected int[][] getSortedTestData() {
return new int[][] {
new int[] {-1, 0},
new int[] {0, -1},
new int[] {0, 0},
new int[] {0, 1},
new int[] {0, 1, 2},
new int[] {2}
};
}
}
|
IntPrimitiveArrayComparatorTest
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/MvcUriComponentsBuilderTests.java
|
{
"start": 26708,
"end": 26809
}
|
class ____ extends ControllerWithMethods {
}
@RequestMapping("/something")
public
|
ExtendedController
|
java
|
micronaut-projects__micronaut-core
|
management/src/main/java/io/micronaut/management/endpoint/refresh/RefreshEndpoint.java
|
{
"start": 1456,
"end": 2802
}
|
class ____ {
private final Environment environment;
private final ApplicationEventPublisher<RefreshEvent> eventPublisher;
/**
* @param environment The Environment
* @param eventPublisher The Application event publisher
*/
public RefreshEndpoint(Environment environment, ApplicationEventPublisher<RefreshEvent> eventPublisher) {
this.environment = environment;
this.eventPublisher = eventPublisher;
}
/**
* Refresh application state only if environment has changed (unless <code>force</code> is set to true).
*
* @param force {@link Nullable} body property to indicate whether to force all {@link io.micronaut.runtime.context.scope.Refreshable} beans to be refreshed
* @return array of change keys if applicable
*/
@Write
public String[] refresh(@Nullable Boolean force) {
if (force != null && force) {
eventPublisher.publishEvent(new RefreshEvent());
return EMPTY_STRING_ARRAY;
} else {
Map<String, Object> changes = environment.refreshAndDiff();
if (!changes.isEmpty()) {
eventPublisher.publishEvent(new RefreshEvent(changes));
}
Set<String> keys = changes.keySet();
return keys.toArray(EMPTY_STRING_ARRAY);
}
}
}
|
RefreshEndpoint
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/DisableSpecialKeyDetectTest.java
|
{
"start": 305,
"end": 2022
}
|
class ____ extends TestCase {
public void test_0() throws Exception {
String json = "{\"schema\":{\"$ref\":{\"@title\":\"类目ID\",\"@type\":\"string\"},\"$\":{\"@\":\"类目名称\",\"type\":\"string\"},\"cat_desc\":{\"title\":\"类目描述\",\"type\":\"string\"}}}";
JSONObject errorJson = JSON.parseObject(json, Feature.DisableSpecialKeyDetect);
JSONObject schema = errorJson.getJSONObject("schema");
Set<Map.Entry<String, Object>> es2 = schema.entrySet();
for (Map.Entry<String, Object> entry : es2) {
System.out.println(entry.getKey() + "_" + entry.getValue());
}
}
public void test_1() throws Exception {
String text = "{\"@v1\":\"v1\",\"@type\":\"v2\", \"@\":\"v3\",\"$\":\"v4\",\"$ref\":\"v5\"}";
JSONObject json = JSON.parseObject(text, Feature.DisableSpecialKeyDetect);
Assert.assertEquals("v1", json.getString("@v1"));
Assert.assertEquals("v2", json.getString("@type"));
Assert.assertEquals("v3", json.getString("@"));
Assert.assertEquals("v4", json.getString("$"));
Assert.assertEquals("v5", json.getString("$ref"));
}
public void test_2() throws Exception {
String text = "{\"@v1\":\"v1\",\"@type\":\"v2\", \"@\":\"v3\",\"$\":\"v4\",\"$ref\":\"v5\"}";
Map<String,String> map = JSON.parseObject(text, new TypeReference<Map<String,String>>(){}, Feature.DisableSpecialKeyDetect);
Assert.assertEquals("v1", map.get("@v1"));
Assert.assertEquals("v2", map.get("@type"));
Assert.assertEquals("v3", map.get("@"));
Assert.assertEquals("v4", map.get("$"));
Assert.assertEquals("v5", map.get("$ref"));
}
}
|
DisableSpecialKeyDetectTest
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/core/io/PostVersionedIOReadableWritable.java
|
{
"start": 1514,
"end": 3815
}
|
class ____ extends VersionedIOReadableWritable {
/** NOTE: CANNOT CHANGE! */
private static final byte[] VERSIONED_IDENTIFIER = new byte[] {-15, -51, -123, -97};
/**
* Read from the provided {@link DataInputView in}. A flag {@code wasVersioned} can be used to
* determine whether or not the data to read was previously written by a {@link
* VersionedIOReadableWritable}.
*/
protected abstract void read(DataInputView in, boolean wasVersioned) throws IOException;
@Override
public void write(DataOutputView out) throws IOException {
out.write(VERSIONED_IDENTIFIER);
super.write(out);
}
/**
* This read attempts to first identify if the input view contains the special {@link
* #VERSIONED_IDENTIFIER} by reading and buffering the first few bytes. If identified to be
* versioned, the usual version resolution read path in {@link
* VersionedIOReadableWritable#read(DataInputView)} is invoked. Otherwise, we "reset" the input
* stream by pushing back the read buffered bytes into the stream.
*/
public final void read(InputStream inputStream) throws IOException {
byte[] tmp = new byte[VERSIONED_IDENTIFIER.length];
int totalRead = IOUtils.tryReadFully(inputStream, tmp);
if (Arrays.equals(tmp, VERSIONED_IDENTIFIER)) {
DataInputView inputView = new DataInputViewStreamWrapper(inputStream);
super.read(inputView);
read(inputView, true);
} else {
InputStream streamToRead = inputStream;
if (totalRead > 0) {
PushbackInputStream resetStream = new PushbackInputStream(inputStream, totalRead);
resetStream.unread(tmp, 0, totalRead);
streamToRead = resetStream;
}
read(new DataInputViewStreamWrapper(streamToRead), false);
}
}
/**
* We do not support reading from a {@link DataInputView}, because it does not support pushing
* back already read bytes.
*/
@Override
public final void read(DataInputView in) throws IOException {
throw new UnsupportedOperationException(
"PostVersionedIOReadableWritable cannot read from a DataInputView.");
}
}
|
PostVersionedIOReadableWritable
|
java
|
elastic__elasticsearch
|
libs/core/src/test/java/org/elasticsearch/core/GlobTests.java
|
{
"start": 7611,
"end": 8624
}
|
interface ____ {
boolean test(char c);
}
private String randomAsciiString(int length) {
return randomAsciiString(length, ch -> ch >= ' ' && ch <= '~');
}
private String randomAsciiStringNoAsterisks(final int length) {
return randomAsciiString(length, ch -> ch >= ' ' && ch <= '~' && ch != '*');
}
private String randomAsciiString(int length, CharPredicate validCharacters) {
StringBuilder str = new StringBuilder(length);
nextChar: for (int i = 0; i < length; i++) {
for (int attempts = 0; attempts < 200; attempts++) {
char ch = (char) randomIntBetween(0x1, 0x7f);
if (validCharacters.test(ch)) {
str.append(ch);
continue nextChar;
}
}
throw new IllegalStateException("Cannot find valid character for string");
}
assertThat(str.length(), equalTo(length));
return str.toString();
}
}
|
CharPredicate
|
java
|
apache__camel
|
dsl/camel-jbang/camel-jbang-core/src/main/java/org/apache/camel/dsl/jbang/core/commands/update/UpdateCommand.java
|
{
"start": 1120,
"end": 1417
}
|
class ____ extends CamelCommand {
public UpdateCommand(CamelJBangMain main) {
super(main);
}
@Override
public Integer doCall() throws Exception {
// defaults to list
new CommandLine(new UpdateList(getMain())).execute();
return 0;
}
}
|
UpdateCommand
|
java
|
quarkusio__quarkus
|
extensions/panache/hibernate-reactive-rest-data-panache/runtime/src/main/java/io/quarkus/hibernate/reactive/rest/data/panache/RestDataResourceMethodListener.java
|
{
"start": 100,
"end": 229
}
|
interface ____ subscribe to the entity events in REST Data with Panache.
*
* @param <ENTITY> the entity to subscribe.
*/
public
|
to
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/QueryWatchesAction.java
|
{
"start": 8073,
"end": 9912
}
|
class ____ extends ActionResponse implements ToXContentObject {
private final List<Item> watches;
private final long watchTotalCount;
public Response(long watchTotalCount, List<Item> watches) {
this.watches = watches;
this.watchTotalCount = watchTotalCount;
}
public Response(StreamInput in) throws IOException {
watches = in.readCollectionAsList(Item::new);
watchTotalCount = in.readVLong();
}
public List<Item> getWatches() {
return watches;
}
public long getWatchTotalCount() {
return watchTotalCount;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeCollection(watches);
out.writeVLong(watchTotalCount);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("count", watchTotalCount);
builder.startArray("watches");
for (Item watch : watches) {
builder.startObject();
watch.toXContent(builder, params);
builder.endObject();
}
builder.endArray();
return builder.endObject();
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Response response = (Response) o;
return watchTotalCount == response.watchTotalCount && watches.equals(response.watches);
}
@Override
public int hashCode() {
return Objects.hash(watches, watchTotalCount);
}
public static
|
Response
|
java
|
quarkusio__quarkus
|
integration-tests/grpc-inprocess/src/test/java/io/quarkus/grpc/examples/hello/HelloWorldNewServiceTest.java
|
{
"start": 359,
"end": 1113
}
|
class ____ {
@GrpcClient("hello")
GreeterGrpc.GreeterBlockingStub stub;
@GrpcClient("hello")
MutinyGreeterGrpc.MutinyGreeterStub mutiny;
@Test
public void testHelloWorldServiceUsingBlockingStub() {
HelloReply reply = stub.sayHello(HelloRequest.newBuilder().setName("neo-blocking").build());
Assertions.assertEquals(reply.getMessage(), "Hello neo-blocking");
}
@Test
public void testHelloWorldServiceUsingMutinyStub() {
HelloReply reply = mutiny
.sayHello(HelloRequest.newBuilder().setName("neo-blocking").build())
.await().atMost(Duration.ofSeconds(5));
Assertions.assertEquals(reply.getMessage(), "Hello neo-blocking");
}
}
|
HelloWorldNewServiceTest
|
java
|
spring-projects__spring-boot
|
build-plugin/spring-boot-gradle-plugin/src/main/java/org/springframework/boot/gradle/tasks/bundling/LayeredSpec.java
|
{
"start": 11420,
"end": 11660
}
|
class ____
implements Function<String, DependenciesIntoLayerSpec>, Serializable {
@Override
public DependenciesIntoLayerSpec apply(String layer) {
return new DependenciesIntoLayerSpec(layer);
}
}
}
}
|
IntoLayerSpecFactory
|
java
|
spring-projects__spring-security
|
webauthn/src/main/java/org/springframework/security/web/webauthn/jackson/CredProtectAuthenticationExtensionsClientInputSerializer.java
|
{
"start": 1237,
"end": 2531
}
|
class ____
extends StdSerializer<CredProtectAuthenticationExtensionsClientInput> {
protected CredProtectAuthenticationExtensionsClientInputSerializer() {
super(CredProtectAuthenticationExtensionsClientInput.class);
}
@Override
public void serialize(CredProtectAuthenticationExtensionsClientInput input, JsonGenerator jgen,
SerializationContext ctxt) throws JacksonException {
CredProtectAuthenticationExtensionsClientInput.CredProtect credProtect = input.getInput();
String policy = toString(credProtect.getCredProtectionPolicy());
jgen.writePOJOProperty("credentialProtectionPolicy", policy);
jgen.writePOJOProperty("enforceCredentialProtectionPolicy", credProtect.isEnforceCredentialProtectionPolicy());
}
private static String toString(CredProtectAuthenticationExtensionsClientInput.CredProtect.ProtectionPolicy policy) {
switch (policy) {
case USER_VERIFICATION_OPTIONAL:
return "userVerificationOptional";
case USER_VERIFICATION_OPTIONAL_WITH_CREDENTIAL_ID_LIST:
return "userVerificationOptionalWithCredentialIdList";
case USER_VERIFICATION_REQUIRED:
return "userVerificationRequired";
default:
throw new IllegalArgumentException("Unsupported ProtectionPolicy " + policy);
}
}
}
|
CredProtectAuthenticationExtensionsClientInputSerializer
|
java
|
square__retrofit
|
retrofit-converters/jackson/src/test/java/retrofit2/converter/jackson/JacksonConverterFactoryTest.java
|
{
"start": 4124,
"end": 7984
}
|
interface ____ {
@POST("/")
Call<AnImplementation> anImplementation(@Body AnImplementation impl);
@POST("/")
Call<AnInterface> anInterface(@Body AnInterface impl);
@POST("/")
Call<Void> erroringValue(@Body ErroringValue value);
}
@Rule public final MockWebServer server = new MockWebServer();
private final Service service;
private final boolean streaming;
public JacksonConverterFactoryTest(@TestParameter boolean streaming) {
this.streaming = streaming;
SimpleModule module = new SimpleModule();
module.addSerializer(AnInterface.class, new AnInterfaceSerializer());
module.addSerializer(ErroringValue.class, new ErroringValueSerializer());
module.addDeserializer(AnInterface.class, new AnInterfaceDeserializer());
ObjectMapper mapper = new ObjectMapper();
mapper.registerModule(module);
mapper.configure(MapperFeature.AUTO_DETECT_GETTERS, false);
mapper.configure(MapperFeature.AUTO_DETECT_SETTERS, false);
mapper.configure(MapperFeature.AUTO_DETECT_IS_GETTERS, false);
mapper.setVisibilityChecker(
mapper
.getSerializationConfig()
.getDefaultVisibilityChecker()
.withFieldVisibility(JsonAutoDetect.Visibility.ANY));
JacksonConverterFactory factory = JacksonConverterFactory.create(mapper);
if (streaming) {
factory = factory.withStreaming();
}
Retrofit retrofit =
new Retrofit.Builder().baseUrl(server.url("/")).addConverterFactory(factory).build();
service = retrofit.create(Service.class);
}
@Test
public void anInterface() throws IOException, InterruptedException {
server.enqueue(new MockResponse().setBody("{\"name\":\"value\"}"));
Call<AnInterface> call = service.anInterface(new AnImplementation("value"));
Response<AnInterface> response = call.execute();
AnInterface body = response.body();
assertThat(body.getName()).isEqualTo("value");
RecordedRequest request = server.takeRequest();
assertThat(request.getBody().readUtf8()).isEqualTo("{\"name\":\"value\"}");
assertThat(request.getHeader("Content-Type")).isEqualTo("application/json; charset=UTF-8");
}
@Test
public void anImplementation() throws IOException, InterruptedException {
server.enqueue(new MockResponse().setBody("{\"theName\":\"value\"}"));
Call<AnImplementation> call = service.anImplementation(new AnImplementation("value"));
Response<AnImplementation> response = call.execute();
AnImplementation body = response.body();
assertThat(body.theName).isEqualTo("value");
RecordedRequest request = server.takeRequest();
// TODO figure out how to get Jackson to stop using AnInterface's serializer here.
assertThat(request.getBody().readUtf8()).isEqualTo("{\"name\":\"value\"}");
assertThat(request.getHeader("Content-Type")).isEqualTo("application/json; charset=UTF-8");
}
@Test
public void serializeIsStreamed() throws InterruptedException {
assumeTrue(streaming);
Call<Void> call = service.erroringValue(new ErroringValue("hi"));
final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
final CountDownLatch latch = new CountDownLatch(1);
// If streaming were broken, the call to enqueue would throw the exception synchronously.
call.enqueue(
new Callback<Void>() {
@Override
public void onResponse(Call<Void> call, Response<Void> response) {
latch.countDown();
}
@Override
public void onFailure(Call<Void> call, Throwable t) {
throwableRef.set(t);
latch.countDown();
}
});
latch.await();
Throwable throwable = throwableRef.get();
assertThat(throwable).isInstanceOf(EOFException.class);
assertThat(throwable).hasMessageThat().isEqualTo("oops!");
}
}
|
Service
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointFromStringEvaluator.java
|
{
"start": 4585,
"end": 5183
}
|
class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory in;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory in) {
this.source = source;
this.in = in;
}
@Override
public ToGeoPointFromStringEvaluator get(DriverContext context) {
return new ToGeoPointFromStringEvaluator(source, in.get(context), context);
}
@Override
public String toString() {
return "ToGeoPointFromStringEvaluator[" + "in=" + in + "]";
}
}
}
|
Factory
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java
|
{
"start": 3526,
"end": 3969
}
|
class ____ extends FieldMapper {
public static final String CONTENT_TYPE = "completion";
/**
* Maximum allowed number of completion contexts in a mapping.
*/
static final int COMPLETION_CONTEXTS_LIMIT = 10;
@Override
public FieldMapper.Builder getMergeBuilder() {
return new Builder(leafName(), builder.defaultAnalyzer, builder.indexVersionCreated).init(this);
}
public static
|
CompletionFieldMapper
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/api/common/accumulators/Histogram.java
|
{
"start": 1150,
"end": 1275
}
|
class ____ not extend to continuous values later, because it makes no attempt to put the
* data in bins.
*/
@Public
public
|
does
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/metamodel/internal/EmbeddableInstantiatorPojoStandard.java
|
{
"start": 1593,
"end": 3054
}
|
class ____ interface", getMappedPojoClass()
);
}
if ( constructor == null ) {
throw new InstantiationException( "Unable to locate constructor for embeddable", getMappedPojoClass() );
}
try {
final var values = valuesAccess == null ? null : valuesAccess.getValues();
final Object instance = constructor.newInstance();
if ( values != null ) {
// At this point, createEmptyCompositesEnabled is always true.
// We can only set the property values on the compositeInstance though if there is at least one non null value.
// If the values are all null, we would normally not create a composite instance at all because no values exist.
// Setting all properties to null could cause IllegalArgumentExceptions though when the component has primitive properties.
// To avoid this exception and align with what Hibernate 5 did, we skip setting properties if all values are null.
// A possible alternative could be to initialize the resolved values for primitive fields to their default value,
// but that might cause unexpected outcomes for Hibernate 5 users that use createEmptyCompositesEnabled when updating.
// You can see the need for this by running EmptyCompositeEquivalentToNullTest
embeddableMappingAccess.get().setValues( instance, values );
}
return instance;
}
catch ( Exception e ) {
throw new InstantiationException( "Could not instantiate entity", getMappedPojoClass(), e );
}
}
}
|
or
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/collection/embeddable/Name.java
|
{
"start": 377,
"end": 1584
}
|
class ____ implements Serializable {
private String firstName;
private String lastName;
public Name() {
}
public Name(String firstName, String lastName) {
this.firstName = firstName;
this.lastName = lastName;
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( !(o instanceof Name) ) {
return false;
}
Name name = (Name) o;
if ( firstName != null ? !firstName.equals( name.firstName ) : name.firstName != null ) {
return false;
}
if ( lastName != null ? !lastName.equals( name.lastName ) : name.lastName != null ) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = firstName != null ? firstName.hashCode() : 0;
result = 31 * result + (lastName != null ? lastName.hashCode() : 0);
return result;
}
@Override
public String toString() {
return "Name(firstName = " + firstName + ", lastName = " + lastName + ")";
}
public String getFirstName() {
return firstName;
}
public void setFirstName(String firstName) {
this.firstName = firstName;
}
public String getLastName() {
return lastName;
}
public void setLastName(String lastName) {
this.lastName = lastName;
}
}
|
Name
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/config/XmlConfigTestSupport.java
|
{
"start": 1274,
"end": 2053
}
|
class ____ extends TestSupport {
protected static final Logger LOG = LoggerFactory.getLogger(XmlConfigTestSupport.class);
protected void assertValidContext(CamelContext context) {
assertNotNull(context, "No context found!");
List<RouteDefinition> routes = ((ModelCamelContext) context).getRouteDefinitions();
LOG.debug("Found routes: {}", routes);
assertEquals(1, routes.size(), "One Route should be found");
for (RouteDefinition route : routes) {
FromDefinition fromType = route.getInput();
assertEquals("seda:test.a", fromType.getUri(), "from URI");
List<?> outputs = route.getOutputs();
assertEquals(1, outputs.size(), "Number of outputs");
}
}
}
|
XmlConfigTestSupport
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/onetomany/OneToManyDuplicatesTest.java
|
{
"start": 2371,
"end": 3181
}
|
class ____ {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private Long id;
private String name;
@OneToMany(cascade = CascadeType.ALL, mappedBy = "userContact")
private List<ContactInfo> contactInfos = new ArrayList<>();
public UserContact() {
}
public UserContact(Long id, String name) {
this.id = id;
this.name = name;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public List<ContactInfo> getContactInfos() {
return contactInfos;
}
public void setContactInfos(List<ContactInfo> contactInfos) {
this.contactInfos = contactInfos;
}
}
@Entity(name = "ContactInfo")
public static
|
UserContact
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/components/collections/CollectionOfComponents.java
|
{
"start": 1030,
"end": 2901
}
|
class ____ {
private Integer id1;
private Integer id2;
@BeforeClassTemplate
public void initData(EntityManagerFactoryScope scope) {
// Revision 1
scope.inTransaction( em -> {
ComponentSetTestEntity cte1 = new ComponentSetTestEntity();
ComponentSetTestEntity cte2 = new ComponentSetTestEntity();
cte2.getComps().add( new Component1( "string1", null ) );
em.persist( cte2 );
em.persist( cte1 );
id1 = cte1.getId();
id2 = cte2.getId();
} );
// Revision 2
scope.inTransaction( em -> {
ComponentSetTestEntity cte1 = em.find( ComponentSetTestEntity.class, id1 );
cte1.getComps().add( new Component1( "a", "b" ) );
} );
}
@Test
public void testRevisionsCounts(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
assertEquals( Arrays.asList( 1, 2 ), AuditReaderFactory.get( em ).getRevisions( ComponentSetTestEntity.class, id1 ) );
} );
}
@Test
public void testHistoryOfId1(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
assertEquals( 0, auditReader.find( ComponentSetTestEntity.class, id1, 1 ).getComps().size() );
Set<Component1> comps1 = auditReader.find( ComponentSetTestEntity.class, id1, 2 ).getComps();
assertEquals( 1, comps1.size() );
assertTrue( comps1.contains( new Component1( "a", "b" ) ) );
} );
}
@Test
@JiraKey(value = "HHH-8968")
public void testCollectionOfEmbeddableWithNullValue(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
final Component1 componentV1 = new Component1( "string1", null );
final ComponentSetTestEntity entityV1 = auditReader.find( ComponentSetTestEntity.class, id2, 1 );
assertEquals( Collections.singleton( componentV1 ), entityV1.getComps() );
} );
}
}
|
CollectionOfComponents
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ImmutableCheckerTest.java
|
{
"start": 59685,
"end": 60250
}
|
class ____ {
<T extends MyImmutableType> A<T> h() {
return new A<>();
}
}
""")
.doTest();
}
@Test
public void immutableTypeParameterInstantiation_violation() {
compilationHelper
.addSourceLines(
"A.java",
"""
import com.google.errorprone.annotations.ImmutableTypeParameter;
import com.google.errorprone.annotations.Immutable;
import com.google.common.collect.ImmutableList;
@Immutable
|
Test
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/boot/models/xml/dynamic/Employee.java
|
{
"start": 296,
"end": 415
}
|
class ____ {
@Id
private String name;
@Id
private int number;
@OneToMany
private List<Address> addresses;
}
|
Employee
|
java
|
spring-projects__spring-security
|
web/src/main/java/org/springframework/security/web/server/authentication/ConcurrentSessionControlServerAuthenticationSuccessHandler.java
|
{
"start": 1745,
"end": 4343
}
|
class ____
implements ServerAuthenticationSuccessHandler {
private final ReactiveSessionRegistry sessionRegistry;
private final ServerMaximumSessionsExceededHandler maximumSessionsExceededHandler;
private SessionLimit sessionLimit = SessionLimit.of(1);
public ConcurrentSessionControlServerAuthenticationSuccessHandler(ReactiveSessionRegistry sessionRegistry,
ServerMaximumSessionsExceededHandler maximumSessionsExceededHandler) {
Assert.notNull(sessionRegistry, "sessionRegistry cannot be null");
Assert.notNull(maximumSessionsExceededHandler, "maximumSessionsExceededHandler cannot be null");
this.sessionRegistry = sessionRegistry;
this.maximumSessionsExceededHandler = maximumSessionsExceededHandler;
}
@Override
public Mono<Void> onAuthenticationSuccess(WebFilterExchange exchange, Authentication authentication) {
return this.sessionLimit.apply(authentication)
.flatMap((maxSessions) -> handleConcurrency(exchange, authentication, maxSessions));
}
private Mono<Void> handleConcurrency(WebFilterExchange exchange, Authentication authentication,
Integer maximumSessions) {
return this.sessionRegistry.getAllSessions(Objects.requireNonNull(authentication.getPrincipal()))
.collectList()
.flatMap((registeredSessions) -> exchange.getExchange()
.getSession()
.map((currentSession) -> Tuples.of(currentSession, registeredSessions)))
.flatMap((sessionTuple) -> {
WebSession currentSession = sessionTuple.getT1();
List<ReactiveSessionInformation> registeredSessions = sessionTuple.getT2();
int registeredSessionsCount = registeredSessions.size();
if (registeredSessionsCount < maximumSessions) {
return Mono.empty();
}
if (registeredSessionsCount == maximumSessions) {
for (ReactiveSessionInformation registeredSession : registeredSessions) {
if (registeredSession.getSessionId().equals(currentSession.getId())) {
return Mono.empty();
}
}
}
return this.maximumSessionsExceededHandler.handle(new MaximumSessionsContext(authentication,
registeredSessions, maximumSessions, currentSession));
});
}
/**
* Sets the strategy used to resolve the maximum number of sessions that are allowed
* for a specific {@link Authentication}. By default, it returns {@code 1} for any
* authentication.
* @param sessionLimit the {@link SessionLimit} to use
*/
public void setSessionLimit(SessionLimit sessionLimit) {
Assert.notNull(sessionLimit, "sessionLimit cannot be null");
this.sessionLimit = sessionLimit;
}
}
|
ConcurrentSessionControlServerAuthenticationSuccessHandler
|
java
|
quarkusio__quarkus
|
integration-tests/gradle/src/main/resources/custom-jar-classifier-dependency/project-a/src/main/java/io/blob/Intermediate.java
|
{
"start": 52,
"end": 176
}
|
class ____ {
public void someMethod() {
SomeCLass someClass = new SomeCLass();
someClass.doWork();
}
}
|
Intermediate
|
java
|
dropwizard__dropwizard
|
dropwizard-example/src/main/java/com/example/helloworld/resources/ViewResource.java
|
{
"start": 234,
"end": 1122
}
|
class ____ {
@GET
@Produces("text/html;charset=UTF-8")
@Path("/utf8.ftl")
public View freemarkerUTF8() {
return new View("/views/ftl/utf8.ftl", StandardCharsets.UTF_8) {
};
}
@GET
@Produces("text/html;charset=ISO-8859-1")
@Path("/iso88591.ftl")
public View freemarkerISO88591() {
return new View("/views/ftl/iso88591.ftl", StandardCharsets.ISO_8859_1) {
};
}
@GET
@Produces("text/html;charset=UTF-8")
@Path("/utf8.mustache")
public View mustacheUTF8() {
return new View("/views/mustache/utf8.mustache", StandardCharsets.UTF_8) {
};
}
@GET
@Produces("text/html;charset=ISO-8859-1")
@Path("/iso88591.mustache")
public View mustacheISO88591() {
return new View("/views/mustache/iso88591.mustache", StandardCharsets.ISO_8859_1) {
};
}
}
|
ViewResource
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/internal/util/SerializationHelper.java
|
{
"start": 8642,
"end": 8860
}
|
class ____ to use; mainly here we are worried about deserializing user classes in
* environments (app servers, etc) where Hibernate is on a parent classes loader. To
* facilitate for that we allow passing in the
|
loader
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/subscribers/SerializedSubscriberTest.java
|
{
"start": 1286,
"end": 18913
}
|
class ____ extends RxJavaTest {
Subscriber<String> subscriber;
@Before
public void before() {
subscriber = TestHelper.mockSubscriber();
}
private Subscriber<String> serializedSubscriber(Subscriber<String> subscriber) {
return new SerializedSubscriber<>(subscriber);
}
@Test
public void singleThreadedBasic() {
TestSingleThreadedPublisher onSubscribe = new TestSingleThreadedPublisher("one", "two", "three");
Flowable<String> w = Flowable.unsafeCreate(onSubscribe);
Subscriber<String> aw = serializedSubscriber(subscriber);
w.subscribe(aw);
onSubscribe.waitToFinish();
verify(subscriber, times(1)).onNext("one");
verify(subscriber, times(1)).onNext("two");
verify(subscriber, times(1)).onNext("three");
verify(subscriber, never()).onError(any(Throwable.class));
verify(subscriber, times(1)).onComplete();
// non-deterministic because unsubscribe happens after 'waitToFinish' releases
// so commenting out for now as this is not a critical thing to test here
// verify(s, times(1)).unsubscribe();
}
@Test
public void multiThreadedBasic() {
TestMultiThreadedObservable onSubscribe = new TestMultiThreadedObservable("one", "two", "three");
Flowable<String> w = Flowable.unsafeCreate(onSubscribe);
BusySubscriber busySubscriber = new BusySubscriber();
Subscriber<String> aw = serializedSubscriber(busySubscriber);
w.subscribe(aw);
onSubscribe.waitToFinish();
assertEquals(3, busySubscriber.onNextCount.get());
assertFalse(busySubscriber.onError);
assertTrue(busySubscriber.onComplete);
// non-deterministic because unsubscribe happens after 'waitToFinish' releases
// so commenting out for now as this is not a critical thing to test here
// verify(s, times(1)).unsubscribe();
// we can have concurrency ...
assertTrue(onSubscribe.maxConcurrentThreads.get() > 1);
// ... but the onNext execution should be single threaded
assertEquals(1, busySubscriber.maxConcurrentThreads.get());
}
@Test
public void multiThreadedWithNPE() throws InterruptedException {
TestMultiThreadedObservable onSubscribe = new TestMultiThreadedObservable("one", "two", "three", null);
Flowable<String> w = Flowable.unsafeCreate(onSubscribe);
BusySubscriber busySubscriber = new BusySubscriber();
Subscriber<String> aw = serializedSubscriber(busySubscriber);
w.subscribe(aw);
onSubscribe.waitToFinish();
busySubscriber.terminalEvent.await();
System.out.println("OnSubscribe maxConcurrentThreads: " + onSubscribe.maxConcurrentThreads.get() + " Subscriber maxConcurrentThreads: " + busySubscriber.maxConcurrentThreads.get());
// we can't know how many onNext calls will occur since they each run on a separate thread
// that depends on thread scheduling so 0, 1, 2 and 3 are all valid options
// assertEquals(3, busySubscriber.onNextCount.get());
assertTrue(busySubscriber.onNextCount.get() < 4);
assertTrue(busySubscriber.onError);
// no onComplete because onError was invoked
assertFalse(busySubscriber.onComplete);
// non-deterministic because unsubscribe happens after 'waitToFinish' releases
// so commenting out for now as this is not a critical thing to test here
//verify(s, times(1)).unsubscribe();
// we can have concurrency ...
assertTrue(onSubscribe.maxConcurrentThreads.get() > 1);
// ... but the onNext execution should be single threaded
assertEquals(1, busySubscriber.maxConcurrentThreads.get());
}
@Test
public void multiThreadedWithNPEinMiddle() {
int n = 10;
for (int i = 0; i < n; i++) {
TestMultiThreadedObservable onSubscribe = new TestMultiThreadedObservable("one", "two", "three", null,
"four", "five", "six", "seven", "eight", "nine");
Flowable<String> w = Flowable.unsafeCreate(onSubscribe);
BusySubscriber busySubscriber = new BusySubscriber();
Subscriber<String> aw = serializedSubscriber(busySubscriber);
w.subscribe(aw);
onSubscribe.waitToFinish();
System.out.println("OnSubscribe maxConcurrentThreads: " + onSubscribe.maxConcurrentThreads.get() + " Subscriber maxConcurrentThreads: " + busySubscriber.maxConcurrentThreads.get());
// we can have concurrency ...
assertTrue(onSubscribe.maxConcurrentThreads.get() > 1);
// ... but the onNext execution should be single threaded
assertEquals(1, busySubscriber.maxConcurrentThreads.get());
// this should not be the full number of items since the error should stop it before it completes all 9
System.out.println("onNext count: " + busySubscriber.onNextCount.get());
assertFalse(busySubscriber.onComplete);
assertTrue(busySubscriber.onError);
assertTrue(busySubscriber.onNextCount.get() < 9);
// no onComplete because onError was invoked
// non-deterministic because unsubscribe happens after 'waitToFinish' releases
// so commenting out for now as this is not a critical thing to test here
// verify(s, times(1)).unsubscribe();
}
}
/**
* A non-realistic use case that tries to expose thread-safety issues by throwing lots of out-of-order
* events on many threads.
*/
@Test
public void runOutOfOrderConcurrencyTest() {
ExecutorService tp = Executors.newFixedThreadPool(20);
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
TestConcurrencySubscriber tw = new TestConcurrencySubscriber();
// we need Synchronized + SafeSubscriber to handle synchronization plus life-cycle
Subscriber<String> w = serializedSubscriber(new SafeSubscriber<>(tw));
Future<?> f1 = tp.submit(new OnNextThread(w, 12000));
Future<?> f2 = tp.submit(new OnNextThread(w, 5000));
Future<?> f3 = tp.submit(new OnNextThread(w, 75000));
Future<?> f4 = tp.submit(new OnNextThread(w, 13500));
Future<?> f5 = tp.submit(new OnNextThread(w, 22000));
Future<?> f6 = tp.submit(new OnNextThread(w, 15000));
Future<?> f7 = tp.submit(new OnNextThread(w, 7500));
Future<?> f8 = tp.submit(new OnNextThread(w, 23500));
Future<?> f10 = tp.submit(new CompletionThread(w, TestConcurrencySubscriberEvent.onComplete, f1, f2, f3, f4));
try {
Thread.sleep(1);
} catch (InterruptedException e) {
// ignore
}
Future<?> f11 = tp.submit(new CompletionThread(w, TestConcurrencySubscriberEvent.onComplete, f4, f6, f7));
Future<?> f12 = tp.submit(new CompletionThread(w, TestConcurrencySubscriberEvent.onComplete, f4, f6, f7));
Future<?> f13 = tp.submit(new CompletionThread(w, TestConcurrencySubscriberEvent.onComplete, f4, f6, f7));
Future<?> f14 = tp.submit(new CompletionThread(w, TestConcurrencySubscriberEvent.onComplete, f4, f6, f7));
// // the next 4 onError events should wait on same as f10
Future<?> f15 = tp.submit(new CompletionThread(w, TestConcurrencySubscriberEvent.onError, f1, f2, f3, f4));
Future<?> f16 = tp.submit(new CompletionThread(w, TestConcurrencySubscriberEvent.onError, f1, f2, f3, f4));
Future<?> f17 = tp.submit(new CompletionThread(w, TestConcurrencySubscriberEvent.onError, f1, f2, f3, f4));
Future<?> f18 = tp.submit(new CompletionThread(w, TestConcurrencySubscriberEvent.onError, f1, f2, f3, f4));
waitOnThreads(f1, f2, f3, f4, f5, f6, f7, f8, f10, f11, f12, f13, f14, f15, f16, f17, f18);
@SuppressWarnings("unused")
int numNextEvents = tw.assertEvents(null); // no check of type since we don't want to test barging results here, just interleaving behavior
// System.out.println("Number of events executed: " + numNextEvents);
for (int i = 0; i < errors.size(); i++) {
TestHelper.assertUndeliverable(errors, i, RuntimeException.class);
}
} catch (Throwable e) {
fail("Concurrency test failed: " + e.getMessage());
e.printStackTrace();
} finally {
tp.shutdown();
try {
tp.awaitTermination(5000, TimeUnit.MILLISECONDS);
} catch (InterruptedException e) {
e.printStackTrace();
}
RxJavaPlugins.reset();
}
}
@Test
public void runConcurrencyTest() {
ExecutorService tp = Executors.newFixedThreadPool(20);
try {
TestConcurrencySubscriber tw = new TestConcurrencySubscriber();
// we need Synchronized + SafeSubscriber to handle synchronization plus life-cycle
Subscriber<String> w = serializedSubscriber(new SafeSubscriber<>(tw));
w.onSubscribe(new BooleanSubscription());
Future<?> f1 = tp.submit(new OnNextThread(w, 12000));
Future<?> f2 = tp.submit(new OnNextThread(w, 5000));
Future<?> f3 = tp.submit(new OnNextThread(w, 75000));
Future<?> f4 = tp.submit(new OnNextThread(w, 13500));
Future<?> f5 = tp.submit(new OnNextThread(w, 22000));
Future<?> f6 = tp.submit(new OnNextThread(w, 15000));
Future<?> f7 = tp.submit(new OnNextThread(w, 7500));
Future<?> f8 = tp.submit(new OnNextThread(w, 23500));
// 12000 + 5000 + 75000 + 13500 + 22000 + 15000 + 7500 + 23500 = 173500
Future<?> f10 = tp.submit(new CompletionThread(w, TestConcurrencySubscriberEvent.onComplete, f1, f2, f3, f4, f5, f6, f7, f8));
try {
Thread.sleep(1);
} catch (InterruptedException e) {
// ignore
}
waitOnThreads(f1, f2, f3, f4, f5, f6, f7, f8, f10);
int numNextEvents = tw.assertEvents(null); // no check of type since we don't want to test barging results here, just interleaving behavior
assertEquals(173500, numNextEvents);
// System.out.println("Number of events executed: " + numNextEvents);
} catch (Throwable e) {
fail("Concurrency test failed: " + e.getMessage());
e.printStackTrace();
} finally {
tp.shutdown();
try {
tp.awaitTermination(25000, TimeUnit.MILLISECONDS);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
/**
* Test that a notification does not get delayed in the queue waiting for the next event to push it through.
*
* @throws InterruptedException if the await is interrupted
*/
@Ignore("this is non-deterministic ... haven't figured out what's wrong with the test yet (benjchristensen: July 2014)")
@Test
public void notificationDelay() throws InterruptedException {
ExecutorService tp1 = Executors.newFixedThreadPool(1);
ExecutorService tp2 = Executors.newFixedThreadPool(1);
try {
int n = 10;
for (int i = 0; i < n; i++) {
final CountDownLatch firstOnNext = new CountDownLatch(1);
final CountDownLatch onNextCount = new CountDownLatch(2);
final CountDownLatch latch = new CountDownLatch(1);
final CountDownLatch running = new CountDownLatch(2);
TestSubscriberEx<String> ts = new TestSubscriberEx<>(new DefaultSubscriber<String>() {
@Override
public void onComplete() {
}
@Override
public void onError(Throwable e) {
}
@Override
public void onNext(String t) {
firstOnNext.countDown();
// force it to take time when delivering so the second one is enqueued
try {
latch.await();
} catch (InterruptedException e) {
}
}
});
Subscriber<String> subscriber = serializedSubscriber(ts);
Future<?> f1 = tp1.submit(new OnNextThread(subscriber, 1, onNextCount, running));
Future<?> f2 = tp2.submit(new OnNextThread(subscriber, 1, onNextCount, running));
running.await(); // let one of the OnNextThread actually run before proceeding
firstOnNext.await();
Thread t1 = ts.lastThread();
System.out.println("first onNext on thread: " + t1);
latch.countDown();
waitOnThreads(f1, f2);
// not completed yet
assertEquals(2, ts.values().size());
Thread t2 = ts.lastThread();
System.out.println("second onNext on thread: " + t2);
assertSame(t1, t2);
System.out.println(ts.values());
subscriber.onComplete();
System.out.println(ts.values());
}
} finally {
tp1.shutdown();
tp2.shutdown();
}
}
/**
* Demonstrates thread starvation problem.
*
* No solution on this for now. Trade-off in this direction as per https://github.com/ReactiveX/RxJava/issues/998#issuecomment-38959474
* Probably need backpressure for this to work
*
* When using SynchronizedSubscriber we get this output:
*
* {@code p1: 18 p2: 68 =>} should be close to each other unless we have thread starvation
*
* When using SerializedSubscriber we get:
*
* {@code p1: 1 p2: 2445261 =>} should be close to each other unless we have thread starvation
*
* This demonstrates how SynchronizedSubscriber balances back and forth better, and blocks emission.
* The real issue in this example is the async buffer-bloat, so we need backpressure.
*
*
* @throws InterruptedException if the await is interrupted
*/
@Ignore("Demonstrates thread starvation problem. Read JavaDoc")
@Test
public void threadStarvation() throws InterruptedException {
TestSubscriber<String> ts = new TestSubscriber<>(new DefaultSubscriber<String>() {
@Override
public void onComplete() {
}
@Override
public void onError(Throwable e) {
}
@Override
public void onNext(String t) {
// force it to take time when delivering
try {
Thread.sleep(1);
} catch (InterruptedException e) {
}
}
});
final Subscriber<String> subscriber = serializedSubscriber(ts);
AtomicInteger p1 = new AtomicInteger();
AtomicInteger p2 = new AtomicInteger();
subscriber.onSubscribe(new BooleanSubscription());
ResourceSubscriber<String> as1 = new ResourceSubscriber<String>() {
@Override
public void onNext(String t) {
subscriber.onNext(t);
}
@Override
public void onError(Throwable t) {
RxJavaPlugins.onError(t);
}
@Override
public void onComplete() {
}
};
ResourceSubscriber<String> as2 = new ResourceSubscriber<String>() {
@Override
public void onNext(String t) {
subscriber.onNext(t);
}
@Override
public void onError(Throwable t) {
RxJavaPlugins.onError(t);
}
@Override
public void onComplete() {
}
};
infinite(p1).subscribe(as1);
infinite(p2).subscribe(as2);
Thread.sleep(100);
System.out.println("p1: " + p1.get() + " p2: " + p2.get() + " => should be close to each other unless we have thread starvation");
assertEquals(p1.get(), p2.get(), 10000); // fairly distributed within 10000 of each other
as1.dispose();
as2.dispose();
}
private static void waitOnThreads(Future<?>... futures) {
for (Future<?> f : futures) {
try {
f.get(20, TimeUnit.SECONDS);
} catch (Throwable e) {
System.err.println("Failed while waiting on future.");
e.printStackTrace();
}
}
}
private static Flowable<String> infinite(final AtomicInteger produced) {
return Flowable.unsafeCreate(new Publisher<String>() {
@Override
public void subscribe(Subscriber<? super String> s) {
BooleanSubscription bs = new BooleanSubscription();
s.onSubscribe(bs);
while (!bs.isCancelled()) {
s.onNext("onNext");
produced.incrementAndGet();
}
}
}).subscribeOn(Schedulers.newThread());
}
/**
* A thread that will pass data to onNext.
*/
public static
|
SerializedSubscriberTest
|
java
|
spring-projects__spring-boot
|
module/spring-boot-web-server/src/test/java/org/springframework/boot/web/server/servlet/context/testcomponents/listener/TestListener.java
|
{
"start": 1601,
"end": 1912
}
|
class ____ implements Filter {
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
throws IOException, ServletException {
request.setAttribute("listenerAddedFilterAttribute", "charlie");
chain.doFilter(request, response);
}
}
}
|
ListenerAddedFilter
|
java
|
elastic__elasticsearch
|
test/framework/src/main/java/org/elasticsearch/datageneration/matchers/Messages.java
|
{
"start": 763,
"end": 2287
}
|
class ____ {
public static String formatErrorMessage(
final XContentBuilder actualMappings,
final Settings.Builder actualSettings,
final XContentBuilder expectedMappings,
final Settings.Builder expectedSettings,
final String errorMessage
) {
return "Error ["
+ errorMessage
+ "] "
+ "actual mappings ["
+ Strings.toString(actualMappings)
+ "] "
+ "actual settings ["
+ Strings.toString(actualSettings.build())
+ "] "
+ "expected mappings ["
+ Strings.toString(expectedMappings)
+ "] "
+ "expected settings ["
+ Strings.toString(expectedSettings.build())
+ "] ";
}
public static String prettyPrintArrays(final Object[] actualArray, final Object[] expectedArray) {
return "actual: "
+ prettyPrintCollection(Arrays.asList(actualArray))
+ ", expected: "
+ prettyPrintCollection(Arrays.asList(expectedArray));
}
public static <T> String prettyPrintCollections(final Collection<T> actualList, final Collection<T> expectedList) {
return "actual: " + prettyPrintCollection(actualList) + ", expected: " + prettyPrintCollection(expectedList);
}
private static <T> String prettyPrintCollection(final Collection<T> list) {
return "[" + list.stream().map(Object::toString).collect(Collectors.joining(", ")) + "]";
}
}
|
Messages
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/strategy/RevisionEndTimestampJoinedInheritanceTest.java
|
{
"start": 3648,
"end": 4687
}
|
class ____ {
@Id
@GeneratedValue
private Integer id;
private String name;
Employee() {
}
Employee(String name) {
this.name = name;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@Override
public int hashCode() {
int result = ( id != null ? id.hashCode() : 0 );
result = result * 31 + ( name != null ? name.hashCode() : 0 );
return result;
}
@Override
public boolean equals(Object object) {
if ( this == object ) {
return true;
}
if ( object == null || !( object instanceof Employee ) ) {
return false;
}
Employee that = (Employee) object;
if ( id != null ? !id.equals( that.id ) : that.id != null ) {
return false;
}
return !( name != null ? !name.equals( that.name ) : that.name != null );
}
}
@Audited
@Entity(name = "FullTimeEmployee")
@DiscriminatorValue("FT")
public static
|
Employee
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/EventTimeTemporalJoinRewriteRule.java
|
{
"start": 8130,
"end": 38207
}
|
interface ____ extends RelRule.Config {
RelRule.Config JOIN_CALC_SNAPSHOT_CALC_WMA_CALC_TS =
ImmutableEventTimeTemporalJoinRewriteRule.Config.builder()
.build()
.withDescription(
"EventTimeTemporalJoinRewriteRule_CALC_SNAPSHOT_CALC_WMA_CALC")
.as(Config.class)
.withOperandSupplier(
joinTransform ->
joinTransform
.operand(FlinkLogicalJoin.class)
.inputs(
left ->
left.operand(FlinkLogicalRel.class)
.anyInputs(),
right ->
right.operand(
FlinkLogicalCalc
.class)
.oneInput(
r1 ->
r1.operand(
FlinkLogicalSnapshot
.class)
.oneInput(
r2 ->
r2.operand(
FlinkLogicalCalc
.class)
.oneInput(
r3 ->
r3.operand(
FlinkLogicalWatermarkAssigner
.class)
.oneInput(
r4 ->
r4.operand(
FlinkLogicalCalc
.class)
.oneInput(
r5 ->
r5.operand(
FlinkLogicalTableSourceScan
.class)
.noInputs())))))));
RelRule.Config JOIN_CALC_SNAPSHOT_CALC_WMA_TS =
ImmutableEventTimeTemporalJoinRewriteRule.Config.builder()
.build()
.withDescription("EventTimeTemporalJoinRewriteRule_CALC_SNAPSHOT_CALC_WMA")
.as(Config.class)
.withOperandSupplier(
joinTransform ->
joinTransform
.operand(FlinkLogicalJoin.class)
.inputs(
left ->
left.operand(FlinkLogicalRel.class)
.anyInputs(),
right ->
right.operand(
FlinkLogicalCalc
.class)
.oneInput(
r1 ->
r1.operand(
FlinkLogicalSnapshot
.class)
.oneInput(
r2 ->
r2.operand(
FlinkLogicalCalc
.class)
.oneInput(
r3 ->
r3.operand(
FlinkLogicalWatermarkAssigner
.class)
.oneInput(
r4 ->
r4.operand(
FlinkLogicalTableSourceScan
.class)
.noInputs()))))));
RelRule.Config JOIN_CALC_SNAPSHOT_WMA_CALC_TS =
ImmutableEventTimeTemporalJoinRewriteRule.Config.builder()
.build()
.withDescription("EventTimeTemporalJoinRewriteRule_CALC_SNAPSHOT_WMA_CALC")
.as(Config.class)
.withOperandSupplier(
joinTransform ->
joinTransform
.operand(FlinkLogicalJoin.class)
.inputs(
left ->
left.operand(FlinkLogicalRel.class)
.anyInputs(),
right ->
right.operand(
FlinkLogicalCalc
.class)
.oneInput(
r1 ->
r1.operand(
FlinkLogicalSnapshot
.class)
.oneInput(
r2 ->
r2.operand(
FlinkLogicalWatermarkAssigner
.class)
.oneInput(
r3 ->
r3.operand(
FlinkLogicalCalc
.class)
.oneInput(
r4 ->
r4.operand(
FlinkLogicalTableSourceScan
.class)
.noInputs()))))));
RelRule.Config JOIN_CALC_SNAPSHOT_WMA_TS =
ImmutableEventTimeTemporalJoinRewriteRule.Config.builder()
.build()
.withDescription("EventTimeTemporalJoinRewriteRule_CALC_SNAPSHOT_WMA")
.as(Config.class)
.withOperandSupplier(
joinTransform ->
joinTransform
.operand(FlinkLogicalJoin.class)
.inputs(
left ->
left.operand(FlinkLogicalRel.class)
.anyInputs(),
right ->
right.operand(
FlinkLogicalCalc
.class)
.oneInput(
r1 ->
r1.operand(
FlinkLogicalSnapshot
.class)
.oneInput(
r2 ->
r2.operand(
FlinkLogicalWatermarkAssigner
.class)
.oneInput(
r3 ->
r3.operand(
FlinkLogicalTableSourceScan
.class)
.noInputs())))));
RelRule.Config JOIN_SNAPSHOT_CALC_WMA_CALC_TS =
ImmutableEventTimeTemporalJoinRewriteRule.Config.builder()
.build()
.withDescription("EventTimeTemporalJoinRewriteRule_SNAPSHOT_CALC_WMA_CALC")
.as(Config.class)
.withOperandSupplier(
joinTransform ->
joinTransform
.operand(FlinkLogicalJoin.class)
.inputs(
left ->
left.operand(FlinkLogicalRel.class)
.anyInputs(),
right ->
right.operand(
FlinkLogicalSnapshot
.class)
.oneInput(
r1 ->
r1.operand(
FlinkLogicalCalc
.class)
.oneInput(
r2 ->
r2.operand(
FlinkLogicalWatermarkAssigner
.class)
.oneInput(
r3 ->
r3.operand(
FlinkLogicalCalc
.class)
.oneInput(
r4 ->
r4.operand(
FlinkLogicalTableSourceScan
.class)
.noInputs()))))));
RelRule.Config JOIN_SNAPSHOT_CALC_WMA_TS =
ImmutableEventTimeTemporalJoinRewriteRule.Config.builder()
.build()
.withDescription("EventTimeTemporalJoinRewriteRule_SNAPSHOT_CALC_WMA")
.as(Config.class)
.withOperandSupplier(
joinTransform ->
joinTransform
.operand(FlinkLogicalJoin.class)
.inputs(
left ->
left.operand(FlinkLogicalRel.class)
.anyInputs(),
right ->
right.operand(
FlinkLogicalSnapshot
.class)
.oneInput(
r1 ->
r1.operand(
FlinkLogicalCalc
.class)
.oneInput(
r2 ->
r2.operand(
FlinkLogicalWatermarkAssigner
.class)
.oneInput(
r3 ->
r3.operand(
FlinkLogicalTableSourceScan
.class)
.noInputs())))));
RelRule.Config JOIN_SNAPSHOT_WMA_CALC_TS =
ImmutableEventTimeTemporalJoinRewriteRule.Config.builder()
.build()
.withDescription("EventTimeTemporalJoinRewriteRule_SNAPSHOT_WMA_CALC")
.as(Config.class)
.withOperandSupplier(
joinTransform ->
joinTransform
.operand(FlinkLogicalJoin.class)
.inputs(
left ->
left.operand(FlinkLogicalRel.class)
.anyInputs(),
right ->
right.operand(
FlinkLogicalSnapshot
.class)
.oneInput(
r1 ->
r1.operand(
FlinkLogicalWatermarkAssigner
.class)
.oneInput(
r2 ->
r2.operand(
FlinkLogicalCalc
.class)
.oneInput(
r3 ->
r3.operand(
FlinkLogicalTableSourceScan
.class)
.noInputs())))));
RelRule.Config JOIN_SNAPSHOT_WMA_TS =
ImmutableEventTimeTemporalJoinRewriteRule.Config.builder()
.build()
.withDescription("EventTimeTemporalJoinRewriteRule_SNAPSHOT_WMA")
.as(Config.class)
.withOperandSupplier(
joinTransform ->
joinTransform
.operand(FlinkLogicalJoin.class)
.inputs(
left ->
left.operand(FlinkLogicalRel.class)
.anyInputs(),
right ->
right.operand(
FlinkLogicalSnapshot
.class)
.oneInput(
r1 ->
r1.operand(
FlinkLogicalWatermarkAssigner
.class)
.oneInput(
r2 ->
r2.operand(
FlinkLogicalTableSourceScan
.class)
.noInputs()))));
@Override
default RelOptRule toRule() {
return new EventTimeTemporalJoinRewriteRule(this);
}
}
}
|
Config
|
java
|
junit-team__junit5
|
documentation/src/test/java/example/ParameterizedTestDemo.java
|
{
"start": 17657,
"end": 19777
}
|
interface ____ {
}
// end::ArgumentsAggregator_with_custom_annotation_example_CsvToPerson[]
// @formatter:on
// tag::custom_display_names[]
@DisplayName("Display name of container")
@ParameterizedTest(name = "{index} ==> the rank of {0} is {1}")
@CsvSource({ "apple, 1", "banana, 2", "'lemon, lime', 3" })
void testWithCustomDisplayNames(String fruit, int rank) {
}
// end::custom_display_names[]
// @formatter:off
// tag::named_arguments[]
@DisplayName("A parameterized test with named arguments")
@ParameterizedTest(name = "{index}: {0}")
@MethodSource("namedArguments")
void testWithNamedArguments(File file) {
}
static Stream<Arguments> namedArguments() {
return Stream.of(
arguments(named("An important file", new File("path1"))),
arguments(named("Another file", new File("path2")))
);
}
// end::named_arguments[]
// @formatter:on
// @formatter:off
// tag::named_argument_set[]
@DisplayName("A parameterized test with named argument sets")
@ParameterizedTest
@FieldSource("argumentSets")
void testWithArgumentSets(File file1, File file2) {
}
static List<Arguments> argumentSets = Arrays.asList(
argumentSet("Important files", new File("path1"), new File("path2")),
argumentSet("Other files", new File("path3"), new File("path4"))
);
// end::named_argument_set[]
// @formatter:on
// tag::repeatable_annotations[]
@DisplayName("A parameterized test that makes use of repeatable annotations")
@ParameterizedTest
@MethodSource("someProvider")
@MethodSource("otherProvider")
void testWithRepeatedAnnotation(String argument) {
assertNotNull(argument);
}
static Stream<String> someProvider() {
return Stream.of("foo");
}
static Stream<String> otherProvider() {
return Stream.of("bar");
}
// end::repeatable_annotations[]
@Disabled("Fails prior to invoking the test method")
// tag::argument_count_validation[]
@ParameterizedTest(argumentCountValidation = ArgumentCountValidationMode.STRICT)
@CsvSource({ "42, -666" })
void testWithArgumentCountValidation(int number) {
assertTrue(number > 0);
}
// end::argument_count_validation[]
}
|
CsvToPerson
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java
|
{
"start": 68357,
"end": 68827
}
|
class ____
implements SingleArcTransition<JobImpl, JobEvent> {
@Override
public void transition(JobImpl job, JobEvent event) {
JobStateInternal finalState = JobStateInternal.valueOf(
((JobAbortCompletedEvent) event).getFinalState().name());
job.unsuccessfulFinish(finalState);
}
}
//This transition happens when a job is to be failed. It waits for all the
//tasks to finish / be killed.
private static
|
JobAbortCompletedTransition
|
java
|
elastic__elasticsearch
|
x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/Profile.java
|
{
"start": 836,
"end": 7893
}
|
enum ____ {
STANDARD() {
@Override
public String textBody(MimeMessage msg) throws IOException, MessagingException {
MimeMultipart mixed = (MimeMultipart) msg.getContent();
MimeMultipart related = null;
for (int i = 0; i < mixed.getCount(); i++) {
MimeBodyPart part = (MimeBodyPart) mixed.getBodyPart(i);
if (part.getContentType().startsWith("multipart/related")) {
related = (MimeMultipart) part.getContent();
break;
}
}
if (related == null) {
throw new IllegalStateException(
"could not extract body text from mime message using [standard] profile. could not find "
+ "part content type with [multipart/related]"
);
}
MimeMultipart alternative = null;
for (int i = 0; i < related.getCount(); i++) {
MimeBodyPart part = (MimeBodyPart) related.getBodyPart(i);
if (part.getContentType().startsWith("multipart/alternative")) {
alternative = (MimeMultipart) part.getContent();
break;
}
}
if (alternative == null) {
throw new IllegalStateException(
"could not extract body text from mime message using [standard] profile. could not find "
+ "part content type with [multipart/alternative]"
);
}
for (int i = 0; i < alternative.getCount(); i++) {
MimeBodyPart part = (MimeBodyPart) alternative.getBodyPart(i);
if (part.getContentType().startsWith("text/plain")) {
return (String) part.getContent();
}
}
throw new IllegalStateException("could not extract body text from mime message using [standard] profile");
}
@Override
public MimeMessage toMimeMessage(Email email, Session session) throws MessagingException {
MimeMessage message = createCommon(email, session);
MimeMultipart mixed = new MimeMultipart("mixed");
message.setContent(mixed);
MimeMultipart related = new MimeMultipart("related");
mixed.addBodyPart(wrap(related, null));
MimeMultipart alternative = new MimeMultipart("alternative");
related.addBodyPart(wrap(alternative, "text/alternative"));
MimeBodyPart text = new MimeBodyPart();
if (email.textBody != null) {
text.setText(email.textBody, StandardCharsets.UTF_8.name());
} else {
text.setText("", StandardCharsets.UTF_8.name());
}
alternative.addBodyPart(text);
if (email.htmlBody != null) {
MimeBodyPart html = new MimeBodyPart();
html.setText(email.htmlBody, StandardCharsets.UTF_8.name(), "html");
alternative.addBodyPart(html);
}
if (email.attachments.isEmpty() == false) {
for (Attachment attachment : email.attachments.values()) {
if (attachment.isInline()) {
related.addBodyPart(attachment.bodyPart());
} else {
mixed.addBodyPart(attachment.bodyPart());
}
}
}
return message;
}
},
OUTLOOK() {
@Override
public String textBody(MimeMessage msg) throws IOException, MessagingException {
return STANDARD.textBody(msg);
}
@Override
public MimeMessage toMimeMessage(Email email, Session session) throws MessagingException {
return STANDARD.toMimeMessage(email, session);
}
},
GMAIL() {
@Override
public String textBody(MimeMessage msg) throws IOException, MessagingException {
return STANDARD.textBody(msg);
}
@Override
public MimeMessage toMimeMessage(Email email, Session session) throws MessagingException {
return STANDARD.toMimeMessage(email, session);
}
},
MAC() {
@Override
public String textBody(MimeMessage msg) throws IOException, MessagingException {
return STANDARD.textBody(msg);
}
@Override
public MimeMessage toMimeMessage(Email email, Session session) throws MessagingException {
return STANDARD.toMimeMessage(email, session);
}
};
static final String MESSAGE_ID_HEADER = "Message-ID";
public abstract MimeMessage toMimeMessage(Email email, Session session) throws MessagingException;
public abstract String textBody(MimeMessage msg) throws IOException, MessagingException;
public static Profile resolve(String name) {
Profile profile = resolve(name, null);
if (profile == null) {
throw new IllegalArgumentException("[" + name + "] is an unknown email profile");
}
return profile;
}
public static Profile resolve(String name, Profile defaultProfile) {
if (name == null) {
return defaultProfile;
}
return switch (name.toLowerCase(Locale.ROOT)) {
case "std", "standard" -> STANDARD;
case "outlook" -> OUTLOOK;
case "gmail" -> GMAIL;
case "mac" -> MAC;
default -> defaultProfile;
};
}
static MimeMessage createCommon(Email email, Session session) throws MessagingException {
MimeMessage message = new MimeMessage(session);
message.setHeader(MESSAGE_ID_HEADER, email.id);
if (email.from != null) {
message.setFrom(email.from);
}
if (email.replyTo != null) {
message.setReplyTo(email.replyTo.toArray());
}
if (email.priority != null) {
email.priority.applyTo(message);
}
message.setSentDate(Date.from(email.sentDate.toInstant()));
message.setRecipients(Message.RecipientType.TO, email.to.toArray());
if (email.cc != null) {
message.setRecipients(Message.RecipientType.CC, email.cc.toArray());
}
if (email.bcc != null) {
message.setRecipients(Message.RecipientType.BCC, email.bcc.toArray());
}
if (email.subject != null) {
message.setSubject(email.subject, StandardCharsets.UTF_8.name());
} else {
message.setSubject("", StandardCharsets.UTF_8.name());
}
return message;
}
static MimeBodyPart wrap(MimeMultipart multipart, String contentType) throws MessagingException {
MimeBodyPart part = new MimeBodyPart();
if (contentType == null) {
part.setContent(multipart);
} else {
part.setContent(multipart, contentType);
}
return part;
}
}
|
Profile
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/test/java/io/vertx/test/fakedns/FakeDNSServer.java
|
{
"start": 11907,
"end": 15535
}
|
class ____ implements ResourceRecord {
private final String ipAddress;
private final String domainName;
private boolean isTruncated;
public VertxResourceRecord(String domainName, String ipAddress) {
this.domainName = domainName;
this.ipAddress = ipAddress;
}
public boolean isTruncated() {
return isTruncated;
}
public VertxResourceRecord setTruncated(boolean truncated) {
isTruncated = truncated;
return this;
}
@Override
public String getDomainName() {
return domainName;
}
@Override
public RecordType getRecordType() {
return RecordType.A;
}
@Override
public RecordClass getRecordClass() {
return RecordClass.IN;
}
@Override
public int getTimeToLive() {
return 100;
}
@Override
public String get(String id) {
return DnsAttribute.IP_ADDRESS.equals(id) ? ipAddress : null;
}
}
private static final ResourceRecordEncoder TestAAAARecordEncoder = new ResourceRecordEncoder() {
@Override
protected void putResourceRecordData(IoBuffer ioBuffer, ResourceRecord resourceRecord) {
if (!resourceRecord.get(DnsAttribute.IP_ADDRESS).equals("::1")) {
throw new IllegalStateException("Only supposed to be used with IPV6 address of ::1");
}
// encode the ::1
ioBuffer.put(new byte[]{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1});
}
};
private final DnsMessageEncoder encoder = new DnsMessageEncoder();
private void encode(DnsMessage dnsMessage, IoBuffer buf) {
// Hack
if (dnsMessage.getAnswerRecords().size() == 1 && dnsMessage.getAnswerRecords().get(0) instanceof VertxResourceRecord) {
VertxResourceRecord vrr = (VertxResourceRecord) dnsMessage.getAnswerRecords().get(0);
DnsMessageModifier modifier = new DnsMessageModifier();
modifier.setTransactionId(dnsMessage.getTransactionId());
modifier.setMessageType(dnsMessage.getMessageType());
modifier.setOpCode(dnsMessage.getOpCode());
modifier.setAuthoritativeAnswer(dnsMessage.isAuthoritativeAnswer());
modifier.setTruncated(dnsMessage.isTruncated());
modifier.setRecursionDesired(dnsMessage.isRecursionDesired());
modifier.setRecursionAvailable(dnsMessage.isRecursionAvailable());
modifier.setReserved(dnsMessage.isReserved());
modifier.setAcceptNonAuthenticatedData(dnsMessage.isAcceptNonAuthenticatedData());
modifier.setResponseCode(dnsMessage.getResponseCode());
modifier.setQuestionRecords(dnsMessage.getQuestionRecords());
modifier.setAnswerRecords(dnsMessage.getAnswerRecords());
modifier.setAuthorityRecords(dnsMessage.getAuthorityRecords());
modifier.setAdditionalRecords(dnsMessage.getAdditionalRecords());
modifier.setTruncated(vrr.isTruncated);
dnsMessage = modifier.getDnsMessage();
}
encoder.encode(buf, dnsMessage);
for (ResourceRecord record: dnsMessage.getAnswerRecords()) {
// This is a hack to allow to also test for AAAA resolution as DnsMessageEncoder does not support it and it
// is hard to extend, because the interesting methods are private...
// In case of RecordType.AAAA we need to encode the RecordType by ourself
if (record.getRecordType() == RecordType.AAAA) {
try {
TestAAAARecordEncoder.put(buf, record);
} catch (IOException e) {
// Should never happen
throw new IllegalStateException(e);
}
}
}
}
/**
* ProtocolCodecFactory which allows to test AAAA resolution
*/
private final
|
VertxResourceRecord
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/operator/comparison/In.java
|
{
"start": 1678,
"end": 6874
}
|
class ____ extends ScalarFunction {
private final Expression value;
private final List<Expression> list;
private final ZoneId zoneId;
public In(Source source, Expression value, List<Expression> list) {
this(source, value, list, null);
}
public In(Source source, Expression value, List<Expression> list, ZoneId zoneId) {
super(source, CollectionUtils.combine(list, value));
this.value = value;
this.list = new ArrayList<>(new LinkedHashSet<>(list));
this.zoneId = zoneId;
}
@Override
protected NodeInfo<In> info() {
return NodeInfo.create(this, In::new, value(), list(), zoneId());
}
@Override
public Expression replaceChildren(List<Expression> newChildren) {
return new In(source(), newChildren.get(newChildren.size() - 1), newChildren.subList(0, newChildren.size() - 1), zoneId());
}
public ZoneId zoneId() {
return zoneId;
}
public Expression value() {
return value;
}
public List<Expression> list() {
return list;
}
@Override
public DataType dataType() {
return DataTypes.BOOLEAN;
}
@Override
public Nullability nullable() {
return Nullability.UNKNOWN;
}
@Override
public boolean foldable() {
return Expressions.foldable(children()) || (Expressions.foldable(list) && list().stream().allMatch(Expressions::isNull));
}
@Override
public Boolean fold() {
// Optimization for early return and Query folding to LocalExec
if (Expressions.isNull(value) || list.size() == 1 && Expressions.isNull(list.get(0))) {
return null;
}
return InProcessor.apply(value.fold(), foldAndConvertListOfValues(list, value.dataType()));
}
@Override
protected Expression canonicalize() {
// order values for commutative operators
List<Expression> canonicalValues = Expressions.canonicalize(list);
Collections.sort(canonicalValues, (l, r) -> Integer.compare(l.hashCode(), r.hashCode()));
return new In(source(), value, canonicalValues, zoneId);
}
@Override
public ScriptTemplate asScript() {
ScriptTemplate leftScript = asScript(value);
// fold & remove duplicates
List<Object> values = new ArrayList<>(new LinkedHashSet<>(foldAndConvertListOfValues(list, value.dataType())));
return new ScriptTemplate(
formatTemplate(format("{ql}.", "in({}, {})", leftScript.template())),
paramsBuilder().script(leftScript.params()).variable(values).build(),
dataType()
);
}
protected List<Object> foldAndConvertListOfValues(List<Expression> expressions, DataType dataType) {
List<Object> values = new ArrayList<>(expressions.size());
for (Expression e : expressions) {
values.add(DataTypeConverter.convert(Foldables.valueOf(e), dataType));
}
return values;
}
protected boolean areCompatible(DataType left, DataType right) {
return DataTypes.areCompatible(left, right);
}
@Override
protected Pipe makePipe() {
return new InPipe(source(), this, children().stream().map(Expressions::pipe).collect(Collectors.toList()));
}
@Override
protected TypeResolution resolveType() {
TypeResolution resolution = TypeResolutions.isExact(value, functionName(), DEFAULT);
if (resolution.unresolved()) {
return resolution;
}
for (Expression ex : list) {
if (ex.foldable() == false) {
return new TypeResolution(
format(
null,
"Comparisons against fields are not (currently) supported; offender [{}] in [{}]",
Expressions.name(ex),
sourceText()
)
);
}
}
DataType dt = value.dataType();
for (int i = 0; i < list.size(); i++) {
Expression listValue = list.get(i);
if (areCompatible(dt, listValue.dataType()) == false) {
return new TypeResolution(
format(
null,
"{} argument of [{}] must be [{}], found value [{}] type [{}]",
ordinal(i + 1),
sourceText(),
dt.typeName(),
Expressions.name(listValue),
listValue.dataType().typeName()
)
);
}
}
return super.resolveType();
}
public TypeResolution validateInTypes() {
return resolveType();
}
@Override
public int hashCode() {
return Objects.hash(value, list);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
In other = (In) obj;
return Objects.equals(value, other.value) && Objects.equals(list, other.list);
}
}
|
In
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/cluster/routing/allocation/NodeAllocationResult.java
|
{
"start": 6439,
"end": 10167
}
|
class ____ implements ToXContentFragment, Writeable {
private final boolean inSync;
@Nullable
private final String allocationId;
private final long matchingBytes;
@Nullable
private final Exception storeException;
public ShardStoreInfo(String allocationId, boolean inSync, Exception storeException) {
this.inSync = inSync;
this.allocationId = allocationId;
this.matchingBytes = -1;
this.storeException = storeException;
}
public ShardStoreInfo(long matchingBytes) {
this.inSync = false;
this.allocationId = null;
this.matchingBytes = matchingBytes;
this.storeException = null;
}
public ShardStoreInfo(StreamInput in) throws IOException {
this.inSync = in.readBoolean();
this.allocationId = in.readOptionalString();
this.matchingBytes = in.readLong();
this.storeException = in.readException();
}
/**
* Returns {@code true} if the shard copy is in-sync and contains the latest data.
* Returns {@code false} if the shard copy is stale or if the shard copy being examined
* is for a replica shard allocation.
*/
public boolean isInSync() {
return inSync;
}
/**
* Gets the allocation id for the shard copy, if it exists.
*/
@Nullable
public String getAllocationId() {
return allocationId;
}
/**
* Gets the number of matching bytes the shard copy has with the primary shard.
* Returns -1 if not applicable (this value only applies to assigning replica shards).
*/
public long getMatchingBytes() {
return matchingBytes;
}
/**
* Gets the store exception when trying to read the store, if there was an error. If
* there was no error, returns {@code null}.
*/
@Nullable
public Exception getStoreException() {
return storeException;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(inSync);
out.writeOptionalString(allocationId);
out.writeLong(matchingBytes);
out.writeException(storeException);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject("store");
{
if (matchingBytes < 0) {
// dealing with a primary shard
if (allocationId == null && storeException == null) {
// there was no information we could obtain of any shard data on the node
builder.field("found", false);
} else {
builder.field("in_sync", inSync);
}
}
if (allocationId != null) {
builder.field("allocation_id", allocationId);
}
if (matchingBytes >= 0) {
builder.humanReadableField("matching_size_in_bytes", "matching_size", ByteSizeValue.ofBytes(matchingBytes));
}
if (storeException != null) {
builder.startObject("store_exception");
ElasticsearchException.generateThrowableXContent(builder, params, storeException);
builder.endObject();
}
}
builder.endObject();
return builder;
}
}
}
|
ShardStoreInfo
|
java
|
spring-projects__spring-security
|
core/src/main/java/org/springframework/security/authentication/ott/reactive/ReactiveOneTimeTokenService.java
|
{
"start": 1097,
"end": 1802
}
|
interface ____ {
/**
* Generates a one-time token based on the provided generate request.
* @param request the generate request containing the necessary information to
* generate the token
* @return the generated {@link OneTimeToken}.
*/
Mono<OneTimeToken> generate(GenerateOneTimeTokenRequest request);
/**
* Consumes a one-time token based on the provided authentication token.
* @param authenticationToken the authentication token containing the one-time token
* value to be consumed
* @return the consumed {@link OneTimeToken} or empty Mono if the token is invalid
*/
Mono<OneTimeToken> consume(OneTimeTokenAuthenticationToken authenticationToken);
}
|
ReactiveOneTimeTokenService
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/manytomany/ManyToManySQLJoinTableRestrictionTest.java
|
{
"start": 1480,
"end": 5242
}
|
class ____ {
@BeforeAll
public void setUp(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final User user1 = new User( "user1" );
final Project project1 = new Project( "p1" );
project1.getManagers().add( user1 );
project1.getMembers().add( user1 );
final Project project2 = new Project( "p2" );
project2.getMembers().add( user1 );
session.persist( user1 );
session.persist( project1 );
session.persist( project2 );
final User user2 = new User( "user2" );
final User user3 = new User( "user3" );
final Project project3 = new Project( "p3" );
project3.getMembers().add( user2 );
project3.getMembers().add( user3 );
project3.getManagers().add( user2 );
project3.getOrderedUsers().add(user3);
project3.getOrderedUsers().add(user2);
session.persist( user2 );
session.persist( user3 );
session.persist( project3 );
} );
}
@AfterAll
public void tearDown(SessionFactoryScope scope) {
scope.inTransaction( session -> {
session.createMutationQuery( "delete from Project" ).executeUpdate();
session.createMutationQuery( "delete from User" ).executeUpdate();
} );
}
@Test
public void testJoinTableRemoveEmptyCollection(SessionFactoryScope scope) {
final SQLStatementInspector inspector = scope.getCollectingStatementInspector();
scope.inTransaction( session -> {
final Project p1 = session.find( Project.class, "p1" );
p1.getManagers().remove( p1.getManagers().iterator().next() );
assertThat( p1.getManagers() ).isEmpty();
inspector.clear();
} );
assertThat( inspector.getSqlQueries() ).hasSize( 1 );
assertThat( inspector.getSqlQueries().get( 0 ) ).contains( "manager" );
scope.inTransaction( session -> {
final User user1 = session.find( User.class, "user1" );
assertThat( user1.getManagedProjects() ).isEmpty();
assertThat( user1.getOtherProjects().stream().map( Project::getName ) ).contains( "p1", "p2" );
} );
}
@Test
public void testJoinTableRemoveNonEmptyCollection(SessionFactoryScope scope) {
final SQLStatementInspector inspector = scope.getCollectingStatementInspector();
scope.inTransaction( session -> {
final User user = session.find( User.class, "user2" );
final Project p3 = session.find( Project.class, "p3" );
p3.getMembers().remove( user );
assertThat( p3.getMembers() ).isNotEmpty();
inspector.clear();
} );
assertThat( inspector.getSqlQueries() ).hasSize( 1 );
assertThat( inspector.getSqlQueries().get( 0 ) ).contains( "member" );
scope.inTransaction( session -> {
final User user2 = session.find( User.class, "user2" );
assertThat( user2.getOtherProjects() ).isEmpty();
assertThat( user2.getManagedProjects().stream().map( Project::getName ) ).contains( "p3" );
} );
}
@Test
public void testJoinTableUpdate(SessionFactoryScope scope) {
final SQLStatementInspector inspector = scope.getCollectingStatementInspector();
scope.inTransaction( session -> {
final Project p3 = session.find( Project.class, "p3" );
assertThat( p3.getOrderedUsers().stream().map( User::getName ) ).containsExactly( "user3", "user2" );
p3.getOrderedUsers().sort( Comparator.comparing( User::getName ) );
inspector.clear();
} );
assertThat( inspector.getSqlQueries() ).hasSize( 2 );
assertThat( inspector.getSqlQueries().get( 0 ) ).contains( "order_col is not null" );
assertThat( inspector.getSqlQueries().get( 1 ) ).contains( "order_col is not null" );
scope.inTransaction( session -> {
final Project p3 = session.find( Project.class, "p3" );
assertThat( p3.getOrderedUsers().stream().map( User::getName ) ).containsExactly( "user2", "user3" );
} );
}
@Entity( name = "Project" )
@Table( name = "t_project" )
public static
|
ManyToManySQLJoinTableRestrictionTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/type/descriptor/java/DoublePrimitiveArrayJavaType.java
|
{
"start": 5959,
"end": 6128
}
|
class ____ extends MutableMutabilityPlan<double[]> {
@Override
protected double[] deepCopyNotNull(double[] value) {
return value.clone();
}
}
}
|
ArrayMutabilityPlan
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/detached/DetachedGetIdentifierTest.java
|
{
"start": 898,
"end": 1570
}
|
class ____ {
@Test
public void test(SessionFactoryScope scope) {
SimpleEntity[] entities = new SimpleEntity[2];
entities[0] = new SimpleEntity();
entities[0].name = "test";
scope.inTransaction( em -> {
entities[1] = em.merge( entities[0] );
assertNotNull( em.getEntityManagerFactory().getPersistenceUnitUtil().getIdentifier( entities[1] ) );
} );
// Call as detached entity
try ( SessionFactory sessionFactory = scope.getSessionFactory() ) {
assertNotNull( sessionFactory.getPersistenceUnitUtil().getIdentifier( entities[1] ) );
}
}
// --- //
@Entity(name = "SimpleEntity")
@Table( name = "SIMPLE_ENTITY" )
static
|
DetachedGetIdentifierTest
|
java
|
apache__logging-log4j2
|
log4j-api/src/main/java/org/apache/logging/log4j/util/Timer.java
|
{
"start": 1226,
"end": 1462
}
|
class ____ implements Serializable, StringBuilderFormattable {
private static final long serialVersionUID = 9175191792439630013L;
private final String name; // The timer's name
/**
* @since 2.12.0
*/
public
|
Timer
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/scheduling/annotation/EnableAsyncTests.java
|
{
"start": 15489,
"end": 15659
}
|
class ____ {
@Bean
public AsyncBean asyncBean() {
return new AsyncBean();
}
}
@Configuration
@EnableAsync(mode = AdviceMode.ASPECTJ)
static
|
OrderedAsyncConfig
|
java
|
assertj__assertj-core
|
assertj-guava/src/main/java/org/assertj/guava/error/RangeSetShouldIntersectAnyOf.java
|
{
"start": 1007,
"end": 1630
}
|
class ____ extends BasicErrorMessageFactory {
public static ErrorMessageFactory shouldIntersectAnyOf(RangeSet<?> actual, Object expected) {
return new RangeSetShouldIntersectAnyOf(actual, expected);
}
/**
* Creates a new <code>{@link BasicErrorMessageFactory}</code>.
*
* @param actual actual {@link com.google.common.collect.RangeSet}.
* @param expected expected range to intersect.
*/
private RangeSetShouldIntersectAnyOf(Object actual, Object expected) {
super("%nExpecting:%n %s%nto intersect at least one range of the given:%n %s%n", actual, expected);
}
}
|
RangeSetShouldIntersectAnyOf
|
java
|
apache__camel
|
core/camel-core-model/src/main/java/org/apache/camel/builder/Builder.java
|
{
"start": 3824,
"end": 12747
}
|
class ____ will be invoked
* @param method name of method to invoke
* @return the builder
*/
public static ValueBuilder method(Class<?> beanType, String method) {
Expression exp = new MethodCallExpression(beanType, method);
return new ValueBuilder(exp);
}
/**
* Returns a constant expression
*/
public static ValueBuilder constant(Object value) {
Expression exp;
if (value instanceof String str) {
exp = new ConstantExpression(str);
} else {
ConstantExpression ce = new ConstantExpression();
var def = ExpressionNodeHelper.toExpressionDefinition(ExpressionBuilder.constantExpression(value));
ce.setExpressionType(def);
ce.setExpression(String.valueOf(value));
exp = ce;
}
return new ValueBuilder(exp);
}
/**
* Returns a constant expression
*/
public static ValueBuilder constant(String value, Class<?> resultType) {
ConstantExpression exp = new ConstantExpression(value);
exp.setResultType(resultType);
return new ValueBuilder(exp);
}
/**
* Returns a constant expression
*/
public static ValueBuilder constant(Object value, boolean trim) {
Expression exp;
if (value instanceof String str) {
ConstantExpression ce = new ConstantExpression(str);
ce.setTrim(trim ? "true" : "false");
exp = ce;
} else {
ConstantExpression ce = new ConstantExpression();
ce.setExpressionType(ExpressionNodeHelper.toExpressionDefinition(ExpressionBuilder.constantExpression(value)));
ce.setExpression(String.valueOf(value));
ce.setTrim(trim ? "true" : "false");
exp = ce;
}
return new ValueBuilder(exp);
}
/**
* Returns a constant expression
*/
public static ValueBuilder language(String language, String expression) {
Expression exp = new LanguageExpression(language, expression);
return new ValueBuilder(exp);
}
/**
* Returns a csimple expression
*/
public static ValueBuilder csimple(String value) {
Expression exp = new CSimpleExpression(value);
return new ValueBuilder(exp);
}
/**
* Returns a csimple expression
*/
public static ValueBuilder csimple(String value, Class<?> resultType) {
CSimpleExpression exp = new CSimpleExpression(value);
exp.setResultType(resultType);
return new ValueBuilder(exp);
}
/**
* Returns a simple expression
*/
public static ValueBuilder simple(String value) {
Expression exp = new SimpleExpression(value);
return new ValueBuilder(exp);
}
/**
* Returns a simple expression
*/
public static ValueBuilder simple(String value, Class<?> resultType) {
SimpleExpression exp = new SimpleExpression(value);
exp.setResultType(resultType);
return new ValueBuilder(exp);
}
/**
* Returns a JOOR expression value builder
*/
@Deprecated(since = "4.3.0")
public static ValueBuilder joor(String value) {
JoorExpression exp = new JoorExpression(value);
return new ValueBuilder(exp);
}
/**
* Returns a JOOR expression value builder
*/
@Deprecated(since = "4.3.0")
public static ValueBuilder joor(String value, Class<?> resultType) {
JoorExpression exp = new JoorExpression(value);
exp.setResultType(resultType);
return new ValueBuilder(exp);
}
/**
* Returns a Java expression value builder
*/
public static ValueBuilder java(String value) {
JavaExpression exp = new JavaExpression(value);
return new ValueBuilder(exp);
}
/**
* Returns a Java expression value builder
*/
public static ValueBuilder java(String value, Class<?> resultType) {
JavaExpression exp = new JavaExpression(value);
exp.setResultType(resultType);
return new ValueBuilder(exp);
}
/**
* Returns a JQ expression value builder
*/
public static ValueBuilder jq(String value) {
JqExpression exp = new JqExpression(value);
return new ValueBuilder(exp);
}
/**
* Returns a JQ expression value builder
*/
public static ValueBuilder jq(String value, Class<?> resultType) {
JqExpression exp = new JqExpression(value);
exp.setResultType(resultType);
return new ValueBuilder(exp);
}
/**
* Returns a JSonPath expression value builder
*/
public static ValueBuilder jsonpath(String value) {
JsonPathExpression exp = new JsonPathExpression(value);
return new ValueBuilder(exp);
}
/**
* Returns a JSonPath expression value builder
*
* @param value The JSonPath expression
* @param resultType The result type that the JSonPath expression will return.
*/
public static ValueBuilder jsonpath(String value, Class<?> resultType) {
JsonPathExpression exp = new JsonPathExpression(value);
exp.setResultType(resultType);
return new ValueBuilder(exp);
}
/**
* Returns a predicate and value builder for headers on an exchange
*/
public static ValueBuilder header(String name) {
Expression exp = new HeaderExpression(name);
return new ValueBuilder(exp);
}
/**
* Returns a predicate and value builder for properties on an exchange
*/
public static ValueBuilder exchangeProperty(String name) {
Expression exp = new ExchangePropertyExpression(name);
return new ValueBuilder(exp);
}
/**
* Returns a predicate and value builder for the inbound body on an exchange
*/
public static ValueBuilder body() {
Expression exp = new SimpleExpression("${body}");
return new ValueBuilder(exp);
}
/**
* Returns a predicate and value builder for the inbound message body as a specific type
*/
public static <T> ValueBuilder bodyAs(Class<T> type) {
ObjectHelper.notNull(type, "type");
Expression exp = new SimpleExpression(String.format("${bodyAs(%s)}", type.getCanonicalName()));
return new ValueBuilder(exp);
}
/**
* Returns a predicate and value builder for variable
*/
public static ValueBuilder variable(String name) {
Expression exp = new VariableExpression(name);
return new ValueBuilder(exp);
}
/**
* Returns an expression for the given system property
*/
public static ValueBuilder systemProperty(final String name) {
Expression exp = new SimpleExpression(String.format("${sys.%s}", name));
return new ValueBuilder(exp);
}
/**
* Returns an expression for the given system property
*/
public static ValueBuilder systemProperty(final String name, final String defaultValue) {
return new ValueBuilder(ExpressionBuilder.systemPropertyExpression(name, defaultValue));
}
/**
* Returns a predicate and value builder for the exception message on an exchange
*/
public static ValueBuilder exceptionMessage() {
Expression exp = new SimpleExpression("${exception.message}");
return new ValueBuilder(exp);
}
/**
* Returns a predicate and value builder for the exception stacktrace on an exchange
*/
public static ValueBuilder exceptionStackTrace() {
Expression exp = new SimpleExpression("${exception.stacktrace}");
return new ValueBuilder(exp);
}
/**
* Returns an expression that replaces all occurrences of the regular expression with the given replacement
*/
public static ValueBuilder regexReplaceAll(Expression content, String regex, String replacement) {
Expression newExp = ExpressionBuilder.regexReplaceAll(content, regex, replacement);
return new ValueBuilder(newExp);
}
/**
* Returns an expression that replaces all occurrences of the regular expression with the given replacement
*/
public static ValueBuilder regexReplaceAll(Expression content, String regex, Expression replacement) {
Expression newExp = ExpressionBuilder.regexReplaceAll(content, regex, replacement);
return new ValueBuilder(newExp);
}
/**
* Call a wasm (web assembly) function.
*/
public static ValueBuilder wasm(String value) {
WasmExpression exp = new WasmExpression(value);
return new ValueBuilder(exp);
}
/**
* Call a wasm (web assembly) function.
*/
public static ValueBuilder wasm(String value, Class<?> resultType) {
WasmExpression exp = new WasmExpression(value);
exp.setResultType(resultType);
return new ValueBuilder(exp);
}
}
|
which
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcNoSuchMethodException.java
|
{
"start": 1082,
"end": 1616
}
|
class ____ extends RpcServerException {
private static final long serialVersionUID = 1L;
public RpcNoSuchMethodException(final String message) {
super(message);
}
/**
* get the rpc status corresponding to this exception
*/
public RpcStatusProto getRpcStatusProto() {
return RpcStatusProto.ERROR;
}
/**
* get the detailed rpc status corresponding to this exception
*/
public RpcErrorCodeProto getRpcErrorCodeProto() {
return RpcErrorCodeProto.ERROR_NO_SUCH_METHOD;
}
}
|
RpcNoSuchMethodException
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/time/JodaInstantWithMillisTest.java
|
{
"start": 2427,
"end": 2923
}
|
class ____ {
private static final Instant INSTANT1 = new Instant(42);
// BUG: Diagnostic contains: Instant.ofEpochMilli(44);
private static final Instant INSTANT2 = INSTANT1.withMillis(44);
}
""")
.doTest();
}
@Test
public void instantConstructorIntPrimitiveInsideJoda() {
helper
.addSourceLines(
"TestClass.java",
"""
package org.joda.time;
public
|
TestClass
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/reflect/ConstructorUtils.java
|
{
"start": 5273,
"end": 5642
}
|
class ____ find a constructor for, not {@code null}.
* @param parameterTypes find method with compatible parameters.
* @return the constructor, null if no matching accessible constructor found.
* @throws NullPointerException Thrown if {@code cls} is {@code null}
* @throws SecurityException Thrown if a security manager is present and the caller's
|
to
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestAMRMClientPlacementConstraints.java
|
{
"start": 2743,
"end": 11459
}
|
class ____ extends BaseAMRMClientTest {
private List<Container> allocatedContainers = null;
private List<RejectedSchedulingRequest> rejectedSchedulingRequests = null;
private Map<Set<String>, PlacementConstraint> pcMapping = null;
@BeforeEach
public void setup() throws Exception {
conf = new YarnConfiguration();
allocatedContainers = new ArrayList<>();
rejectedSchedulingRequests = new ArrayList<>();
pcMapping = new HashMap<>();
pcMapping.put(Collections.singleton("foo"),
PlacementConstraints.build(
PlacementConstraints.targetNotIn(NODE, allocationTag("foo"))));
pcMapping.put(Collections.singleton("bar"),
PlacementConstraints.build(
PlacementConstraints.targetNotIn(NODE, allocationTag("bar"))));
}
@Test
@Timeout(value = 60)
public void testAMRMClientWithPlacementConstraintsByPlacementProcessor()
throws Exception {
// we have to create a new instance of MiniYARNCluster to avoid SASL qop
// mismatches between client and server
conf.set(YarnConfiguration.RM_PLACEMENT_CONSTRAINTS_HANDLER,
YarnConfiguration.PROCESSOR_RM_PLACEMENT_CONSTRAINTS_HANDLER);
createClusterAndStartApplication(conf);
allocatedContainers.clear();
rejectedSchedulingRequests.clear();
AMRMClient<AMRMClient.ContainerRequest> amClient =
AMRMClient.<AMRMClient.ContainerRequest>createAMRMClient();
amClient.setNMTokenCache(new NMTokenCache());
//asserting we are not using the singleton instance cache
assertNotSame(NMTokenCache.getSingleton(),
amClient.getNMTokenCache());
AMRMClientAsync asyncClient = new AMRMClientAsyncImpl<>(amClient,
1000, new TestCallbackHandler());
asyncClient.init(conf);
asyncClient.start();
asyncClient.registerApplicationMaster("Host", 10000, "", pcMapping);
// Send two types of requests - 4 with source tag "foo" have numAlloc = 1
// and 1 with source tag "bar" and has numAlloc = 4. Both should be
// handled similarly. i.e: Since there are only 3 nodes,
// 2 schedulingRequests - 1 with source tag "foo" on one with source
// tag "bar" should get rejected.
asyncClient.addSchedulingRequests(
Arrays.asList(
// 4 reqs with numAlloc = 1
schedulingRequest(1, 1, 1, 1, 512, "foo"),
schedulingRequest(1, 1, 2, 1, 512, "foo"),
schedulingRequest(1, 1, 3, 1, 512, "foo"),
schedulingRequest(1, 1, 4, 1, 512, "foo"),
// 1 req with numAlloc = 4
schedulingRequest(4, 1, 5, 1, 512, "bar")));
// kick the scheduler
waitForContainerAllocation(allocatedContainers,
rejectedSchedulingRequests, 6, 2);
assertEquals(6, allocatedContainers.size());
Map<NodeId, List<Container>> containersPerNode =
allocatedContainers.stream().collect(
Collectors.groupingBy(Container::getNodeId));
Map<Set<String>, List<SchedulingRequest>> outstandingSchedRequests =
((AMRMClientImpl)amClient).getOutstandingSchedRequests();
// Check the outstanding SchedulingRequests
assertEquals(2, outstandingSchedRequests.size());
assertEquals(1, outstandingSchedRequests.get(
new HashSet<>(Collections.singletonList("foo"))).size());
assertEquals(1, outstandingSchedRequests.get(
new HashSet<>(Collections.singletonList("bar"))).size());
// Ensure 2 containers allocated per node.
// Each node should have a "foo" and a "bar" container.
assertEquals(3, containersPerNode.entrySet().size());
HashSet<String> srcTags = new HashSet<>(Arrays.asList("foo", "bar"));
containersPerNode.entrySet().forEach(
x ->
assertEquals(
srcTags,
x.getValue()
.stream()
.map(y -> y.getAllocationTags().iterator().next())
.collect(Collectors.toSet()))
);
// Ensure 2 rejected requests - 1 of "foo" and 1 of "bar"
assertEquals(2, rejectedSchedulingRequests.size());
assertEquals(srcTags,
rejectedSchedulingRequests
.stream()
.map(x -> x.getRequest().getAllocationTags().iterator().next())
.collect(Collectors.toSet()));
asyncClient.stop();
}
@Test
@Timeout(value = 60)
public void testAMRMClientWithPlacementConstraintsByScheduler()
throws Exception {
// we have to create a new instance of MiniYARNCluster to avoid SASL qop
// mismatches between client and server
conf.set(YarnConfiguration.RM_PLACEMENT_CONSTRAINTS_HANDLER,
YarnConfiguration.SCHEDULER_RM_PLACEMENT_CONSTRAINTS_HANDLER);
createClusterAndStartApplication(conf);
allocatedContainers.clear();
rejectedSchedulingRequests.clear();
AMRMClient<AMRMClient.ContainerRequest> amClient =
AMRMClient.<AMRMClient.ContainerRequest>createAMRMClient();
amClient.setNMTokenCache(new NMTokenCache());
//asserting we are not using the singleton instance cache
assertNotSame(NMTokenCache.getSingleton(),
amClient.getNMTokenCache());
AMRMClientAsync asyncClient = new AMRMClientAsyncImpl<>(amClient,
1000, new TestCallbackHandler());
asyncClient.init(conf);
asyncClient.start();
asyncClient.registerApplicationMaster("Host", 10000, "", pcMapping);
// Send two types of requests - 4 with source tag "foo" have numAlloc = 1
// and 1 with source tag "bar" and has numAlloc = 4. Both should be
// handled similarly. i.e: Since there are only 3 nodes,
// 2 schedulingRequests - 1 with source tag "foo" on one with source
// tag "bar" should get rejected.
asyncClient.addSchedulingRequests(
Arrays.asList(
// 4 reqs with numAlloc = 1
schedulingRequest(1, 1, 1, 1, 512, "foo"),
schedulingRequest(1, 1, 2, 1, 512, "foo"),
schedulingRequest(1, 1, 3, 1, 512, "foo"),
schedulingRequest(1, 1, 4, 1, 512, "foo"),
// 1 req with numAlloc = 4
schedulingRequest(4, 1, 5, 1, 512, "bar"),
// 1 empty tag
schedulingRequest(1, 1, 6, 1, 512, new HashSet<>())));
// kick the scheduler
waitForContainerAllocation(allocatedContainers,
rejectedSchedulingRequests, 7, 0);
assertEquals(7, allocatedContainers.size());
Map<NodeId, List<Container>> containersPerNode =
allocatedContainers.stream().collect(
Collectors.groupingBy(Container::getNodeId));
Map<Set<String>, List<SchedulingRequest>> outstandingSchedRequests =
((AMRMClientImpl)amClient).getOutstandingSchedRequests();
// Check the outstanding SchedulingRequests
assertEquals(3, outstandingSchedRequests.size());
assertEquals(1, outstandingSchedRequests.get(
new HashSet<>(Collections.singletonList("foo"))).size());
assertEquals(1, outstandingSchedRequests.get(
new HashSet<>(Collections.singletonList("bar"))).size());
assertEquals(0, outstandingSchedRequests.get(
new HashSet<String>()).size());
// Each node should have a "foo" and a "bar" container.
assertEquals(3, containersPerNode.entrySet().size());
HashSet<String> srcTags = new HashSet<>(Arrays.asList("foo", "bar"));
containersPerNode.entrySet().forEach(
x ->
assertEquals(
srcTags,
x.getValue()
.stream()
.filter(y -> !y.getAllocationTags().isEmpty())
.map(y -> y.getAllocationTags().iterator().next())
.collect(Collectors.toSet()))
);
// The rejected requests were not set by scheduler
assertEquals(0, rejectedSchedulingRequests.size());
asyncClient.stop();
}
@Test
/*
* Three cases of empty HashSet key of outstandingSchedRequests
* 1. Not set any tags
* 2. Set a empty set, e.g ImmutableSet.of(), new HashSet<>()
* 3. Set tag as null
*/
public void testEmptyKeyOfOutstandingSchedRequests() {
AMRMClient<AMRMClient.ContainerRequest> amClient =
AMRMClient.<AMRMClient.ContainerRequest>createAMRMClient();
HashSet<String> schedRequest = null;
amClient.addSchedulingRequests(Arrays.asList(
schedulingRequest(1, 1, 1, 1, 512, ExecutionType.GUARANTEED),
schedulingRequest(1, 1, 2, 1, 512, new HashSet<>()),
schedulingRequest(1, 1, 3, 1, 512, schedRequest)));
Map<Set<String>, List<SchedulingRequest>> outstandingSchedRequests =
((AMRMClientImpl)amClient).getOutstandingSchedRequests();
assertEquals(1, outstandingSchedRequests.size());
assertEquals(3, outstandingSchedRequests
.get(new HashSet<String>()).size());
}
private
|
TestAMRMClientPlacementConstraints
|
java
|
alibaba__druid
|
druid-demo-petclinic/src/main/java/org/springframework/samples/petclinic/model/NamedEntity.java
|
{
"start": 857,
"end": 981
}
|
class ____ objects needing these properties.
*
* @author Ken Krebs
* @author Juergen Hoeller
*/
@MappedSuperclass
public
|
for
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/SCMUploaderNotifyRequestPBImpl.java
|
{
"start": 1170,
"end": 2908
}
|
class ____ extends SCMUploaderNotifyRequest {
SCMUploaderNotifyRequestProto proto =
SCMUploaderNotifyRequestProto.getDefaultInstance();
SCMUploaderNotifyRequestProto.Builder builder = null;
boolean viaProto = false;
public SCMUploaderNotifyRequestPBImpl() {
builder = SCMUploaderNotifyRequestProto.newBuilder();
}
public SCMUploaderNotifyRequestPBImpl(
SCMUploaderNotifyRequestProto proto) {
this.proto = proto;
viaProto = true;
}
public SCMUploaderNotifyRequestProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
@Override
public String getResourceKey() {
SCMUploaderNotifyRequestProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasResourceKey()) ? p.getResourceKey() : null;
}
@Override
public void setResourceKey(String key) {
maybeInitBuilder();
if (key == null) {
builder.clearResourceKey();
return;
}
builder.setResourceKey(key);
}
@Override
public String getFileName() {
SCMUploaderNotifyRequestProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasFilename()) ? p.getFilename() : null;
}
@Override
public void setFilename(String filename) {
maybeInitBuilder();
if (filename == null) {
builder.clearFilename();
return;
}
builder.setFilename(filename);
}
private void mergeLocalToProto() {
if (viaProto)
maybeInitBuilder();
proto = builder.build();
viaProto = true;
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = SCMUploaderNotifyRequestProto.newBuilder(proto);
}
viaProto = false;
}
}
|
SCMUploaderNotifyRequestPBImpl
|
java
|
apache__maven
|
api/maven-api-core/src/main/java/org/apache/maven/api/services/ChecksumAlgorithmService.java
|
{
"start": 1232,
"end": 5005
}
|
interface ____ extends Service {
/**
* Returns immutable collection of all supported algorithm names.
*/
@Nonnull
Collection<String> getChecksumAlgorithmNames();
/**
* Returns {@link ChecksumAlgorithm} for given algorithm name, or throws if algorithm not supported.
*
* @throws ChecksumAlgorithmServiceException if asked algorithm name is not supported.
* @throws NullPointerException if passed in name is {@code null}.
*/
@Nonnull
ChecksumAlgorithm select(@Nonnull String algorithmName);
/**
* Returns a collection of {@link ChecksumAlgorithm} in same order as algorithm names are ordered, or throws if
* any of the algorithm name is not supported. The returned collection has equal count of elements as passed in
* collection of names, and if names contains duplicated elements, the returned list of algorithms will have
* duplicates as well.
*
* @throws ChecksumAlgorithmServiceException if any asked algorithm name is not supported.
* @throws NullPointerException if passed in list of names is {@code null}.
*/
@Nonnull
Collection<ChecksumAlgorithm> select(@Nonnull Collection<String> algorithmNames);
/**
* Calculates checksums for specified data.
*
* @param data The content for which to calculate checksums, must not be {@code null}.
* @param algorithms The checksum algorithms to use, must not be {@code null}.
* @return The calculated checksums, indexed by algorithms, never {@code null}.
* @throws NullPointerException if passed in any parameter is {@code null}.
*/
@Nonnull
Map<ChecksumAlgorithm, String> calculate(@Nonnull byte[] data, @Nonnull Collection<ChecksumAlgorithm> algorithms);
/**
* Calculates checksums for specified data.
*
* @param data The content for which to calculate checksums, must not be {@code null}.
* @param algorithms The checksum algorithms to use, must not be {@code null}.
* @return The calculated checksums, indexed by algorithms, never {@code null}.
* @throws NullPointerException if passed in any parameter is {@code null}.
*/
@Nonnull
Map<ChecksumAlgorithm, String> calculate(
@Nonnull ByteBuffer data, @Nonnull Collection<ChecksumAlgorithm> algorithms);
/**
* Calculates checksums for specified file.
*
* @param file The file for which to calculate checksums, must not be {@code null}.
* @param algorithms The checksum algorithms to use, must not be {@code null}.
* @return The calculated checksums, indexed by algorithms, never {@code null}.
* @throws NullPointerException if passed in any parameter is {@code null}.
* @throws IOException In case of any IO problem.
*/
@Nonnull
Map<ChecksumAlgorithm, String> calculate(@Nonnull Path file, @Nonnull Collection<ChecksumAlgorithm> algorithms)
throws IOException;
/**
* Calculates checksums for specified stream. Upon this method returns, the stream will be depleted (fully read)
* but not closed.
*
* @param stream The stream for which to calculate checksums, must not be {@code null}.
* @param algorithms The checksum algorithms to use, must not be {@code null}.
* @return The calculated checksums, indexed by algorithms, never {@code null}.
* @throws NullPointerException if passed in any parameter is {@code null}.
* @throws IOException In case of any IO problem.
*/
@Nonnull
Map<ChecksumAlgorithm, String> calculate(
@Nonnull InputStream stream, @Nonnull Collection<ChecksumAlgorithm> algorithms) throws IOException;
/**
* The checksum algorithm.
*/
|
ChecksumAlgorithmService
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java
|
{
"start": 521,
"end": 1215
}
|
class ____ extends BinarySpatialFunctionTestCase {
protected static void addSpatialCombinations(List<TestCaseSupplier> suppliers, DataType... dataTypes) {
addSpatialCombinations(suppliers, dataTypes, DataType.BOOLEAN, false);
}
protected static void addSpatialGridCombinations(List<TestCaseSupplier> suppliers, DataType... dataTypes) {
addSpatialGridCombinations(suppliers, dataTypes, DataType.BOOLEAN);
}
protected static String typeErrorMessage(boolean includeOrdinal, List<Set<DataType>> validPerPosition, List<DataType> types) {
return typeErrorMessage(includeOrdinal, validPerPosition, types, false, false);
}
}
|
SpatialRelatesFunctionTestCase
|
java
|
google__error-prone
|
check_api/src/main/java/com/google/errorprone/matchers/method/MethodMatchers.java
|
{
"start": 7785,
"end": 8522
}
|
interface ____ extends MethodMatcher {
/** Match constructors with no formal parameters. */
ParameterMatcher withNoParameters();
/** Match constructors whose formal parameters have the given types. */
ParameterMatcher withParameters(String first, String... rest);
/** Match constructors whose formal parameters have the given types. */
ParameterMatcher withParameters(Iterable<String> parameters);
/** Match constructors whose formal parameters have the given types. */
ParameterMatcher withParametersOfType(Iterable<Supplier<Type>> parameters);
}
/**
* @deprecated use {@code Matcher<ExpressionTree>} instead of referring directly to this type.
*/
@Deprecated
public
|
ConstructorClassMatcher
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/BaseTestLineRecordReaderBZip2.java
|
{
"start": 1715,
"end": 11559
}
|
class ____ {
// LF stands for line feed
private static final byte[] LF = new byte[] {'\n'};
// CR stands for cartridge return
private static final byte[] CR = new byte[] {'\r'};
private static final byte[] CR_LF = new byte[] {'\r', '\n'};
private Configuration conf;
private FileSystem fs;
private Path tempFile;
public Configuration getConf() {
return conf;
}
public FileSystem getFs() {
return fs;
}
public Path getTempFile() {
return tempFile;
}
@BeforeEach
public void setUp() throws Exception {
conf = new Configuration();
Path workDir = new Path(
System.getProperty("test.build.data", "target"),
"data/" + getClass().getSimpleName());
fs = workDir.getFileSystem(conf);
Path inputDir = new Path(workDir, "input");
tempFile = new Path(inputDir, "test.txt.bz2");
}
@AfterEach
public void tearDown() throws Exception {
fs.delete(tempFile, /* recursive */ false);
}
@Test
public void firstBlockEndsWithLF() throws Exception {
try (BZip2TextFileWriter writer = new BZip2TextFileWriter(tempFile, conf)) {
writer.writeManyRecords(BLOCK_SIZE, 1000, LF);
writer.writeRecord(10, LF);
writer.writeRecord(10, LF);
writer.writeRecord(10, LF);
}
assertRecordCountsPerSplit(tempFile, new long[] {1001, 2});
}
@Test
public void firstBlockEndsWithLFSecondBlockStartsWithLF() throws Exception {
try (BZip2TextFileWriter writer = new BZip2TextFileWriter(tempFile, conf)) {
writer.writeManyRecords(BLOCK_SIZE, 1000, LF);
// Write 254 empty rows terminating at LF, as those records will get
// rolled into the first block record due to run-length encoding, the
// 255th LF character will trigger a run to be written to the block. We
// only need 254 LF characters since the last byte written by prior
// writeManyRecords call is already a LF.
writer.writeManyRecords(254, 254, LF);
// This LF character should be the first byte of the second block, but
// if splitting at blocks, the first split will read this record as the
// additional record.
writer.writeRecord(1, LF);
writer.writeRecord(10, LF);
writer.writeRecord(10, LF);
}
assertRecordCountsPerSplit(tempFile, new long[] {1255, 2});
}
@Test
public void firstBlockEndsWithLFSecondBlockStartsWithCR() throws Exception {
try (BZip2TextFileWriter writer = new BZip2TextFileWriter(tempFile, conf)) {
writer.writeManyRecords(BLOCK_SIZE, 1000, LF);
writer.writeRecord(1, CR);
writer.writeRecord(10, LF);
writer.writeRecord(10, LF);
}
assertRecordCountsPerSplit(tempFile, new long[] {1001, 2});
}
@Test
public void firstBlockEndsWithCRLF() throws Exception {
try (BZip2TextFileWriter writer = new BZip2TextFileWriter(tempFile, conf)) {
writer.writeManyRecords(BLOCK_SIZE, 1000, CR_LF);
writer.writeRecord(10, LF);
writer.writeRecord(10, LF);
writer.writeRecord(10, LF);
}
assertRecordCountsPerSplit(tempFile, new long[] {1001, 2});
}
@Test
public void lastRecordContentSpanAcrossBlocks()
throws Exception {
try (BZip2TextFileWriter writer = new BZip2TextFileWriter(tempFile, conf)) {
writer.writeManyRecords(BLOCK_SIZE - 50, 999, LF);
writer.writeRecord(100, LF);
writer.writeRecord(10, LF);
writer.writeRecord(10, LF);
writer.writeRecord(10, LF);
}
assertRecordCountsPerSplit(tempFile, new long[] {1000, 3});
}
@Test
public void lastRecordOfBlockHasItsLFInNextBlock() throws Exception {
try (BZip2TextFileWriter writer = new BZip2TextFileWriter(tempFile, conf)) {
writer.writeManyRecords(BLOCK_SIZE - 50, 999, LF);
// The LF character is the first byte of the second block
writer.writeRecord(51, LF);
writer.writeRecord(10, LF);
writer.writeRecord(10, LF);
writer.writeRecord(10, LF);
}
assertRecordCountsPerSplit(tempFile, new long[] {1000, 3});
}
@Test
public void lastRecordOfFirstBlockHasItsCRLFInSecondBlock() throws Exception {
try (BZip2TextFileWriter writer = new BZip2TextFileWriter(tempFile, conf)) {
writer.writeManyRecords(BLOCK_SIZE - 50, 999, LF);
// Both CR + LF characters are the first two bytes of second block
writer.writeRecord(52, CR_LF);
writer.writeRecord(10, LF);
writer.writeRecord(10, LF);
writer.writeRecord(10, LF);
}
assertRecordCountsPerSplit(tempFile, new long[] {1000, 3});
}
@Test
public void lastRecordOfFirstBlockHasItsCRLFPartlyInSecondBlock()
throws Exception {
try (BZip2TextFileWriter writer = new BZip2TextFileWriter(tempFile, conf)) {
writer.writeManyRecords(BLOCK_SIZE - 50, 999, LF);
// The CR character is the last byte of the first block and the LF is
// the firs byte of the second block
writer.writeRecord(51, CR_LF);
writer.writeRecord(10, LF);
writer.writeRecord(10, LF);
writer.writeRecord(10, LF);
}
assertRecordCountsPerSplit(tempFile, new long[] {1000, 3});
}
@Test
public void lastByteInFirstBlockIsCRFirstByteInSecondBlockIsNotLF()
throws Exception {
try (BZip2TextFileWriter writer = new BZip2TextFileWriter(tempFile, conf)) {
writer.writeManyRecords(BLOCK_SIZE, 1000, CR);
writer.writeRecord(10, LF);
writer.writeRecord(10, LF);
writer.writeRecord(10, LF);
}
assertRecordCountsPerSplit(tempFile, new long[] {1001, 2});
}
@Test
public void usingCRDelimiterWithSmallestBufferSize() throws Exception {
// Forces calling LineReader#fillBuffer for ever byte read
conf.set(IO_FILE_BUFFER_SIZE_KEY, "1");
try (BZip2TextFileWriter writer = new BZip2TextFileWriter(tempFile, conf)) {
writer.writeManyRecords(BLOCK_SIZE - 50, 999, CR);
writer.writeRecord(100, CR);
writer.writeRecord(10, CR);
writer.writeRecord(10, CR);
writer.writeRecord(10, CR);
}
assertRecordCountsPerSplit(tempFile, new long[] {1000, 3});
}
@Test
public void delimitedByCRSpanningThreeBlocks() throws Exception {
try (BZip2TextFileWriter writer = new BZip2TextFileWriter(tempFile, conf)) {
writer.writeRecord(3 * BLOCK_SIZE, CR);
writer.writeRecord(3 * BLOCK_SIZE, CR);
writer.writeRecord(3 * BLOCK_SIZE, CR);
}
assertRecordCountsPerSplit(tempFile,
new long[] {1, 0, 1, 0, 0, 1, 0, 0, 0});
}
@Test
public void customDelimiterLastThreeBytesInBlockAreDelimiter()
throws Exception {
byte[] delimiter = new byte[] {'e', 'n', 'd'};
setDelimiter(delimiter);
try (BZip2TextFileWriter writer = new BZip2TextFileWriter(tempFile, conf)) {
writer.writeManyRecords(BLOCK_SIZE, 1000, delimiter);
writer.writeRecord(10, delimiter);
writer.writeRecord(10, delimiter);
writer.writeRecord(10, delimiter);
}
assertRecordCountsPerSplit(tempFile, new long[] {1001, 2});
}
@Test
public void customDelimiterDelimiterSpansAcrossBlocks()
throws Exception {
byte[] delimiter = new byte[] {'e', 'n', 'd'};
setDelimiter(delimiter);
try (BZip2TextFileWriter writer = new BZip2TextFileWriter(tempFile, conf)) {
writer.writeManyRecords(BLOCK_SIZE - 50, 999, delimiter);
writer.writeRecord(52, delimiter);
writer.writeRecord(10, delimiter);
writer.writeRecord(10, delimiter);
writer.writeRecord(10, delimiter);
}
assertRecordCountsPerSplit(tempFile, new long[] {1001, 2});
}
@Test
public void customDelimiterLastRecordDelimiterStartsAtNextBlockStart()
throws Exception {
byte[] delimiter = new byte[] {'e', 'n', 'd'};
setDelimiter(delimiter);
try (BZip2TextFileWriter writer = new BZip2TextFileWriter(tempFile, conf)) {
writer.writeManyRecords(BLOCK_SIZE - 50, 999, delimiter);
writer.writeRecord(53, delimiter);
writer.writeRecord(10, delimiter);
writer.writeRecord(10, delimiter);
writer.writeRecord(10, delimiter);
}
assertRecordCountsPerSplit(tempFile, new long[] {1000, 3});
}
@Test
public void customDelimiterLastBlockBytesShareCommonPrefixWithDelimiter()
throws Exception {
byte[] delimiter = new byte[] {'e', 'n', 'd'};
setDelimiter(delimiter);
try (BZip2TextFileWriter writer = new BZip2TextFileWriter(tempFile, conf)) {
writer.writeManyRecords(BLOCK_SIZE - 4, 999, delimiter);
// The first 4 bytes, "an e", will be the last 4 bytes of the first block,
// the last byte being 'e' which matches the first character of the
// delimiter "end". The first byte of the next block also matches the
// second byte of the delimiter "n"; however the next character "c" does
// not match the last character of the delimiter. Thus an additional
// record should not be read for the split that reads the first block.
// The split that reads the second block will just discard
// "nchanting tale coming to an end".
writer.write("an enchanting tale coming to an end");
writer.writeRecord(10, delimiter);
writer.writeRecord(10, delimiter);
writer.writeRecord(10, delimiter);
}
assertRecordCountsPerSplit(tempFile, new long[] {1000, 3});
}
protected abstract BaseLineRecordReaderHelper newReader(Path file);
private void assertRecordCountsPerSplit(
Path path, long[] countsIfSplitAtBlocks) throws IOException {
RecordCountAssert countAssert =
new RecordCountAssert(path, countsIfSplitAtBlocks);
countAssert.assertSingleSplit();
countAssert.assertSplittingAtBlocks();
countAssert.assertSplittingJustAfterSecondBlockStarts();
countAssert.assertSplittingEachBlockRangeInThreeParts();
countAssert.assertSplitsAroundBlockStartOffsets();
}
private
|
BaseTestLineRecordReaderBZip2
|
java
|
google__guava
|
android/guava/src/com/google/common/collect/ForwardingSet.java
|
{
"start": 1599,
"end": 2067
}
|
class ____ <i>not</i> forward calls to {@code
* default} methods. Instead, it inherits their default implementations. When those implementations
* invoke methods, they invoke methods on the {@code ForwardingSet}.
*
* <p>The {@code standard} methods are not guaranteed to be thread-safe, even when all of the
* methods that they depend on are thread-safe.
*
* @author Kevin Bourrillion
* @author Louis Wasserman
* @since 2.0
*/
@GwtCompatible
public abstract
|
does
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncSearchIndexServiceTests.java
|
{
"start": 7861,
"end": 19007
}
|
class ____ extends NoopCircuitBreaker {
private long used = 0;
private long limit = 0;
AdjustableLimitCircuitBreaker(String name) {
super(name);
}
@Override
public void addEstimateBytesAndMaybeBreak(long bytes, String label) throws CircuitBreakingException {
if (bytes <= 0) {
addWithoutBreaking(bytes);
} else {
if (used + bytes > limit) {
throw new CircuitBreakingException(
"Current used [" + used + "] and requesting bytes [" + bytes + "] " + "is greater than the limit [" + limit + "]",
Durability.TRANSIENT
);
}
used += bytes;
}
}
@Override
public void addWithoutBreaking(long bytes) {
used += bytes;
}
@Override
public long getUsed() {
return used;
}
@Override
public long getLimit() {
return limit;
}
void adjustLimit(long newLimit) {
if (newLimit < used) {
throw new IllegalArgumentException("Limit must not be smaller than used; used=" + used + "; limit=" + newLimit);
}
this.limit = newLimit;
}
}
public void testCircuitBreaker() throws Exception {
AdjustableLimitCircuitBreaker circuitBreaker = new AdjustableLimitCircuitBreaker("test");
CircuitBreakerService circuitBreakerService = new CircuitBreakerService() {
@Override
public CircuitBreaker getBreaker(String name) {
assertThat(name, equalTo(CircuitBreaker.REQUEST));
return circuitBreaker;
}
@Override
public AllCircuitBreakerStats stats() {
return null;
}
@Override
public CircuitBreakerStats stats(String name) {
return null;
}
};
BigArrays bigArrays = new BigArrays(null, circuitBreakerService, CircuitBreaker.REQUEST);
ClusterService clusterService = getInstanceFromNode(ClusterService.class);
TransportService transportService = getInstanceFromNode(TransportService.class);
indexService = new AsyncTaskIndexService<>(
"test",
clusterService,
transportService.getThreadPool().getThreadContext(),
client(),
ASYNC_SEARCH_ORIGIN,
TestAsyncResponse::new,
writableRegistry(),
bigArrays
);
AsyncExecutionId executionId = new AsyncExecutionId(
Long.toString(randomNonNegativeLong()),
new TaskId(randomAlphaOfLength(10), randomNonNegativeLong())
);
long expirationTime = randomLong();
String testMessage = randomAlphaOfLength(10);
{
circuitBreaker.adjustLimit(randomIntBetween(1, 64)); // small limit
TestAsyncResponse initialResponse = new TestAsyncResponse(testMessage, expirationTime);
PlainActionFuture<DocWriteResponse> createFuture = new PlainActionFuture<>();
indexService.createResponse(executionId.getDocId(), Map.of(), initialResponse, createFuture);
CircuitBreakingException e = expectThrows(CircuitBreakingException.class, createFuture::actionGet);
assertEquals(0, e.getSuppressed().length); // no other suppressed exceptions
assertThat(circuitBreaker.getUsed(), equalTo(0L));
}
{
circuitBreaker.adjustLimit(randomIntBetween(16 * 1024, 1024 * 1024)); // large enough
TestAsyncResponse initialResponse = new TestAsyncResponse(testMessage, expirationTime);
PlainActionFuture<DocWriteResponse> createFuture = new PlainActionFuture<>();
indexService.createResponse(executionId.getDocId(), Map.of(), initialResponse, createFuture);
assertThat(createFuture.actionGet().getResult(), equalTo(DocWriteResponse.Result.CREATED));
assertThat(circuitBreaker.getUsed(), equalTo(0L));
if (randomBoolean()) {
PlainActionFuture<TestAsyncResponse> getFuture = new PlainActionFuture<>();
indexService.getResponse(executionId, randomBoolean(), getFuture);
assertThat(getFuture.actionGet(), equalTo(initialResponse));
assertBusy(() -> assertThat(circuitBreaker.getUsed(), equalTo(0L)));
}
if (randomBoolean()) {
circuitBreaker.adjustLimit(between(1, 16));
PlainActionFuture<TestAsyncResponse> getFuture = new PlainActionFuture<>();
indexService.getResponse(executionId, randomBoolean(), getFuture);
expectThrows(CircuitBreakingException.class, getFuture::actionGet);
assertBusy(() -> assertThat(circuitBreaker.getUsed(), equalTo(0L)));
}
}
int updates = randomIntBetween(1, 5);
for (int u = 0; u < updates; u++) {
if (randomBoolean()) {
circuitBreaker.adjustLimit(randomIntBetween(16 * 1024, 1024 * 1024));
testMessage = randomAlphaOfLength(10);
TestAsyncResponse updateResponse = new TestAsyncResponse(testMessage, randomLong());
PlainActionFuture<UpdateResponse> updateFuture = new PlainActionFuture<>();
indexService.updateResponse(executionId.getDocId(), Map.of(), updateResponse, updateFuture);
updateFuture.actionGet();
assertThat(circuitBreaker.getUsed(), equalTo(0L));
} else {
circuitBreaker.adjustLimit(randomIntBetween(1, 64)); // small limit
PlainActionFuture<UpdateResponse> updateFuture = new PlainActionFuture<>();
TestAsyncResponse updateResponse = new TestAsyncResponse(randomAlphaOfLength(100), randomLong());
indexService.updateResponse(executionId.getDocId(), Map.of(), updateResponse, updateFuture);
CircuitBreakingException e = expectThrows(CircuitBreakingException.class, updateFuture::actionGet);
assertEquals(0, e.getSuppressed().length); // no other suppressed exceptions
assertThat(circuitBreaker.getUsed(), equalTo(0L));
}
if (randomBoolean()) {
circuitBreaker.adjustLimit(randomIntBetween(16 * 1024, 1024 * 1024)); // small limit
PlainActionFuture<TestAsyncResponse> getFuture = new PlainActionFuture<>();
indexService.getResponse(executionId, randomBoolean(), getFuture);
assertThat(getFuture.actionGet().test, equalTo(testMessage));
assertThat(getFuture.actionGet().expirationTimeMillis, equalTo(expirationTime));
assertBusy(() -> assertThat(circuitBreaker.getUsed(), equalTo(0L)));
}
if (randomBoolean()) {
circuitBreaker.adjustLimit(randomIntBetween(1, 16)); // small limit
PlainActionFuture<TestAsyncResponse> getFuture = new PlainActionFuture<>();
indexService.getResponse(executionId, randomBoolean(), getFuture);
expectThrows(CircuitBreakingException.class, getFuture::actionGet);
assertBusy(() -> assertThat(circuitBreaker.getUsed(), equalTo(0L)));
}
}
}
public void testMaxAsyncSearchResponseSize() throws Exception {
try {
// successfully create an initial response
AsyncExecutionId executionId1 = new AsyncExecutionId(
Long.toString(randomNonNegativeLong()),
new TaskId(randomAlphaOfLength(10), randomNonNegativeLong())
);
TestAsyncResponse initialResponse = new TestAsyncResponse(randomAlphaOfLength(130), randomLong());
PlainActionFuture<DocWriteResponse> createFuture1 = new PlainActionFuture<>();
indexService.createResponse(executionId1.getDocId(), Map.of(), initialResponse, createFuture1);
createFuture1.actionGet();
// setting very small limit for the max size of async search response
int limit = randomIntBetween(1, 125);
ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(
TEST_REQUEST_TIMEOUT,
TEST_REQUEST_TIMEOUT
);
updateSettingsRequest.transientSettings(Settings.builder().put("search.max_async_search_response_size", limit + "b"));
assertAcked(clusterAdmin().updateSettings(updateSettingsRequest).actionGet());
String expectedErrMsg = "Can't store an async search response larger than ["
+ limit
+ "] bytes. "
+ "This limit can be set by changing the ["
+ MAX_ASYNC_SEARCH_RESPONSE_SIZE_SETTING.getKey()
+ "] setting.";
// test that an update operation of the initial response fails
PlainActionFuture<UpdateResponse> updateFuture = new PlainActionFuture<>();
TestAsyncResponse updateResponse = new TestAsyncResponse(randomAlphaOfLength(130), randomLong());
indexService.updateResponse(executionId1.getDocId(), Map.of(), updateResponse, updateFuture);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, updateFuture::actionGet);
assertEquals(expectedErrMsg, e.getMessage());
// test that the inital response is overwritten with a failure
PlainActionFuture<TestAsyncResponse> getFuture = new PlainActionFuture<>();
indexService.getResponse(executionId1, randomBoolean(), getFuture);
assertEquals(expectedErrMsg, getFuture.actionGet().failure);
// test that a create operation fails
AsyncExecutionId executionId2 = new AsyncExecutionId(
Long.toString(randomNonNegativeLong()),
new TaskId(randomAlphaOfLength(10), randomNonNegativeLong())
);
PlainActionFuture<DocWriteResponse> createFuture = new PlainActionFuture<>();
TestAsyncResponse initialResponse2 = new TestAsyncResponse(randomAlphaOfLength(130), randomLong());
indexService.createResponse(executionId2.getDocId(), Map.of(), initialResponse2, createFuture);
IllegalArgumentException e2 = expectThrows(IllegalArgumentException.class, createFuture::actionGet);
assertEquals(expectedErrMsg, e2.getMessage());
} finally {
// restoring limit
ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(
TEST_REQUEST_TIMEOUT,
TEST_REQUEST_TIMEOUT
);
updateSettingsRequest.transientSettings(Settings.builder().put("search.max_async_search_response_size", (String) null));
assertAcked(clusterAdmin().updateSettings(updateSettingsRequest).actionGet());
}
}
}
|
AdjustableLimitCircuitBreaker
|
java
|
apache__spark
|
common/kvstore/src/test/java/org/apache/spark/util/kvstore/LevelDBTypeInfoSuite.java
|
{
"start": 5945,
"end": 6112
}
|
class ____ {
@KVIndex
public String key;
@KVIndex("id")
public String id;
@KVIndex("id")
public String id2;
}
public static
|
DuplicateIndex
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/util/ArrayIterator.java
|
{
"start": 309,
"end": 927
}
|
class ____<T> implements Iterator<T>, Iterable<T> // lgtm [java/iterator-implements-iterable]
{
private final T[] _a;
private int _index;
public ArrayIterator(T[] a) {
_a = a;
_index = 0;
}
@Override
public boolean hasNext() { return _index < _a.length; }
@Override
public T next() {
if (_index >= _a.length) {
throw new NoSuchElementException();
}
return _a[_index++];
}
@Override public void remove() { throw new UnsupportedOperationException(); }
@Override public Iterator<T> iterator() { return this; }
}
|
ArrayIterator
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_areAtLeastOne_Test.java
|
{
"start": 1020,
"end": 1461
}
|
class ____ extends IterableAssertBaseTest {
private static final Condition<Object> condition = new TestCondition<>();
@Override
protected ConcreteIterableAssert<Object> invoke_api_method() {
return assertions.areAtLeastOne(condition);
}
@Override
protected void verify_internal_effects() {
verify(iterables).assertAreAtLeast(getInfo(assertions), getActual(assertions), 1, condition);
}
}
|
IterableAssert_areAtLeastOne_Test
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/event/DirtiesContextEventPublishingTests.java
|
{
"start": 6692,
"end": 6860
}
|
class ____ {
@Test
@DirtiesContext(methodMode = MethodMode.BEFORE_METHOD)
void test() {
}
}
@Configuration
static
|
MethodLevelBeforeMethodDirtiesContextTestCase
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/metrics/RouterMBean.java
|
{
"start": 997,
"end": 1107
}
|
interface ____ the router specific metrics.
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public
|
for
|
java
|
mybatis__mybatis-3
|
src/main/java/org/apache/ibatis/annotations/Options.java
|
{
"start": 4041,
"end": 4088
}
|
interface ____ {
Options[] value();
}
}
|
List
|
java
|
netty__netty
|
transport-native-io_uring/src/test/java/io/netty/channel/uring/IoUringSocketChannelNotYetConnectedTest.java
|
{
"start": 974,
"end": 1341
}
|
class ____ extends SocketChannelNotYetConnectedTest {
@BeforeAll
public static void loadJNI() {
assumeTrue(IoUring.isAvailable());
}
@Override
protected List<TestsuitePermutation.BootstrapFactory<Bootstrap>> newFactories() {
return IoUringSocketTestPermutation.INSTANCE.clientSocket();
}
}
|
IoUringSocketChannelNotYetConnectedTest
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/scheduling/annotation/EnableSchedulingTests.java
|
{
"start": 24542,
"end": 25017
}
|
class ____ implements FactoryBean<PrototypeBeanWithScheduled> {
private AtomicInteger counter;
public FactoryBeanForScheduled(AtomicInteger counter) {
this.counter = counter;
}
@Override
public PrototypeBeanWithScheduled getObject() {
return new PrototypeBeanWithScheduled(this.counter);
}
@Override
public Class<?> getObjectType() {
return PrototypeBeanWithScheduled.class;
}
}
@Configuration
@EnableScheduling
static
|
FactoryBeanForScheduled
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/EnableAspectJAutoProxyTests.java
|
{
"start": 1423,
"end": 3973
}
|
class ____ {
@Test
void withJdkProxy() {
ConfigurableApplicationContext ctx = new AnnotationConfigApplicationContext(ConfigWithJdkProxy.class);
aspectIsApplied(ctx);
assertThat(AopUtils.isJdkDynamicProxy(ctx.getBean(FooService.class))).isTrue();
assertThat(AopUtils.isJdkDynamicProxy(ctx.getBean("otherFooService"))).isTrue();
ctx.close();
}
@Test
void withCglibProxy() {
ConfigurableApplicationContext ctx = new AnnotationConfigApplicationContext(ConfigWithCglibProxy.class);
aspectIsApplied(ctx);
assertThat(AopUtils.isCglibProxy(ctx.getBean(FooService.class))).isTrue();
assertThat(AopUtils.isJdkDynamicProxy(ctx.getBean("otherFooService"))).isTrue();
ctx.close();
}
@Test
void withExposedProxy() {
ConfigurableApplicationContext ctx = new AnnotationConfigApplicationContext(ConfigWithExposedProxy.class);
aspectIsApplied(ctx);
assertThat(AopUtils.isJdkDynamicProxy(ctx.getBean(FooService.class))).isTrue();
ctx.close();
}
private void aspectIsApplied(ApplicationContext ctx) {
FooService fooService = ctx.getBean(FooService.class);
ServiceInvocationCounter counter = ctx.getBean(ServiceInvocationCounter.class);
assertThat(counter.getCount()).isEqualTo(0);
assertThat(fooService.isInitCalled()).isTrue();
assertThat(counter.getCount()).isEqualTo(1);
String value = fooService.foo(1);
assertThat(value).isEqualTo("bar");
assertThat(counter.getCount()).isEqualTo(2);
fooService.foo(1);
assertThat(counter.getCount()).isEqualTo(3);
}
@Test
void withAnnotationOnArgumentAndJdkProxy() {
ConfigurableApplicationContext ctx = new AnnotationConfigApplicationContext(
ConfigWithJdkProxy.class, SampleService.class, LoggingAspect.class);
SampleService sampleService = ctx.getBean(SampleService.class);
sampleService.execute(new SampleDto());
sampleService.execute(new SampleInputBean());
sampleService.execute((SampleDto) null);
sampleService.execute((SampleInputBean) null);
ctx.close();
}
@Test
void withAnnotationOnArgumentAndCglibProxy() {
ConfigurableApplicationContext ctx = new AnnotationConfigApplicationContext(
ConfigWithCglibProxy.class, SampleService.class, LoggingAspect.class);
SampleService sampleService = ctx.getBean(SampleService.class);
sampleService.execute(new SampleDto());
sampleService.execute(new SampleInputBean());
sampleService.execute((SampleDto) null);
sampleService.execute((SampleInputBean) null);
ctx.close();
}
@ComponentScan("example.scannable")
@EnableAspectJAutoProxy
static
|
EnableAspectJAutoProxyTests
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/NatsEndpointBuilderFactory.java
|
{
"start": 86089,
"end": 94447
}
|
interface ____
extends
AdvancedNatsEndpointConsumerBuilder,
AdvancedNatsEndpointProducerBuilder {
default NatsEndpointBuilder basic() {
return (NatsEndpointBuilder) this;
}
/**
* Reference an already instantiated connection to Nats server.
*
* The option is a: <code>io.nats.client.Connection</code> type.
*
* Group: advanced
*
* @param connection the value to set
* @return the dsl builder
*/
default AdvancedNatsEndpointBuilder connection(io.nats.client.Connection connection) {
doSetProperty("connection", connection);
return this;
}
/**
* Reference an already instantiated connection to Nats server.
*
* The option will be converted to a
* <code>io.nats.client.Connection</code> type.
*
* Group: advanced
*
* @param connection the value to set
* @return the dsl builder
*/
default AdvancedNatsEndpointBuilder connection(String connection) {
doSetProperty("connection", connection);
return this;
}
/**
* Sets a custom ConsumerConfiguration object for the JetStream
* consumer. This is an advanced option typically used when you need to
* configure properties not exposed as simple Camel URI parameters. When
* set, this object will be used to build the final consumer
* subscription options.
*
* The option is a:
* <code>io.nats.client.api.ConsumerConfiguration</code> type.
*
* Group: advanced
*
* @param consumerConfiguration the value to set
* @return the dsl builder
*/
default AdvancedNatsEndpointBuilder consumerConfiguration(io.nats.client.api.ConsumerConfiguration consumerConfiguration) {
doSetProperty("consumerConfiguration", consumerConfiguration);
return this;
}
/**
* Sets a custom ConsumerConfiguration object for the JetStream
* consumer. This is an advanced option typically used when you need to
* configure properties not exposed as simple Camel URI parameters. When
* set, this object will be used to build the final consumer
* subscription options.
*
* The option will be converted to a
* <code>io.nats.client.api.ConsumerConfiguration</code> type.
*
* Group: advanced
*
* @param consumerConfiguration the value to set
* @return the dsl builder
*/
default AdvancedNatsEndpointBuilder consumerConfiguration(String consumerConfiguration) {
doSetProperty("consumerConfiguration", consumerConfiguration);
return this;
}
/**
* Sets the name to assign to the JetStream durable consumer. Setting
* this value makes the consumer durable. The value is used to set the
* durable() field in the underlying NATS ConsumerConfiguration.Builder.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: advanced
*
* @param durableName the value to set
* @return the dsl builder
*/
default AdvancedNatsEndpointBuilder durableName(String durableName) {
doSetProperty("durableName", durableName);
return this;
}
/**
* To use a custom header filter strategy.
*
* The option is a:
* <code>org.apache.camel.spi.HeaderFilterStrategy</code> type.
*
* Group: advanced
*
* @param headerFilterStrategy the value to set
* @return the dsl builder
*/
default AdvancedNatsEndpointBuilder headerFilterStrategy(org.apache.camel.spi.HeaderFilterStrategy headerFilterStrategy) {
doSetProperty("headerFilterStrategy", headerFilterStrategy);
return this;
}
/**
* To use a custom header filter strategy.
*
* The option will be converted to a
* <code>org.apache.camel.spi.HeaderFilterStrategy</code> type.
*
* Group: advanced
*
* @param headerFilterStrategy the value to set
* @return the dsl builder
*/
default AdvancedNatsEndpointBuilder headerFilterStrategy(String headerFilterStrategy) {
doSetProperty("headerFilterStrategy", headerFilterStrategy);
return this;
}
/**
* Sets whether to operate JetStream requests asynchronously.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param jetstreamAsync the value to set
* @return the dsl builder
*/
default AdvancedNatsEndpointBuilder jetstreamAsync(boolean jetstreamAsync) {
doSetProperty("jetstreamAsync", jetstreamAsync);
return this;
}
/**
* Sets whether to operate JetStream requests asynchronously.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param jetstreamAsync the value to set
* @return the dsl builder
*/
default AdvancedNatsEndpointBuilder jetstreamAsync(String jetstreamAsync) {
doSetProperty("jetstreamAsync", jetstreamAsync);
return this;
}
/**
* Sets the consumer subscription type for JetStream. Set to true to use
* a Pull Subscription (consumer explicitly requests messages). Set to
* false to use a Push Subscription (messages are automatically
* delivered).
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param pullSubscription the value to set
* @return the dsl builder
*/
default AdvancedNatsEndpointBuilder pullSubscription(boolean pullSubscription) {
doSetProperty("pullSubscription", pullSubscription);
return this;
}
/**
* Sets the consumer subscription type for JetStream. Set to true to use
* a Pull Subscription (consumer explicitly requests messages). Set to
* false to use a Push Subscription (messages are automatically
* delivered).
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param pullSubscription the value to set
* @return the dsl builder
*/
default AdvancedNatsEndpointBuilder pullSubscription(String pullSubscription) {
doSetProperty("pullSubscription", pullSubscription);
return this;
}
/**
* Whether or not connection trace messages should be printed to
* standard out for fine grained debugging of connection issues.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param traceConnection the value to set
* @return the dsl builder
*/
default AdvancedNatsEndpointBuilder traceConnection(boolean traceConnection) {
doSetProperty("traceConnection", traceConnection);
return this;
}
/**
* Whether or not connection trace messages should be printed to
* standard out for fine grained debugging of connection issues.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param traceConnection the value to set
* @return the dsl builder
*/
default AdvancedNatsEndpointBuilder traceConnection(String traceConnection) {
doSetProperty("traceConnection", traceConnection);
return this;
}
}
public
|
AdvancedNatsEndpointBuilder
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditTrailService.java
|
{
"start": 1254,
"end": 3277
}
|
class ____ {
private static final Logger logger = LogManager.getLogger(AuditTrailService.class);
private static final AuditTrail NOOP_AUDIT_TRAIL = new NoopAuditTrail();
private final @Nullable AuditTrail auditTrail;
private final XPackLicenseState licenseState;
private final Duration minLogPeriod = Duration.ofMinutes(30);
protected AtomicReference<Instant> nextLogInstantAtomic = new AtomicReference<>(Instant.EPOCH);
public AuditTrailService(@Nullable AuditTrail auditTrail, XPackLicenseState licenseState) {
this.auditTrail = auditTrail;
this.licenseState = licenseState;
}
public AuditTrail get() {
if (auditTrail != null) {
if (Security.AUDITING_FEATURE.check(licenseState)) {
return auditTrail;
} else {
maybeLogAuditingDisabled();
return NOOP_AUDIT_TRAIL;
}
} else {
return NOOP_AUDIT_TRAIL;
}
}
public boolean includeRequestBody() {
if (get() instanceof LoggingAuditTrail trail) {
return trail.includeRequestBody();
} else {
return false;
}
}
// TODO: this method only exists for access to LoggingAuditTrail in a Node for testing.
// DO NOT USE IT, IT WILL BE REMOVED IN THE FUTURE
public AuditTrail getAuditTrail() {
return auditTrail;
}
private void maybeLogAuditingDisabled() {
Instant nowInstant = Instant.now();
Instant nextLogInstant = nextLogInstantAtomic.get();
if (nextLogInstant.isBefore(nowInstant)) {
if (nextLogInstantAtomic.compareAndSet(nextLogInstant, nowInstant.plus(minLogPeriod))) {
logger.warn(
"Auditing logging is DISABLED because the currently active license ["
+ licenseState.getOperationMode()
+ "] does not permit it"
);
}
}
}
private static
|
AuditTrailService
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/type/format/jaxb/JaxbXmlFormatMapper.java
|
{
"start": 20435,
"end": 20817
}
|
class ____ implements ManagedMapWrapper {
@XmlAnyElement
Collection<Object> elements;
public LegacyMapWrapper() {
this.elements = new ArrayList<>();
}
public LegacyMapWrapper(Collection<Object> elements) {
this.elements = elements;
}
@Override
public int size() {
return elements.size();
}
}
@XmlRootElement(name = "Map")
public static
|
LegacyMapWrapper
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/generated/NamespaceAggsHandleFunctionBase.java
|
{
"start": 1041,
"end": 1324
}
|
class ____ handling aggregate or table aggregate functions.
*
* <p>The differences between {@link NamespaceAggsHandleFunctionBase} and {@link
* AggsHandleFunctionBase} is that the {@link NamespaceAggsHandleFunctionBase} has namespace.
*
* @param <N> type of namespace
*/
public
|
for
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/crypto/tool/SystemKeyToolProvider.java
|
{
"start": 401,
"end": 622
}
|
class ____ implements CliToolProvider {
@Override
public String name() {
return "syskeygen";
}
@Override
public Command create() {
return new SystemKeyTool();
}
}
|
SystemKeyToolProvider
|
java
|
quarkusio__quarkus
|
extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/specialmappings/IdVersionPK.java
|
{
"start": 146,
"end": 1088
}
|
class ____ implements Serializable {
@Id
private String id;
@Id
private Long version;
public IdVersionPK() {
}
public IdVersionPK(String id, Long version) {
this.id = id;
this.version = version;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public Long getVersion() {
return version;
}
public void setVersion(Long version) {
this.version = version;
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
IdVersionPK that = (IdVersionPK) o;
return Objects.equals(id, that.id) &&
Objects.equals(version, that.version);
}
@Override
public int hashCode() {
return Objects.hash(id, version);
}
}
|
IdVersionPK
|
java
|
spring-projects__spring-boot
|
module/spring-boot-pulsar/src/test/java/org/springframework/boot/pulsar/autoconfigure/PulsarAutoConfigurationTests.java
|
{
"start": 42961,
"end": 43878
}
|
class ____ {
@Bean
@Order(200)
PulsarContainerFactoryCustomizer<ConcurrentPulsarListenerContainerFactory<?>> customizerFoo() {
return (containerFactory) -> appendToSubscriptionName(containerFactory, ":foo");
}
@Bean
@Order(100)
PulsarContainerFactoryCustomizer<ConcurrentPulsarListenerContainerFactory<?>> customizerBar() {
return (containerFactory) -> appendToSubscriptionName(containerFactory, ":bar");
}
private void appendToSubscriptionName(ConcurrentPulsarListenerContainerFactory<?> containerFactory,
String valueToAppend) {
String subscriptionName = containerFactory.getContainerProperties().getSubscriptionName();
String updatedValue = (subscriptionName != null) ? subscriptionName + valueToAppend : valueToAppend;
containerFactory.getContainerProperties().setSubscriptionName(updatedValue);
}
}
}
@Nested
|
ListenerContainerFactoryCustomizersConfig
|
java
|
google__guice
|
extensions/assistedinject/test/com/google/inject/assistedinject/FactoryModuleBuilderTest.java
|
{
"start": 15698,
"end": 15794
}
|
interface ____ {
AbstractCar create(Color color);
}
public static
|
ColoredAbstractCarFactory
|
java
|
apache__camel
|
core/camel-management/src/main/java/org/apache/camel/management/mbean/ManagedConvertBody.java
|
{
"start": 1188,
"end": 1791
}
|
class ____ extends ManagedProcessor implements ManagedConvertBodyMBean {
public ManagedConvertBody(CamelContext context, ConvertBodyProcessor processor, ProcessorDefinition<?> definition) {
super(context, processor, definition);
}
@Override
public ConvertBodyProcessor getProcessor() {
return (ConvertBodyProcessor) super.getProcessor();
}
@Override
public String getType() {
return getProcessor().getType().getCanonicalName();
}
@Override
public String getCharset() {
return getProcessor().getCharset();
}
}
|
ManagedConvertBody
|
java
|
elastic__elasticsearch
|
test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java
|
{
"start": 1437,
"end": 4511
}
|
class ____ extends Assertion {
public static MatchAssertion parse(XContentParser parser) throws IOException {
XContentLocation location = parser.getTokenLocation();
Tuple<String, Object> stringObjectTuple = ParserUtils.parseTuple(parser);
return new MatchAssertion(location, stringObjectTuple.v1(), stringObjectTuple.v2());
}
private static final Logger logger = LogManager.getLogger(MatchAssertion.class);
public MatchAssertion(XContentLocation location, String field, Object expectedValue) {
super(location, field, expectedValue);
}
@Override
protected void doAssert(Object actualValue, Object expectedValue) {
// if the value is wrapped into / it is a regexp (e.g. /s+d+/)
if (expectedValue instanceof String) {
String expValue = ((String) expectedValue).trim();
if (expValue.length() > 2 && expValue.startsWith("/") && expValue.endsWith("/")) {
assertThat(
"field [" + getField() + "] was expected to be of type String but is an instanceof [" + safeClass(actualValue) + "]",
actualValue,
instanceOf(String.class)
);
String stringValue = (String) actualValue;
String regex = expValue.substring(1, expValue.length() - 1);
logger.trace("assert that [{}] matches [{}]", stringValue, regex);
assertThat(
"field [" + getField() + "] was expected to match the provided regex but didn't",
stringValue,
matches(regex, Pattern.COMMENTS)
);
return;
}
}
logger.trace("assert that [{}] matches [{}] (field [{}])", actualValue, expectedValue, getField());
if (expectedValue == null) {
assertNull("field [" + getField() + "] should be null but was [" + actualValue + "]", actualValue);
return;
}
assertNotNull("field [" + getField() + "] is null", actualValue);
if (actualValue.getClass().equals(safeClass(expectedValue)) == false) {
if (actualValue instanceof Number && expectedValue instanceof Number) {
// Double 1.0 is equal to Integer 1
assertThat(
"field [" + getField() + "] doesn't match the expected value",
((Number) actualValue).doubleValue(),
equalTo(((Number) expectedValue).doubleValue())
);
return;
}
}
if (expectedValue instanceof Map) {
assertThat(actualValue, instanceOf(Map.class));
assertMap((Map<?, ?>) actualValue, matchesMap((Map<?, ?>) expectedValue));
} else if (expectedValue instanceof List) {
assertThat(actualValue, instanceOf(List.class));
assertMap((List<?>) actualValue, matchesList((List<?>) expectedValue));
}
assertThat(actualValue, equalTo(expectedValue));
}
}
|
MatchAssertion
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/main/java/org/springframework/boot/context/properties/source/ConfigurationPropertyNameAliases.java
|
{
"start": 1239,
"end": 2867
}
|
class ____ implements Iterable<ConfigurationPropertyName> {
private final MultiValueMap<ConfigurationPropertyName, ConfigurationPropertyName> aliases = new LinkedMultiValueMap<>();
public ConfigurationPropertyNameAliases() {
}
public ConfigurationPropertyNameAliases(String name, String... aliases) {
addAliases(name, aliases);
}
public ConfigurationPropertyNameAliases(ConfigurationPropertyName name, ConfigurationPropertyName... aliases) {
addAliases(name, aliases);
}
public void addAliases(String name, String... aliases) {
Assert.notNull(name, "'name' must not be null");
Assert.notNull(aliases, "'aliases' must not be null");
addAliases(ConfigurationPropertyName.of(name),
Arrays.stream(aliases).map(ConfigurationPropertyName::of).toArray(ConfigurationPropertyName[]::new));
}
public void addAliases(ConfigurationPropertyName name, ConfigurationPropertyName... aliases) {
Assert.notNull(name, "'name' must not be null");
Assert.notNull(aliases, "'aliases' must not be null");
this.aliases.addAll(name, Arrays.asList(aliases));
}
public List<ConfigurationPropertyName> getAliases(ConfigurationPropertyName name) {
return this.aliases.getOrDefault(name, Collections.emptyList());
}
public @Nullable ConfigurationPropertyName getNameForAlias(ConfigurationPropertyName alias) {
return this.aliases.entrySet()
.stream()
.filter((e) -> e.getValue().contains(alias))
.map(Map.Entry::getKey)
.findFirst()
.orElse(null);
}
@Override
public Iterator<ConfigurationPropertyName> iterator() {
return this.aliases.keySet().iterator();
}
}
|
ConfigurationPropertyNameAliases
|
java
|
apache__flink
|
flink-libraries/flink-cep/src/main/java/org/apache/flink/cep/pattern/Pattern.java
|
{
"start": 1693,
"end": 2177
}
|
class ____ a pattern definition.
*
* <p>A pattern definition is used by {@link org.apache.flink.cep.nfa.compiler.NFACompiler} to
* create a {@link NFA}.
*
* <pre>{@code
* Pattern<T, F> pattern = Pattern.<T>begin("start")
* .next("middle").subtype(F.class)
* .followedBy("end").where(new MyCondition());
* }</pre>
*
* @param <T> Base type of the elements appearing in the pattern
* @param <F> Subtype of T to which the current pattern operator is constrained
*/
public
|
for
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureEncoder.java
|
{
"start": 1871,
"end": 3215
}
|
class ____ extends ErasureEncoder {
private RawErasureEncoder rsRawEncoder;
private RawErasureEncoder xorRawEncoder;
public HHXORErasureEncoder(ErasureCoderOptions options) {
super(options);
}
@Override
protected ErasureCodingStep prepareEncodingStep(
final ECBlockGroup blockGroup) {
RawErasureEncoder rsRawEncoderTmp = checkCreateRSRawEncoder();
RawErasureEncoder xorRawEncoderTmp = checkCreateXorRawEncoder();
ECBlock[] inputBlocks = getInputBlocks(blockGroup);
return new HHXORErasureEncodingStep(inputBlocks,
getOutputBlocks(blockGroup), rsRawEncoderTmp, xorRawEncoderTmp);
}
private RawErasureEncoder checkCreateRSRawEncoder() {
if (rsRawEncoder == null) {
rsRawEncoder = CodecUtil.createRawEncoder(getConf(),
ErasureCodeConstants.RS_CODEC_NAME, getOptions());
}
return rsRawEncoder;
}
private RawErasureEncoder checkCreateXorRawEncoder() {
if (xorRawEncoder == null) {
xorRawEncoder = CodecUtil.createRawEncoder(getConf(),
ErasureCodeConstants.XOR_CODEC_NAME,
getOptions());
}
return xorRawEncoder;
}
@Override
public void release() {
if (rsRawEncoder != null) {
rsRawEncoder.release();
}
if (xorRawEncoder != null) {
xorRawEncoder.release();
}
}
}
|
HHXORErasureEncoder
|
java
|
apache__spark
|
common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java
|
{
"start": 2865,
"end": 17163
}
|
class ____ {
private static final SparkLogger logger =
SparkLoggerFactory.getLogger(ExternalShuffleBlockResolver.class);
private static final ObjectMapper mapper = new ObjectMapper();
/**
* This a common prefix to the key for each app registration we stick in RocksDB, so they
* are easy to find, since RocksDB lets you search based on prefix.
*/
private static final String APP_KEY_PREFIX = "AppExecShuffleInfo";
private static final StoreVersion CURRENT_VERSION = new StoreVersion(1, 0);
// Map containing all registered executors' metadata.
@VisibleForTesting
final ConcurrentMap<AppExecId, ExecutorShuffleInfo> executors;
/**
* Caches index file information so that we can avoid open/close the index files
* for each block fetch.
*/
private final LoadingCache<String, ShuffleIndexInformation> shuffleIndexCache;
// Single-threaded Java executor used to perform expensive recursive directory deletion.
private final Executor directoryCleaner;
private final TransportConf conf;
private final boolean rddFetchEnabled;
@VisibleForTesting
final File registeredExecutorFile;
@VisibleForTesting
final DB db;
public ExternalShuffleBlockResolver(TransportConf conf, File registeredExecutorFile)
throws IOException {
this(conf, registeredExecutorFile, Executors.newSingleThreadExecutor(
// Add `spark` prefix because it will run in NM in Yarn mode.
NettyUtils.createThreadFactory("spark-shuffle-directory-cleaner")));
}
// Allows tests to have more control over when directories are cleaned up.
@VisibleForTesting
ExternalShuffleBlockResolver(
TransportConf conf,
File registeredExecutorFile,
Executor directoryCleaner) throws IOException {
this.conf = conf;
this.rddFetchEnabled =
Boolean.parseBoolean(conf.get(Constants.SHUFFLE_SERVICE_FETCH_RDD_ENABLED, "false"));
this.registeredExecutorFile = registeredExecutorFile;
String indexCacheSize = conf.get("spark.shuffle.service.index.cache.size", "100m");
CacheLoader<String, ShuffleIndexInformation> indexCacheLoader =
new CacheLoader<String, ShuffleIndexInformation>() {
@Override
public ShuffleIndexInformation load(String filePath) throws IOException {
return new ShuffleIndexInformation(filePath);
}
};
shuffleIndexCache = CacheBuilder.newBuilder()
.maximumWeight(JavaUtils.byteStringAsBytes(indexCacheSize))
.weigher((Weigher<String, ShuffleIndexInformation>)
(filePath, indexInfo) -> indexInfo.getRetainedMemorySize())
.build(indexCacheLoader);
String dbBackendName =
conf.get(Constants.SHUFFLE_SERVICE_DB_BACKEND, DBBackend.ROCKSDB.name());
DBBackend dbBackend = DBBackend.byName(dbBackendName);
db = DBProvider.initDB(dbBackend, this.registeredExecutorFile, CURRENT_VERSION, mapper);
if (db != null) {
logger.info("Use {} as the implementation of {}",
MDC.of(LogKeys.SHUFFLE_DB_BACKEND_NAME, dbBackend),
MDC.of(LogKeys.SHUFFLE_DB_BACKEND_KEY, Constants.SHUFFLE_SERVICE_DB_BACKEND));
executors = reloadRegisteredExecutors(db);
} else {
executors = new ConcurrentHashMap<>();
}
this.directoryCleaner = directoryCleaner;
}
public int getRegisteredExecutorsSize() {
return executors.size();
}
/** Registers a new Executor with all the configuration we need to find its shuffle files. */
public void registerExecutor(
String appId,
String execId,
ExecutorShuffleInfo executorInfo) {
AppExecId fullId = new AppExecId(appId, execId);
logger.info("Registered executor {} with {}",
MDC.of(LogKeys.APP_EXECUTOR_ID, fullId),
MDC.of(LogKeys.EXECUTOR_SHUFFLE_INFO, executorInfo));
try {
if (db != null && AppsWithRecoveryDisabled.isRecoveryEnabledForApp(appId)) {
byte[] key = dbAppExecKey(fullId);
byte[] value = mapper.writeValueAsString(executorInfo).getBytes(StandardCharsets.UTF_8);
db.put(key, value);
}
} catch (Exception e) {
logger.error("Error saving registered executors", e);
}
executors.put(fullId, executorInfo);
}
/**
* Obtains a FileSegmentManagedBuffer from a single block (shuffleId, mapId, reduceId).
*/
public ManagedBuffer getBlockData(
String appId,
String execId,
int shuffleId,
long mapId,
int reduceId) {
return getContinuousBlocksData(appId, execId, shuffleId, mapId, reduceId, reduceId + 1);
}
/**
* Obtains a FileSegmentManagedBuffer from (shuffleId, mapId, [startReduceId, endReduceId)).
* We make assumptions about how the hash and sort based shuffles store their data.
*/
public ManagedBuffer getContinuousBlocksData(
String appId,
String execId,
int shuffleId,
long mapId,
int startReduceId,
int endReduceId) {
ExecutorShuffleInfo executor = executors.get(new AppExecId(appId, execId));
if (executor == null) {
throw new RuntimeException(
String.format("Executor is not registered (appId=%s, execId=%s)", appId, execId));
}
return getSortBasedShuffleBlockData(executor, shuffleId, mapId, startReduceId, endReduceId);
}
public ManagedBuffer getRddBlockData(
String appId,
String execId,
int rddId,
int splitIndex) {
ExecutorShuffleInfo executor = executors.get(new AppExecId(appId, execId));
if (executor == null) {
throw new RuntimeException(
String.format("Executor is not registered (appId=%s, execId=%s)", appId, execId));
}
return getDiskPersistedRddBlockData(executor, rddId, splitIndex);
}
/**
* Removes our metadata of all executors registered for the given application, and optionally
* also deletes the local directories associated with the executors of that application in a
* separate thread.
*
* It is not valid to call registerExecutor() for an executor with this appId after invoking
* this method.
*/
public void applicationRemoved(String appId, boolean cleanupLocalDirs) {
logger.info("Application {} removed, cleanupLocalDirs = {}",
MDC.of(LogKeys.APP_ID, appId),
MDC.of(LogKeys.CLEANUP_LOCAL_DIRS, cleanupLocalDirs));
Iterator<Map.Entry<AppExecId, ExecutorShuffleInfo>> it = executors.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<AppExecId, ExecutorShuffleInfo> entry = it.next();
AppExecId fullId = entry.getKey();
final ExecutorShuffleInfo executor = entry.getValue();
// Only touch executors associated with the appId that was removed.
if (appId.equals(fullId.appId)) {
it.remove();
if (db != null && AppsWithRecoveryDisabled.isRecoveryEnabledForApp(fullId.appId)) {
try {
db.delete(dbAppExecKey(fullId));
} catch (IOException e) {
logger.error("Error deleting {} from executor state db", e,
MDC.of(LogKeys.APP_ID, appId));
}
}
if (cleanupLocalDirs) {
logger.info("Cleaning up executor {}'s {} local dirs",
MDC.of(LogKeys.APP_EXECUTOR_ID, fullId),
MDC.of(LogKeys.NUM_LOCAL_DIRS, executor.localDirs.length));
// Execute the actual deletion in a different thread, as it may take some time.
directoryCleaner.execute(() -> deleteExecutorDirs(executor.localDirs));
}
}
}
}
/**
* Removes all the files which cannot be served by the external shuffle service (non-shuffle and
* non-RDD files) in any local directories associated with the finished executor.
*/
public void executorRemoved(String executorId, String appId) {
logger.info("Clean up non-shuffle and non-RDD files associated with the finished executor {}",
MDC.of(LogKeys.EXECUTOR_ID, executorId));
AppExecId fullId = new AppExecId(appId, executorId);
final ExecutorShuffleInfo executor = executors.get(fullId);
if (executor == null) {
// Executor not registered, skip clean up of the local directories.
logger.info("Executor is not registered (appId={}, execId={})",
MDC.of(LogKeys.APP_ID, appId),
MDC.of(LogKeys.EXECUTOR_ID, executorId));
} else {
logger.info("Cleaning up non-shuffle and non-RDD files in executor {}'s {} local dirs",
MDC.of(LogKeys.APP_EXECUTOR_ID, fullId),
MDC.of(LogKeys.NUM_LOCAL_DIRS, executor.localDirs.length));
// Execute the actual deletion in a different thread, as it may take some time.
directoryCleaner.execute(() -> deleteNonShuffleServiceServedFiles(executor.localDirs));
}
}
/**
* Synchronously deletes each directory one at a time.
* Should be executed in its own thread, as this may take a long time.
*/
private void deleteExecutorDirs(String[] dirs) {
for (String localDir : dirs) {
try {
JavaUtils.deleteRecursively(new File(localDir));
logger.debug("Successfully cleaned up directory: {}", localDir);
} catch (Exception e) {
logger.error("Failed to delete directory: {}", e,
MDC.of(LogKeys.PATH, localDir));
}
}
}
/**
* Synchronously deletes files not served by shuffle service in each directory recursively.
* Should be executed in its own thread, as this may take a long time.
*/
private void deleteNonShuffleServiceServedFiles(String[] dirs) {
FilenameFilter filter = (dir, name) -> {
// Don't delete shuffle data, shuffle index files or cached RDD files.
return !name.endsWith(".index") && !name.endsWith(".data")
&& (!rddFetchEnabled || !name.startsWith("rdd_"));
};
for (String localDir : dirs) {
try {
JavaUtils.deleteRecursively(new File(localDir), filter);
logger.debug("Successfully cleaned up files not served by shuffle service in directory: {}",
localDir);
} catch (Exception e) {
logger.error("Failed to delete files not served by shuffle service in directory: {}", e,
MDC.of(LogKeys.PATH, localDir));
}
}
}
/**
* Sort-based shuffle data uses an index called "shuffle_ShuffleId_MapId_0.index" into a data file
* called "shuffle_ShuffleId_MapId_0.data". This logic is from IndexShuffleBlockResolver,
* and the block id format is from ShuffleDataBlockId and ShuffleIndexBlockId.
*/
private ManagedBuffer getSortBasedShuffleBlockData(
ExecutorShuffleInfo executor, int shuffleId, long mapId, int startReduceId, int endReduceId) {
String indexFilePath =
ExecutorDiskUtils.getFilePath(
executor.localDirs,
executor.subDirsPerLocalDir,
"shuffle_" + shuffleId + "_" + mapId + "_0.index");
try {
ShuffleIndexInformation shuffleIndexInformation = shuffleIndexCache.get(indexFilePath);
ShuffleIndexRecord shuffleIndexRecord = shuffleIndexInformation.getIndex(
startReduceId, endReduceId);
return new FileSegmentManagedBuffer(
conf,
new File(
ExecutorDiskUtils.getFilePath(
executor.localDirs,
executor.subDirsPerLocalDir,
"shuffle_" + shuffleId + "_" + mapId + "_0.data")),
shuffleIndexRecord.offset(),
shuffleIndexRecord.length());
} catch (ExecutionException e) {
throw new RuntimeException("Failed to open file: " + indexFilePath, e);
}
}
public ManagedBuffer getDiskPersistedRddBlockData(
ExecutorShuffleInfo executor, int rddId, int splitIndex) {
File file = new File(
ExecutorDiskUtils.getFilePath(
executor.localDirs, executor.subDirsPerLocalDir, "rdd_" + rddId + "_" + splitIndex));
long fileLength = file.length();
ManagedBuffer res = null;
if (file.exists()) {
res = new FileSegmentManagedBuffer(conf, file, 0, fileLength);
}
return res;
}
void close() {
if (db != null) {
try {
db.close();
} catch (IOException e) {
logger.error("Exception closing RocksDB with registered executors", e);
}
}
}
public int removeBlocks(String appId, String execId, String[] blockIds) {
ExecutorShuffleInfo executor = executors.get(new AppExecId(appId, execId));
if (executor == null) {
throw new RuntimeException(
String.format("Executor is not registered (appId=%s, execId=%s)", appId, execId));
}
int numRemovedBlocks = 0;
for (String blockId : blockIds) {
File file = new File(
ExecutorDiskUtils.getFilePath(executor.localDirs, executor.subDirsPerLocalDir, blockId));
if (file.delete()) {
numRemovedBlocks++;
} else {
logger.warn("Failed to delete block: {}",
MDC.of(LogKeys.PATH, file.getAbsolutePath()));
}
}
return numRemovedBlocks;
}
public Map<String, String[]> getLocalDirs(String appId, Set<String> execIds) {
return execIds.stream()
.map(exec -> {
ExecutorShuffleInfo info = executors.get(new AppExecId(appId, exec));
if (info == null) {
throw new RuntimeException(
String.format("Executor is not registered (appId=%s, execId=%s)", appId, exec));
}
return Pair.of(exec, info.localDirs);
})
.collect(Collectors.toMap(Pair::getLeft, Pair::getRight));
}
/**
* Diagnose the possible cause of the shuffle data corruption by verifying the shuffle checksums
*/
public Cause diagnoseShuffleBlockCorruption(
String appId,
String execId,
int shuffleId,
long mapId,
int reduceId,
long checksumByReader,
String algorithm) {
ExecutorShuffleInfo executor = executors.get(new AppExecId(appId, execId));
// This should be in sync with IndexShuffleBlockResolver.getChecksumFile
String fileName = "shuffle_" + shuffleId + "_" + mapId + "_0.checksum." + algorithm;
File checksumFile = new File(
ExecutorDiskUtils.getFilePath(executor.localDirs, executor.subDirsPerLocalDir, fileName));
ManagedBuffer data = getBlockData(appId, execId, shuffleId, mapId, reduceId);
return ShuffleChecksumHelper.diagnoseCorruption(
algorithm, checksumFile, reduceId, data, checksumByReader);
}
/** Simply encodes an executor's full ID, which is appId + execId. */
public static
|
ExternalShuffleBlockResolver
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/hive/LogicalDistribution.java
|
{
"start": 1441,
"end": 3027
}
|
class ____ extends SingleRel {
// distribution keys
private final List<Integer> distKeys;
// sort collation
private final RelCollation collation;
private LogicalDistribution(
RelOptCluster cluster,
RelTraitSet traits,
RelNode child,
RelCollation collation,
List<Integer> distKeys) {
super(cluster, traits, child);
this.distKeys = distKeys;
this.collation = collation;
}
public static LogicalDistribution create(
RelNode input, RelCollation collation, List<Integer> distKeys) {
RelOptCluster cluster = input.getCluster();
collation = RelCollationTraitDef.INSTANCE.canonize(collation);
RelTraitSet traitSet = input.getTraitSet().replace(Convention.NONE).replace(collation);
return new LogicalDistribution(cluster, traitSet, input, collation, distKeys);
}
public List<Integer> getDistKeys() {
return distKeys;
}
public RelCollation getCollation() {
return collation;
}
@Override
public LogicalDistribution copy(RelTraitSet traitSet, List<RelNode> inputs) {
return new LogicalDistribution(getCluster(), traitSet, inputs.get(0), collation, distKeys);
}
@Override
public RelNode accept(RelShuttle shuttle) {
return shuttle.visit(this);
}
@Override
public RelWriter explainTerms(RelWriter pw) {
super.explainTerms(pw);
pw.item("collation", collation);
pw.item("dist", distKeys);
return pw;
}
}
|
LogicalDistribution
|
java
|
spring-projects__spring-boot
|
core/spring-boot-test/src/test/java/org/springframework/boot/test/context/SpringBootContextLoaderAotTests.java
|
{
"start": 2313,
"end": 4553
}
|
class ____ {
	@Test
	void loadContextForAotProcessingAndAotRuntime() {
		// Run the test-context AOT generator for the sample test class, collecting the
		// generated sources/resources in memory.
		InMemoryGeneratedFiles generatedFiles = new InMemoryGeneratedFiles();
		TestContextAotGenerator generator = new TestContextAotGenerator(generatedFiles);
		Class<?> testClass = ExampleTest.class;
		generator.processAheadOfTime(Stream.of(testClass));
		// Compile the generated files, then assert against the compiled AOT artifacts.
		TestCompiler.forSystem()
			.with(CompilerFiles.from(generatedFiles))
			.compile(ThrowingConsumer.of((compiled) -> assertCompiledTest(testClass)));
	}
	private void assertCompiledTest(Class<?> testClass) throws Exception {
		try {
			// Switch to AOT mode and clear any cached AOT state before resolving the
			// generated context initializer for the test class.
			System.setProperty(AotDetector.AOT_ENABLED, "true");
			resetAotClasses();
			AotTestContextInitializers aotContextInitializers = new AotTestContextInitializers();
			TestContextBootstrapper testContextBootstrapper = BootstrapUtils.resolveTestContextBootstrapper(testClass);
			MergedContextConfiguration mergedConfig = testContextBootstrapper.buildMergedContextConfiguration();
			ApplicationContextInitializer<ConfigurableApplicationContext> contextInitializer = aotContextInitializers
				.getContextInitializer(testClass);
			assertThat(contextInitializer).isNotNull();
			// Load the context via the AOT runtime path of the configured context loader.
			ConfigurableApplicationContext context = (ConfigurableApplicationContext) ((AotContextLoader) mergedConfig
				.getContextLoader()).loadContextForAotRuntime(mergedConfig, contextInitializer);
			assertThat(context).isExactlyInstanceOf(GenericApplicationContext.class);
			// The bean definition should be an AOT-specific type, not a plain GenericBeanDefinition.
			String[] beanNames = context.getBeanNamesForType(ExampleBean.class);
			BeanDefinition beanDefinition = context.getBeanFactory().getBeanDefinition(beanNames[0]);
			assertThat(beanDefinition).isNotExactlyInstanceOf(GenericBeanDefinition.class);
		}
		finally {
			// Restore non-AOT mode so later tests in the JVM are unaffected.
			System.clearProperty(AotDetector.AOT_ENABLED);
			resetAotClasses();
		}
	}
	private void resetAotClasses() {
		// Clear cached AOT singletons so each load starts from a clean state.
		reset("org.springframework.test.context.aot.AotTestAttributesFactory");
		reset("org.springframework.test.context.aot.AotTestContextInitializersFactory");
	}
	private void reset(String className) {
		// Invoke the class's static reset() reflectively (it is not publicly accessible).
		Class<?> targetClass = ClassUtils.resolveClassName(className, null);
		ReflectionTestUtils.invokeMethod(targetClass, "reset");
	}
@SpringBootTest(classes = ExampleConfig.class, webEnvironment = WebEnvironment.NONE)
static
|
SpringBootContextLoaderAotTests
|
java
|
quarkusio__quarkus
|
extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/errors/WriteErrorClosedConnectionTest.java
|
{
"start": 1490,
"end": 2156
}
|
/**
 * Echo endpoint that deliberately closes the connection before replying with the binary
 * message, forcing a write error; records whether the error handler was invoked.
 */
class Echo {

    static final AtomicBoolean ERROR_HANDLER_CALLED = new AtomicBoolean();

    @OnBinaryMessage
    Uni<Buffer> process(Buffer message, WebSocketConnection connection) {
        // This should result in a failure because the connection is closed
        // but we still try to write a binary message
        return connection.close().replaceWith(message);
    }

    @OnError
    void runtimeProblem(Throwable t, WebSocketConnection connection) {
        // The error is only expected once the connection has been closed.
        if (connection.isOpen()) {
            throw new IllegalStateException();
        }
        ERROR_HANDLER_CALLED.set(true);
    }
}
}
|
Echo
|
java
|
apache__camel
|
core/camel-console/src/main/java/org/apache/camel/impl/console/MessageHistoryDevConsole.java
|
{
"start": 1398,
"end": 2852
}
|
class ____ extends AbstractDevConsole {
public MessageHistoryDevConsole() {
super("camel", "message-history", "Message History", "History of latest completed exchange");
}
protected String doCallText(Map<String, Object> options) {
StringBuilder sb = new StringBuilder();
BacklogTracer tracer = getCamelContext().getCamelContextExtension().getContextPlugin(BacklogTracer.class);
if (tracer != null) {
Collection<BacklogTracerEventMessage> queue = tracer.getLatestMessageHistory();
for (BacklogTracerEventMessage t : queue) {
String json = t.toJSon(0);
sb.append(json).append("\n");
}
}
return sb.toString();
}
protected JsonObject doCallJson(Map<String, Object> options) {
JsonObject root = new JsonObject();
BacklogTracer tracer = getCamelContext().getCamelContextExtension().getContextPlugin(BacklogTracer.class);
if (tracer != null) {
JsonArray arr = new JsonArray();
Collection<BacklogTracerEventMessage> queue = tracer.getLatestMessageHistory();
for (BacklogTracerEventMessage t : queue) {
JsonObject jo = (JsonObject) t.asJSon();
arr.add(jo);
}
root.put("name", getCamelContext().getName());
root.put("traces", arr);
}
return root;
}
}
|
MessageHistoryDevConsole
|
java
|
spring-projects__spring-boot
|
smoke-test/spring-boot-smoke-test-webservices/src/main/java/smoketest/webservices/service/StubHumanResourceService.java
|
{
"start": 838,
"end": 1183
}
|
/**
 * Stub {@link HumanResourceService} implementation that only logs the booking request.
 */
class StubHumanResourceService implements HumanResourceService {

	private static final Log logger = LogFactory.getLog(StubHumanResourceService.class);

	@Override
	public void bookHoliday(LocalDate startDate, LocalDate endDate, String name) {
		logger.info("Booking holiday for [" + startDate + " - " + endDate + "] for [" + name + "]");
	}

}
|
StubHumanResourceService
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/presentation/StandardRepresentation_toStringOf_Test.java
|
{
"start": 5751,
"end": 5960
}
|
class ____ {
}
// WHEN
String localClassStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(LocalClass.class);
// THEN
then(localClassStandardRepresentation).isEqualTo("local
|
LocalClass
|
java
|
apache__camel
|
components/camel-aws/camel-aws2-ec2/src/main/java/org/apache/camel/component/aws2/ec2/client/AWS2EC2InternalClient.java
|
{
"start": 1002,
"end": 1211
}
|
/**
 * Abstraction for obtaining an EC2 client; implementations decide which concrete
 * client to hand out.
 */
interface AWS2EC2InternalClient {

    /**
     * Returns an EC2 client after a factory method determines which one to return.
     *
     * @return Ec2Client Ec2Client
     */
    Ec2Client getEc2Client();
}
|
AWS2EC2InternalClient
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllFirstBytesRefByTimestampAggregatorFunctionSupplier.java
|
{
"start": 666,
"end": 1790
}
|
class ____ implements AggregatorFunctionSupplier {
public AllFirstBytesRefByTimestampAggregatorFunctionSupplier() {
}
@Override
public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
return AllFirstBytesRefByTimestampAggregatorFunction.intermediateStateDesc();
}
@Override
public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
return AllFirstBytesRefByTimestampGroupingAggregatorFunction.intermediateStateDesc();
}
@Override
public AllFirstBytesRefByTimestampAggregatorFunction aggregator(DriverContext driverContext,
List<Integer> channels) {
return AllFirstBytesRefByTimestampAggregatorFunction.create(driverContext, channels);
}
@Override
public AllFirstBytesRefByTimestampGroupingAggregatorFunction groupingAggregator(
DriverContext driverContext, List<Integer> channels) {
return AllFirstBytesRefByTimestampGroupingAggregatorFunction.create(channels, driverContext);
}
@Override
public String describe() {
return AllFirstBytesRefByTimestampAggregator.describe();
}
}
|
AllFirstBytesRefByTimestampAggregatorFunctionSupplier
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.