| language (stringclasses, 1 value) | repo (stringclasses, 60 values) | path (stringlengths, 22-294) | class_span (dict) | source (stringlengths, 13-1.16M) | target (stringlengths, 1-113) |
|---|---|---|---|---|---|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/CloseableDecoratorTypes.java
|
{
"start": 1427,
"end": 2217
}
|
class ____ {
public static final ImmutableSetMultimap<String, String> CLOSEABLE_DECORATOR_TYPES =
ImmutableSetMultimap.<String, String>builder()
.putAll(
InputStream.class.getName(),
FilterInputStream.class.getName(), // e.g., BufferedInputStream
InputStreamReader.class.getName())
.putAll(
OutputStream.class.getName(),
FilterOutputStream.class.getName(), // e.g., BufferedOutputStream
OutputStreamWriter.class.getName())
.put(Reader.class.getName(), Reader.class.getName()) // e.g., BufferedReader
.put(Writer.class.getName(), Writer.class.getName()) // e.g., BufferedWriter
.build();
private CloseableDecoratorTypes() {}
}
|
CloseableDecoratorTypes
|
java
|
apache__camel
|
components/camel-stomp/src/generated/java/org/apache/camel/component/stomp/StompEndpointConfigurer.java
|
{
"start": 732,
"end": 5770
}
|
class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
StompEndpoint target = (StompEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "bridgeerrorhandler":
case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
case "brokerurl":
case "brokerURL": target.getConfiguration().setBrokerURL(property(camelContext, java.lang.String.class, value)); return true;
case "customheaders":
case "customHeaders": target.getConfiguration().setCustomHeaders(property(camelContext, java.util.Properties.class, value)); return true;
case "exceptionhandler":
case "exceptionHandler": target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true;
case "exchangepattern":
case "exchangePattern": target.setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value)); return true;
case "headerfilterstrategy":
case "headerFilterStrategy": target.setHeaderFilterStrategy(property(camelContext, org.apache.camel.spi.HeaderFilterStrategy.class, value)); return true;
case "host": target.getConfiguration().setHost(property(camelContext, java.lang.String.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "login": target.getConfiguration().setLogin(property(camelContext, java.lang.String.class, value)); return true;
case "passcode": target.getConfiguration().setPasscode(property(camelContext, java.lang.String.class, value)); return true;
case "sslcontextparameters":
case "sslContextParameters": target.getConfiguration().setSslContextParameters(property(camelContext, org.apache.camel.support.jsse.SSLContextParameters.class, value)); return true;
case "version": target.getConfiguration().setVersion(property(camelContext, java.lang.String.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "bridgeerrorhandler":
case "bridgeErrorHandler": return boolean.class;
case "brokerurl":
case "brokerURL": return java.lang.String.class;
case "customheaders":
case "customHeaders": return java.util.Properties.class;
case "exceptionhandler":
case "exceptionHandler": return org.apache.camel.spi.ExceptionHandler.class;
case "exchangepattern":
case "exchangePattern": return org.apache.camel.ExchangePattern.class;
case "headerfilterstrategy":
case "headerFilterStrategy": return org.apache.camel.spi.HeaderFilterStrategy.class;
case "host": return java.lang.String.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
case "login": return java.lang.String.class;
case "passcode": return java.lang.String.class;
case "sslcontextparameters":
case "sslContextParameters": return org.apache.camel.support.jsse.SSLContextParameters.class;
case "version": return java.lang.String.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
StompEndpoint target = (StompEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "bridgeerrorhandler":
case "bridgeErrorHandler": return target.isBridgeErrorHandler();
case "brokerurl":
case "brokerURL": return target.getConfiguration().getBrokerURL();
case "customheaders":
case "customHeaders": return target.getConfiguration().getCustomHeaders();
case "exceptionhandler":
case "exceptionHandler": return target.getExceptionHandler();
case "exchangepattern":
case "exchangePattern": return target.getExchangePattern();
case "headerfilterstrategy":
case "headerFilterStrategy": return target.getHeaderFilterStrategy();
case "host": return target.getConfiguration().getHost();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "login": return target.getConfiguration().getLogin();
case "passcode": return target.getConfiguration().getPasscode();
case "sslcontextparameters":
case "sslContextParameters": return target.getConfiguration().getSslContextParameters();
case "version": return target.getConfiguration().getVersion();
default: return null;
}
}
}
|
StompEndpointConfigurer
|
java
|
quarkusio__quarkus
|
extensions/datasource/deployment-spi/src/main/java/io/quarkus/datasource/deployment/spi/DevServicesDatasourceProviderBuildItem.java
|
{
"start": 185,
"end": 726
}
|
class ____ extends MultiBuildItem {
private final String database;
private final DevServicesDatasourceProvider devDBProvider;
public DevServicesDatasourceProviderBuildItem(String database, DevServicesDatasourceProvider devDBProvider) {
this.database = database;
this.devDBProvider = devDBProvider;
}
public String getDatabase() {
return database;
}
public DevServicesDatasourceProvider getDevServicesProvider() {
return devDBProvider;
}
}
|
DevServicesDatasourceProviderBuildItem
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/scripting/groovy/ConcreteMessenger.java
|
{
"start": 773,
"end": 1007
}
|
class ____ implements ConfigurableMessenger {
private String message;
@Override
public String getMessage() {
return message;
}
@Override
public void setMessage(String message) {
this.message = message;
}
}
|
ConcreteMessenger
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/qa/jdbc/security/src/test/java/org/elasticsearch/xpack/sql/qa/jdbc/security/JdbcPreparedStatementIT.java
|
{
"start": 467,
"end": 976
}
|
class ____ extends PreparedStatementTestCase {
@Override
protected Settings restClientSettings() {
return JdbcConnectionIT.securitySettings();
}
@Override
protected String getProtocol() {
return JdbcConnectionIT.SSL_ENABLED ? "https" : "http";
}
@Override
protected Properties connectionProperties() {
Properties sp = super.connectionProperties();
sp.putAll(JdbcSecurityUtils.adminProperties());
return sp;
}
}
|
JdbcPreparedStatementIT
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java
|
{
"start": 1778,
"end": 10369
}
|
class ____ extends ESTestCase {
public void testSameNormsAsBM25CountOverlaps() {
doTestSameNormsAsBM25(false);
}
public void testSameNormsAsBM25DiscountOverlaps() {
doTestSameNormsAsBM25(true);
}
private void doTestSameNormsAsBM25(boolean discountOverlaps) {
ScriptedSimilarity sim1 = new ScriptedSimilarity("foobar", null, "foobaz", null, discountOverlaps);
BM25Similarity sim2 = new BM25Similarity(discountOverlaps);
for (int iter = 0; iter < 100; ++iter) {
final int length = TestUtil.nextInt(random(), 1, 100);
final int position = random().nextInt(length);
final int numOverlaps = random().nextInt(length);
int maxTermFrequency = TestUtil.nextInt(random(), 1, 10);
int uniqueTermCount = TestUtil.nextInt(random(), 1, 10);
FieldInvertState state = new FieldInvertState(
Version.LATEST.major,
"foo",
IndexOptions.DOCS_AND_FREQS,
position,
length,
numOverlaps,
100,
maxTermFrequency,
uniqueTermCount
);
assertEquals(sim2.computeNorm(state), sim1.computeNorm(state), 0f);
}
}
public void testBasics() throws IOException {
final AtomicBoolean called = new AtomicBoolean();
SimilarityScript.Factory scriptFactory = () -> {
return new SimilarityScript() {
@Override
public double execute(
double weight,
ScriptedSimilarity.Query query,
ScriptedSimilarity.Field field,
ScriptedSimilarity.Term term,
ScriptedSimilarity.Doc doc
) {
StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace();
if (Arrays.stream(stackTraceElements).anyMatch(ste -> {
return ste.getClassName().endsWith(".TermScorer") && ste.getMethodName().equals("score");
}) == false) {
// this might happen when computing max scores
return Float.MAX_VALUE;
}
assertEquals(1, weight, 0);
assertNotNull(doc);
assertEquals(2f, doc.getFreq(), 0);
assertEquals(3, doc.getLength(), 0);
assertNotNull(field);
assertEquals(3, field.getDocCount());
assertEquals(5, field.getSumDocFreq());
assertEquals(6, field.getSumTotalTermFreq());
assertNotNull(term);
assertEquals(2, term.getDocFreq());
assertEquals(3, term.getTotalTermFreq());
assertNotNull(query);
assertEquals(3.2f, query.getBoost(), 0);
called.set(true);
return 42f;
}
};
};
ScriptedSimilarity sim = new ScriptedSimilarity("foobar", null, "foobaz", scriptFactory, true);
Directory dir = new ByteBuffersDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setSimilarity(sim));
Document doc = new Document();
doc.add(new TextField("f", "foo bar", Store.NO));
doc.add(new StringField("match", "no", Store.NO));
w.addDocument(doc);
doc = new Document();
doc.add(new TextField("f", "foo foo bar", Store.NO));
doc.add(new StringField("match", "yes", Store.NO));
w.addDocument(doc);
doc = new Document();
doc.add(new TextField("f", "bar", Store.NO));
doc.add(new StringField("match", "no", Store.NO));
w.addDocument(doc);
IndexReader r = DirectoryReader.open(w);
w.close();
IndexSearcher searcher = newSearcher(r);
searcher.setSimilarity(sim);
Query query = new BoostQuery(
new BooleanQuery.Builder().add(new TermQuery(new Term("f", "foo")), Occur.SHOULD)
.add(new TermQuery(new Term("match", "yes")), Occur.FILTER)
.build(),
3.2f
);
TopDocs topDocs = searcher.search(query, 1);
assertEquals(1, topDocs.totalHits.value());
assertTrue(called.get());
assertEquals(42, topDocs.scoreDocs[0].score, 0);
r.close();
w.close();
dir.close();
}
public void testInitScript() throws IOException {
final AtomicBoolean initCalled = new AtomicBoolean();
SimilarityWeightScript.Factory weightScriptFactory = () -> {
return new SimilarityWeightScript() {
@Override
public double execute(ScriptedSimilarity.Query query, ScriptedSimilarity.Field field, ScriptedSimilarity.Term term) {
assertEquals(3, field.getDocCount());
assertEquals(5, field.getSumDocFreq());
assertEquals(6, field.getSumTotalTermFreq());
assertNotNull(term);
assertEquals(1, term.getDocFreq());
assertEquals(2, term.getTotalTermFreq());
assertNotNull(query);
assertEquals(3.2f, query.getBoost(), 0);
initCalled.set(true);
return 28;
}
};
};
final AtomicBoolean called = new AtomicBoolean();
SimilarityScript.Factory scriptFactory = () -> {
return new SimilarityScript() {
@Override
public double execute(
double weight,
ScriptedSimilarity.Query query,
ScriptedSimilarity.Field field,
ScriptedSimilarity.Term term,
ScriptedSimilarity.Doc doc
) {
StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace();
if (Arrays.stream(stackTraceElements).anyMatch(ste -> {
return ste.getClassName().endsWith(".TermScorer")
&& (ste.getMethodName().equals("score") || ste.getMethodName().equals("nextDocsAndScores"));
}) == false) {
// this might happen when computing max scores
return Float.MAX_VALUE;
}
assertEquals(28, weight, 0d);
assertNotNull(doc);
assertEquals(2f, doc.getFreq(), 0);
assertEquals(3, doc.getLength(), 0);
assertNotNull(field);
assertEquals(3, field.getDocCount());
assertEquals(5, field.getSumDocFreq());
assertEquals(6, field.getSumTotalTermFreq());
assertNotNull(term);
assertEquals(1, term.getDocFreq());
assertEquals(2, term.getTotalTermFreq());
assertNotNull(query);
assertEquals(3.2f, query.getBoost(), 0);
called.set(true);
return 42;
}
};
};
ScriptedSimilarity sim = new ScriptedSimilarity("foobar", weightScriptFactory, "foobaz", scriptFactory, true);
Directory dir = new ByteBuffersDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setSimilarity(sim));
Document doc = new Document();
doc.add(new TextField("f", "bar baz", Store.NO));
w.addDocument(doc);
doc = new Document();
doc.add(new TextField("f", "foo foo bar", Store.NO));
doc.add(new StringField("match", "yes", Store.NO));
w.addDocument(doc);
doc = new Document();
doc.add(new TextField("f", "bar", Store.NO));
w.addDocument(doc);
IndexReader r = DirectoryReader.open(w);
w.close();
IndexSearcher searcher = newSearcher(r);
searcher.setSimilarity(sim);
Query query = new BoostQuery(new TermQuery(new Term("f", "foo")), 3.2f);
TopDocs topDocs = searcher.search(query, 1);
assertEquals(1, topDocs.totalHits.value());
assertTrue(initCalled.get());
assertTrue(called.get());
assertEquals(42, topDocs.scoreDocs[0].score, 0);
r.close();
w.close();
dir.close();
}
}
|
ScriptedSimilarityTests
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/ClaimCheckEipPushPopHeadersPatternTest.java
|
{
"start": 1073,
"end": 3024
}
|
class ____ extends ContextTestSupport {
@Test
public void testPushPopHeadersPattern() throws Exception {
Map<String, Object> headers = new HashMap<>();
headers.put("foo", 123);
headers.put("bar", "Moes");
headers.put("car", "Toyota");
getMockEndpoint("mock:a").expectedBodiesReceived("Hello World");
getMockEndpoint("mock:a").expectedHeaderReceived("foo", 123);
getMockEndpoint("mock:a").expectedHeaderReceived("bar", "Moes");
getMockEndpoint("mock:a").message(0).header("car").isEqualTo("Toyota");
getMockEndpoint("mock:b").expectedBodiesReceived("Bye World");
getMockEndpoint("mock:b").expectedHeaderReceived("foo", 456);
getMockEndpoint("mock:b").message(0).header("bar").isNull();
getMockEndpoint("mock:b").message(0).header("car").isEqualTo("Toyota");
getMockEndpoint("mock:c").expectedBodiesReceived("Bye World");
getMockEndpoint("mock:c").expectedHeaderReceived("foo", 123);
// bar header should be back now
getMockEndpoint("mock:c").expectedHeaderReceived("bar", "Moes");
getMockEndpoint("mock:c").message(0).header("car").isEqualTo("Toyota");
template.sendBodyAndHeaders("direct:start", "Hello World", headers);
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("mock:a").claimCheck(ClaimCheckOperation.Push).transform().constant("Bye World")
.setHeader("foo", constant(456)).removeHeader("bar")
.to("mock:b")
// only merge in the message headers
.claimCheck(ClaimCheckOperation.Pop, null, "header:(foo|bar)").to("mock:c");
}
};
}
}
|
ClaimCheckEipPushPopHeadersPatternTest
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/serializer/DupSetterTest.java
|
{
"start": 858,
"end": 1024
}
|
enum ____ {
ENABLE(1);
private Integer code;
Status(Integer code){
this.code = code;
}
}
}
|
Status
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/mapper/AbstractPointGeometryFieldMapper.java
|
{
"start": 2131,
"end": 6191
}
|
class ____<T> extends Parser<T> {
protected final String field;
protected final CheckedFunction<XContentParser, T, IOException> objectParser;
private final T nullValue;
private final boolean ignoreZValue;
protected final boolean ignoreMalformed;
private final boolean allowMultipleValues;
protected PointParser(
String field,
CheckedFunction<XContentParser, T, IOException> objectParser,
T nullValue,
boolean ignoreZValue,
boolean ignoreMalformed,
boolean allowMultipleValues
) {
this.field = field;
this.objectParser = objectParser;
this.nullValue = nullValue == null ? null : validate(nullValue);
this.ignoreZValue = ignoreZValue;
this.ignoreMalformed = ignoreMalformed;
this.allowMultipleValues = allowMultipleValues;
}
protected abstract T validate(T in);
protected abstract T createPoint(double x, double y);
@Override
public void parse(XContentParser parser, CheckedConsumer<T, IOException> consumer, MalformedValueHandler malformedHandler)
throws IOException {
if (parser.currentToken() == XContentParser.Token.START_ARRAY) {
XContentParser.Token token = parser.nextToken();
if (token == XContentParser.Token.VALUE_NUMBER) {
double x = parser.doubleValue();
parser.nextToken();
double y = parser.doubleValue();
token = parser.nextToken();
if (token == XContentParser.Token.VALUE_NUMBER) {
if (ignoreZValue == false) {
throw new ElasticsearchParseException(
"Exception parsing coordinates: found Z value [{}] but [ignore_z_value] " + "parameter is [{}]",
parser.doubleValue(),
ignoreZValue
);
}
} else if (token != XContentParser.Token.END_ARRAY) {
throw new ElasticsearchParseException("field type does not accept > 3 dimensions");
}
T point = createPoint(x, y);
consumer.accept(validate(point));
} else {
int count = 0;
while (token != XContentParser.Token.END_ARRAY) {
if (allowMultipleValues == false && ++count > 1) {
throw new ElasticsearchParseException("field type for [{}] does not accept more than single value", field);
}
if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
if (nullValue != null) {
consumer.accept(nullValue);
}
} else {
parseAndConsumeFromObject(parser, consumer, malformedHandler);
}
token = parser.nextToken();
}
}
} else if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
if (nullValue != null) {
consumer.accept(nullValue);
}
} else {
parseAndConsumeFromObject(parser, consumer, malformedHandler);
}
}
protected void parseAndConsumeFromObject(
XContentParser parser,
CheckedConsumer<T, IOException> consumer,
MalformedValueHandler malformedHandler
) throws IOException {
try {
T point = objectParser.apply(parser);
consumer.accept(validate(point));
} catch (Exception e) {
malformedHandler.notify(e);
}
}
}
public abstract static
|
PointParser
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/reflect/TypeUtils.java
|
{
"start": 35222,
"end": 36971
}
|
class ____ type parameters are to be determined based on the subtype {@code type}
* @return a {@link Map} of the type assignments for the type variables in each type in the inheritance hierarchy from {@code type} to {@code toClass}
* inclusive.
*/
public static Map<TypeVariable<?>, Type> getTypeArguments(final Type type, final Class<?> toClass) {
return getTypeArguments(type, toClass, null);
}
/**
* Gets a map of the type arguments of {@code type} in the context of {@code toClass}.
*
* @param type the type in question.
* @param toClass the class.
* @param subtypeVarAssigns a map with type variables.
* @return the {@link Map} with type arguments.
*/
private static Map<TypeVariable<?>, Type> getTypeArguments(final Type type, final Class<?> toClass, final Map<TypeVariable<?>, Type> subtypeVarAssigns) {
if (type instanceof Class<?>) {
return getTypeArguments((Class<?>) type, toClass, subtypeVarAssigns);
}
if (type instanceof ParameterizedType) {
return getTypeArguments((ParameterizedType) type, toClass, subtypeVarAssigns);
}
if (type instanceof GenericArrayType) {
return getTypeArguments(((GenericArrayType) type).getGenericComponentType(), toClass.isArray() ? toClass.getComponentType() : toClass,
subtypeVarAssigns);
}
// since wildcard types are not assignable to classes, should this just
// return null?
if (type instanceof WildcardType) {
for (final Type bound : getImplicitUpperBounds((WildcardType) type)) {
// find the first bound that is assignable to the target
|
whose
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/OverriddenChecksAnnotation.java
|
{
"start": 618,
"end": 1728
}
|
class ____
implements DialectOverride.Checks, RepeatableContainer<DialectOverride.Check> {
private DialectOverride.Check[] value;
/**
* Used in creating dynamic annotation instances (e.g. from XML)
*/
public OverriddenChecksAnnotation(ModelsContext modelContext) {
}
/**
* Used in creating annotation instances from JDK variant
*/
public OverriddenChecksAnnotation(DialectOverride.Checks annotation, ModelsContext modelContext) {
value( extractJdkValue( annotation, DialectOverrideAnnotations.DIALECT_OVERRIDE_CHECKS, "value", modelContext ) );
}
/**
* Used in creating annotation instances from Jandex variant
*/
public OverriddenChecksAnnotation(Map<String, Object> attributeValues, ModelsContext modelContext) {
value( (DialectOverride.Check[]) attributeValues.get( "value" ) );
}
@Override
public DialectOverride.Check[] value() {
return value;
}
@Override
public void value(DialectOverride.Check[] value) {
this.value = value;
}
@Override
public Class<? extends Annotation> annotationType() {
return DialectOverride.Checks.class;
}
}
|
OverriddenChecksAnnotation
|
java
|
apache__flink
|
flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/api/java/hadoop/mapred/HadoopInputFormatTest.java
|
{
"start": 2146,
"end": 8798
}
|
class ____ {
@Test
void testConfigureWithConfigurableInstance() {
ConfigurableDummyInputFormat inputFormat = mock(ConfigurableDummyInputFormat.class);
HadoopInputFormat<String, Long> hadoopInputFormat =
new HadoopInputFormat<>(inputFormat, String.class, Long.class, new JobConf());
verify(inputFormat, times(1)).setConf(any(JobConf.class));
hadoopInputFormat.configure(new org.apache.flink.configuration.Configuration());
verify(inputFormat, times(2)).setConf(any(JobConf.class));
}
@Test
void testConfigureWithJobConfigurableInstance() {
JobConfigurableDummyInputFormat inputFormat = mock(JobConfigurableDummyInputFormat.class);
HadoopInputFormat<String, Long> hadoopInputFormat =
new HadoopInputFormat<>(inputFormat, String.class, Long.class, new JobConf());
verify(inputFormat, times(1)).configure(any(JobConf.class));
hadoopInputFormat.configure(new org.apache.flink.configuration.Configuration());
verify(inputFormat, times(2)).configure(any(JobConf.class));
}
@Test
void testOpenClose() throws Exception {
DummyRecordReader recordReader = mock(DummyRecordReader.class);
DummyInputFormat inputFormat = mock(DummyInputFormat.class);
when(inputFormat.getRecordReader(
any(InputSplit.class), any(JobConf.class), any(Reporter.class)))
.thenReturn(recordReader);
HadoopInputFormat<String, Long> hadoopInputFormat =
new HadoopInputFormat<>(inputFormat, String.class, Long.class, new JobConf());
hadoopInputFormat.open(getHadoopInputSplit());
verify(inputFormat, times(1))
.getRecordReader(any(InputSplit.class), any(JobConf.class), any(Reporter.class));
verify(recordReader, times(1)).createKey();
verify(recordReader, times(1)).createValue();
assertThat(hadoopInputFormat.fetched).isFalse();
hadoopInputFormat.close();
verify(recordReader, times(1)).close();
}
@Test
void testOpenWithConfigurableReader() throws Exception {
ConfigurableDummyRecordReader recordReader = mock(ConfigurableDummyRecordReader.class);
DummyInputFormat inputFormat = mock(DummyInputFormat.class);
when(inputFormat.getRecordReader(
any(InputSplit.class), any(JobConf.class), any(Reporter.class)))
.thenReturn(recordReader);
HadoopInputFormat<String, Long> hadoopInputFormat =
new HadoopInputFormat<>(inputFormat, String.class, Long.class, new JobConf());
hadoopInputFormat.open(getHadoopInputSplit());
verify(inputFormat, times(1))
.getRecordReader(any(InputSplit.class), any(JobConf.class), any(Reporter.class));
verify(recordReader, times(1)).setConf(any(JobConf.class));
verify(recordReader, times(1)).createKey();
verify(recordReader, times(1)).createValue();
assertThat(hadoopInputFormat.fetched).isFalse();
}
@Test
void testCreateInputSplits() throws Exception {
FileSplit[] result = new FileSplit[1];
result[0] = getFileSplit();
DummyInputFormat inputFormat = mock(DummyInputFormat.class);
when(inputFormat.getSplits(any(JobConf.class), anyInt())).thenReturn(result);
HadoopInputFormat<String, Long> hadoopInputFormat =
new HadoopInputFormat<>(inputFormat, String.class, Long.class, new JobConf());
hadoopInputFormat.createInputSplits(2);
verify(inputFormat, times(1)).getSplits(any(JobConf.class), anyInt());
}
@Test
void testReachedEndWithElementsRemaining() throws IOException {
HadoopInputFormat<String, Long> hadoopInputFormat =
new HadoopInputFormat<>(
new DummyInputFormat(), String.class, Long.class, new JobConf());
hadoopInputFormat.fetched = true;
hadoopInputFormat.hasNext = true;
assertThat(hadoopInputFormat.reachedEnd()).isFalse();
}
@Test
void testReachedEndWithNoElementsRemaining() throws IOException {
HadoopInputFormat<String, Long> hadoopInputFormat =
new HadoopInputFormat<>(
new DummyInputFormat(), String.class, Long.class, new JobConf());
hadoopInputFormat.fetched = true;
hadoopInputFormat.hasNext = false;
assertThat(hadoopInputFormat.reachedEnd()).isTrue();
}
@Test
void testFetchNext() throws IOException {
DummyRecordReader recordReader = mock(DummyRecordReader.class);
when(recordReader.next(nullable(String.class), nullable(Long.class))).thenReturn(true);
DummyInputFormat inputFormat = mock(DummyInputFormat.class);
when(inputFormat.getRecordReader(
any(InputSplit.class), any(JobConf.class), any(Reporter.class)))
.thenReturn(recordReader);
HadoopInputFormat<String, Long> hadoopInputFormat =
new HadoopInputFormat<>(inputFormat, String.class, Long.class, new JobConf());
hadoopInputFormat.open(getHadoopInputSplit());
hadoopInputFormat.fetchNext();
verify(recordReader, times(1)).next(nullable(String.class), anyLong());
assertThat(hadoopInputFormat.hasNext).isTrue();
assertThat(hadoopInputFormat.fetched).isTrue();
}
@Test
void checkTypeInformation() throws Exception {
HadoopInputFormat<Void, Long> hadoopInputFormat =
new HadoopInputFormat<>(
new DummyVoidKeyInputFormat<Long>(), Void.class, Long.class, new JobConf());
TypeInformation<Tuple2<Void, Long>> tupleType = hadoopInputFormat.getProducedType();
TypeInformation<Tuple2<Void, Long>> expectedType =
new TupleTypeInfo<>(BasicTypeInfo.VOID_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO);
assertThat(tupleType.isTupleType()).isTrue();
assertThat(tupleType).isEqualTo(expectedType);
}
@Test
public void testCloseWithoutOpen() throws Exception {
HadoopInputFormat<Void, Long> hadoopInputFormat =
new HadoopInputFormat<>(
new DummyVoidKeyInputFormat<Long>(), Void.class, Long.class, new JobConf());
hadoopInputFormat.close();
}
private HadoopInputSplit getHadoopInputSplit() {
return new HadoopInputSplit(1, getFileSplit(), new JobConf());
}
private FileSplit getFileSplit() {
return new FileSplit(new Path("path"), 1, 2, new String[] {});
}
private
|
HadoopInputFormatTest
|
java
|
netty__netty
|
common/src/main/java/io/netty/util/CharsetUtil.java
|
{
"start": 1015,
"end": 1150
}
|
class ____ provides various common operations and constants
* related with {@link Charset} and its relevant classes.
*/
public final
|
that
|
java
|
quarkusio__quarkus
|
extensions/devui/runtime/src/main/java/io/quarkus/devui/runtime/DevUIRecorder.java
|
{
"start": 1455,
"end": 7027
}
|
class ____ {
private static final Logger LOG = Logger.getLogger(DevUIRecorder.class);
public static final String DEV_MANAGER_GLOBALS_JSON_MAPPER_FACTORY = "dev-ui-databind-codec-builder";
public void shutdownTask(ShutdownContext shutdownContext, String devUIBasePath) {
shutdownContext.addShutdownTask(new DeleteDirectoryRunnable(devUIBasePath));
}
public void createJsonRpcRouter(BeanContainer beanContainer,
Map<String, JsonRpcMethod> runtimeMethods,
Map<String, JsonRpcMethod> runtimeSubscriptions,
Map<String, JsonRpcMethod> deploymentMethods,
Map<String, JsonRpcMethod> deploymentSubscriptions,
Map<String, JsonRpcMethod> recordedMethods,
Map<String, JsonRpcMethod> recordedSubscriptions) {
JsonRpcRouter jsonRpcRouter = beanContainer.beanInstance(JsonRpcRouter.class);
jsonRpcRouter.populateJsonRpcEndpoints(runtimeMethods, runtimeSubscriptions, deploymentMethods, deploymentSubscriptions,
recordedMethods, recordedSubscriptions);
jsonRpcRouter.initializeCodec(createJsonMapper());
}
private JsonMapper createJsonMapper() {
// We use a codec defined in the deployment module
// because that module always has access to Jackson-Databind regardless of the application dependencies.
JsonMapper.Factory factory = JsonMapper.Factory.deploymentLinker().createLink(
DevConsoleManager.getGlobal(DEV_MANAGER_GLOBALS_JSON_MAPPER_FACTORY));
// We need to pass some information so that the mapper, who lives in the deployment classloader,
// knows how to deal with JsonObject/JsonArray/JsonBuffer, who live in the runtime classloader.
return factory.create(new JsonTypeAdapter<>(JsonObject.class, JsonObject::getMap, JsonObject::new),
new JsonTypeAdapter<>(JsonArray.class, JsonArray::getList, JsonArray::new),
new JsonTypeAdapter<>(Buffer.class, buffer -> BASE64_ENCODER.encodeToString(buffer.getBytes()), text -> {
try {
return Buffer.buffer(BASE64_DECODER.decode(text));
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException("Expected a base64 encoded byte array, got: " + text, e);
}
}));
}
public Handler<RoutingContext> devUIWebSocketHandler() {
return new DevUIWebSocketHandler();
}
public Handler<RoutingContext> mcpStreamableHTTPHandler(String quarkusVersion) {
return new McpHttpHandler(quarkusVersion, createJsonMapper());
}
public Handler<RoutingContext> uiHandler(String finalDestination,
String path,
List<FileSystemStaticHandler.StaticWebRootConfiguration> webRootConfigurations,
ShutdownContext shutdownContext) {
WebJarStaticHandler handler = new WebJarStaticHandler(finalDestination, path, webRootConfigurations);
shutdownContext.addShutdownTask(new ShutdownContext.CloseRunnable(handler));
return handler;
}
public Handler<RoutingContext> buildTimeStaticHandler(BeanContainer beanContainer,
String basePath,
Map<String, String> urlAndPath,
Map<String, String> descriptions,
Map<String, String> mcpDefaultEnabled,
Map<String, String> contentTypes) {
DevUIBuildTimeStaticService buildTimeStaticService = beanContainer.beanInstance(DevUIBuildTimeStaticService.class);
buildTimeStaticService.addData(basePath, urlAndPath, descriptions, mcpDefaultEnabled, contentTypes);
return new DevUIBuildTimeStaticHandler();
}
public Handler<RoutingContext> endpointInfoHandler(String basePath) {
return new EndpointInfoHandler(basePath);
}
public Handler<RoutingContext> vaadinRouterHandler(String basePath) {
return new VaadinRouterHandler(basePath);
}
public Handler<RoutingContext> mvnpmHandler(String root, Set<URL> mvnpmJarFiles) {
return new MvnpmHandler(root, mvnpmJarFiles);
}
public Handler<RoutingContext> redirect(String contextRoot) {
return redirect(contextRoot, null);
}
public Handler<RoutingContext> redirect(String contextRoot, String page) {
return new Handler<RoutingContext>() {
@Override
public void handle(RoutingContext rc) {
// Initially we were using 308 (MOVED PERMANENTLY) because we also want to redirect other HTTP Methods
// (and not only GET).
                // However, it caused issues with browser caches and prevented users from having applications using Quarkus 2
// and Quarkus 3 at the same time. So, we decided to switch to FOUND (302)
// See https://github.com/quarkusio/quarkus/issues/33658 for more context.
String location = contextRoot + "dev-ui";
if (page != null) {
location = location + "/" + page;
}
rc.response()
.putHeader("Location", location)
.setStatusCode(HttpResponseStatus.FOUND.code()).end();
}
};
}
public Handler<RoutingContext> createLocalHostOnlyFilter(List<String> hosts) {
return new LocalHostOnlyFilter(hosts);
}
public Handler<RoutingContext> createDevUICorsFilter(List<String> hosts) {
return new DevUICORSFilter(hosts);
}
private static final
|
DevUIRecorder
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockitousage/annotation/DoNotMockTest.java
|
{
"start": 7203,
"end": 7295
}
|
class ____ implements NotMockableInterface {}
private static
|
SubclassOfNotMockableInterface
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/SjmsEndpointBuilderFactory.java
|
{
"start": 1609,
"end": 17487
}
|
interface ____
extends
EndpointConsumerBuilder {
default AdvancedSjmsEndpointConsumerBuilder advanced() {
return (AdvancedSjmsEndpointConsumerBuilder) this;
}
/**
* The JMS acknowledgement name, which is one of: SESSION_TRANSACTED,
* CLIENT_ACKNOWLEDGE, AUTO_ACKNOWLEDGE, DUPS_OK_ACKNOWLEDGE.
*
* The option is a:
* <code>org.apache.camel.component.sjms.jms.SessionAcknowledgementType</code> type.
*
* Default: AUTO_ACKNOWLEDGE
* Group: common
*
* @param acknowledgementMode the value to set
* @return the dsl builder
*/
default SjmsEndpointConsumerBuilder acknowledgementMode(org.apache.camel.component.sjms.jms.SessionAcknowledgementType acknowledgementMode) {
doSetProperty("acknowledgementMode", acknowledgementMode);
return this;
}
/**
* The JMS acknowledgement name, which is one of: SESSION_TRANSACTED,
* CLIENT_ACKNOWLEDGE, AUTO_ACKNOWLEDGE, DUPS_OK_ACKNOWLEDGE.
*
* The option will be converted to a
* <code>org.apache.camel.component.sjms.jms.SessionAcknowledgementType</code> type.
*
* Default: AUTO_ACKNOWLEDGE
* Group: common
*
* @param acknowledgementMode the value to set
* @return the dsl builder
*/
default SjmsEndpointConsumerBuilder acknowledgementMode(String acknowledgementMode) {
doSetProperty("acknowledgementMode", acknowledgementMode);
return this;
}
/**
         * The connection factory to be used. A connection factory must be
* configured either on the component or endpoint.
*
* The option is a: <code>jakarta.jms.ConnectionFactory</code> type.
*
* Group: common
*
* @param connectionFactory the value to set
* @return the dsl builder
*/
default SjmsEndpointConsumerBuilder connectionFactory(jakarta.jms.ConnectionFactory connectionFactory) {
doSetProperty("connectionFactory", connectionFactory);
return this;
}
/**
         * The connection factory to be used. A connection factory must be
* configured either on the component or endpoint.
*
* The option will be converted to a
* <code>jakarta.jms.ConnectionFactory</code> type.
*
* Group: common
*
* @param connectionFactory the value to set
* @return the dsl builder
*/
default SjmsEndpointConsumerBuilder connectionFactory(String connectionFactory) {
doSetProperty("connectionFactory", connectionFactory);
return this;
}
/**
* Specifies whether Camel ignores the JMSReplyTo header in messages. If
* true, Camel does not send a reply back to the destination specified
* in the JMSReplyTo header. You can use this option if you want Camel
* to consume from a route and you do not want Camel to automatically
* send back a reply message because another component in your code
* handles the reply message. You can also use this option if you want
* to use Camel as a proxy between different message brokers and you
         * want to route messages from one system to another.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param disableReplyTo the value to set
* @return the dsl builder
*/
default SjmsEndpointConsumerBuilder disableReplyTo(boolean disableReplyTo) {
doSetProperty("disableReplyTo", disableReplyTo);
return this;
}
/**
* Specifies whether Camel ignores the JMSReplyTo header in messages. If
* true, Camel does not send a reply back to the destination specified
* in the JMSReplyTo header. You can use this option if you want Camel
* to consume from a route and you do not want Camel to automatically
* send back a reply message because another component in your code
* handles the reply message. You can also use this option if you want
* to use Camel as a proxy between different message brokers and you
         * want to route messages from one system to another.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param disableReplyTo the value to set
* @return the dsl builder
*/
default SjmsEndpointConsumerBuilder disableReplyTo(String disableReplyTo) {
doSetProperty("disableReplyTo", disableReplyTo);
return this;
}
/**
* Provides an explicit ReplyTo destination (overrides any incoming
* value of Message.getJMSReplyTo() in consumer).
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param replyTo the value to set
* @return the dsl builder
*/
default SjmsEndpointConsumerBuilder replyTo(String replyTo) {
doSetProperty("replyTo", replyTo);
return this;
}
/**
         * Specifies whether to test the connection on startup. This ensures
         * that when Camel starts, all the JMS consumers have a valid
         * connection to the JMS broker. If a connection cannot be granted then
         * Camel throws an exception on startup. This ensures that Camel is not
         * started with failed connections. The JMS producers are tested as well.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param testConnectionOnStartup the value to set
* @return the dsl builder
*/
default SjmsEndpointConsumerBuilder testConnectionOnStartup(boolean testConnectionOnStartup) {
doSetProperty("testConnectionOnStartup", testConnectionOnStartup);
return this;
}
/**
         * Specifies whether to test the connection on startup. This ensures
         * that when Camel starts, all the JMS consumers have a valid
         * connection to the JMS broker. If a connection cannot be granted then
         * Camel throws an exception on startup. This ensures that Camel is not
         * started with failed connections. The JMS producers are tested as well.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param testConnectionOnStartup the value to set
* @return the dsl builder
*/
default SjmsEndpointConsumerBuilder testConnectionOnStartup(String testConnectionOnStartup) {
doSetProperty("testConnectionOnStartup", testConnectionOnStartup);
return this;
}
/**
* Whether the JmsConsumer processes the Exchange asynchronously. If
* enabled then the JmsConsumer may pickup the next message from the JMS
* queue, while the previous message is being processed asynchronously
* (by the Asynchronous Routing Engine). This means that messages may be
* processed not 100% strictly in order. If disabled (as default) then
* the Exchange is fully processed before the JmsConsumer will pickup
* the next message from the JMS queue. Note if transacted has been
* enabled, then asyncConsumer=true does not run asynchronously, as
* transaction must be executed synchronously (Camel 3.0 may support
* async transactions).
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param asyncConsumer the value to set
* @return the dsl builder
*/
default SjmsEndpointConsumerBuilder asyncConsumer(boolean asyncConsumer) {
doSetProperty("asyncConsumer", asyncConsumer);
return this;
}
/**
* Whether the JmsConsumer processes the Exchange asynchronously. If
* enabled then the JmsConsumer may pickup the next message from the JMS
* queue, while the previous message is being processed asynchronously
* (by the Asynchronous Routing Engine). This means that messages may be
* processed not 100% strictly in order. If disabled (as default) then
* the Exchange is fully processed before the JmsConsumer will pickup
* the next message from the JMS queue. Note if transacted has been
* enabled, then asyncConsumer=true does not run asynchronously, as
* transaction must be executed synchronously (Camel 3.0 may support
* async transactions).
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param asyncConsumer the value to set
* @return the dsl builder
*/
default SjmsEndpointConsumerBuilder asyncConsumer(String asyncConsumer) {
doSetProperty("asyncConsumer", asyncConsumer);
return this;
}
/**
* Specifies whether the consumer container should auto-startup.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: consumer
*
* @param autoStartup the value to set
* @return the dsl builder
*/
default SjmsEndpointConsumerBuilder autoStartup(boolean autoStartup) {
doSetProperty("autoStartup", autoStartup);
return this;
}
/**
* Specifies whether the consumer container should auto-startup.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: consumer
*
* @param autoStartup the value to set
* @return the dsl builder
*/
default SjmsEndpointConsumerBuilder autoStartup(String autoStartup) {
doSetProperty("autoStartup", autoStartup);
return this;
}
/**
* Sets the JMS client ID to use. Note that this value, if specified,
* must be unique and can only be used by a single JMS connection
* instance. It is typically only required for durable topic
* subscriptions. If using Apache ActiveMQ you may prefer to use Virtual
* Topics instead.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param clientId the value to set
* @return the dsl builder
*/
default SjmsEndpointConsumerBuilder clientId(String clientId) {
doSetProperty("clientId", clientId);
return this;
}
/**
* Specifies the default number of concurrent consumers when consuming
* from JMS (not for request/reply over JMS). See also the
* maxMessagesPerTask option to control dynamic scaling up/down of
* threads. When doing request/reply over JMS then the option
* replyToConcurrentConsumers is used to control number of concurrent
* consumers on the reply message listener.
*
* The option is a: <code>int</code> type.
*
* Default: 1
* Group: consumer
*
* @param concurrentConsumers the value to set
* @return the dsl builder
*/
default SjmsEndpointConsumerBuilder concurrentConsumers(int concurrentConsumers) {
doSetProperty("concurrentConsumers", concurrentConsumers);
return this;
}
/**
* Specifies the default number of concurrent consumers when consuming
* from JMS (not for request/reply over JMS). See also the
* maxMessagesPerTask option to control dynamic scaling up/down of
* threads. When doing request/reply over JMS then the option
* replyToConcurrentConsumers is used to control number of concurrent
* consumers on the reply message listener.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 1
* Group: consumer
*
* @param concurrentConsumers the value to set
* @return the dsl builder
*/
default SjmsEndpointConsumerBuilder concurrentConsumers(String concurrentConsumers) {
doSetProperty("concurrentConsumers", concurrentConsumers);
return this;
}
/**
* The durable subscriber name for specifying durable topic
* subscriptions. The clientId option must be configured as well.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param durableSubscriptionName the value to set
* @return the dsl builder
*/
default SjmsEndpointConsumerBuilder durableSubscriptionName(String durableSubscriptionName) {
doSetProperty("durableSubscriptionName", durableSubscriptionName);
return this;
}
/**
* Specifies whether to use persistent delivery by default for replies.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: consumer
*
* @param replyToDeliveryPersistent the value to set
* @return the dsl builder
*/
default SjmsEndpointConsumerBuilder replyToDeliveryPersistent(boolean replyToDeliveryPersistent) {
doSetProperty("replyToDeliveryPersistent", replyToDeliveryPersistent);
return this;
}
/**
* Specifies whether to use persistent delivery by default for replies.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: consumer
*
* @param replyToDeliveryPersistent the value to set
* @return the dsl builder
*/
default SjmsEndpointConsumerBuilder replyToDeliveryPersistent(String replyToDeliveryPersistent) {
doSetProperty("replyToDeliveryPersistent", replyToDeliveryPersistent);
return this;
}
/**
* Specifies whether to use transacted mode.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: transaction
*
* @param transacted the value to set
* @return the dsl builder
*/
default SjmsEndpointConsumerBuilder transacted(boolean transacted) {
doSetProperty("transacted", transacted);
return this;
}
/**
* Specifies whether to use transacted mode.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: transaction
*
* @param transacted the value to set
* @return the dsl builder
*/
default SjmsEndpointConsumerBuilder transacted(String transacted) {
doSetProperty("transacted", transacted);
return this;
}
}
/**
* Advanced builder for endpoint consumers for the Simple JMS component.
*/
public
|
SjmsEndpointConsumerBuilder
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/issue_3200/Issue3266.java
|
{
"start": 587,
"end": 1164
}
|
enum ____ {
Red(1001),
White(1002),
Black(1003),
Blue(1004);
private final int code;
private Color(int code) {
this.code = code;
}
@JSONField
public int getCode() {
return code;
}
@JSONCreator
public static Color from(int code) {
for (Color v : values()) {
if (v.code == code) {
return v;
}
}
throw new IllegalArgumentException("code " + code);
}
}
}
|
Color
|
java
|
apache__logging-log4j2
|
log4j-api-test/src/test/java/org/apache/logging/log4j/util/StringsTest.java
|
{
"start": 1234,
"end": 3645
}
|
class ____ {
@Test
void testConcat() {
assertEquals("ab", Strings.concat("a", "b"));
assertEquals("a", Strings.concat("a", ""));
assertEquals("a", Strings.concat("a", null));
assertEquals("b", Strings.concat("", "b"));
assertEquals("b", Strings.concat(null, "b"));
}
/**
* A sanity test to make sure a typo does not mess up {@link Strings#EMPTY}.
*/
@Test
void testEMPTY() {
assertEquals("", Strings.EMPTY);
assertEquals(0, Strings.EMPTY.length());
}
@Test
void testIsBlank() {
assertTrue(Strings.isBlank(null));
assertTrue(Strings.isBlank(""));
assertTrue(Strings.isBlank(" "));
assertTrue(Strings.isBlank("\n"));
assertTrue(Strings.isBlank("\r"));
assertTrue(Strings.isBlank("\t"));
assertFalse(Strings.isEmpty("a"));
}
@Test
void testIsEmpty() {
assertTrue(Strings.isEmpty(null));
assertTrue(Strings.isEmpty(""));
assertFalse(Strings.isEmpty(" "));
assertFalse(Strings.isEmpty("a"));
}
@Test
void testJoin() {
assertNull(Strings.join((Iterable<?>) null, '.'));
assertNull(Strings.join((Iterator<?>) null, '.'));
assertEquals("", Strings.join((Collections.emptyList()), '.'));
assertEquals("a", Strings.join(Collections.singletonList("a"), '.'));
assertEquals("a.b", Strings.join(Arrays.asList("a", "b"), '.'));
assertEquals("a.b.c", Strings.join(Arrays.asList("a", "b", "c"), '.'));
assertEquals("", Strings.join(Collections.singletonList((String) null), ':'));
assertEquals(":", Strings.join(Arrays.asList(null, null), ':'));
assertEquals("a:", Strings.join(Arrays.asList("a", null), ':'));
assertEquals(":b", Strings.join(Arrays.asList(null, "b"), ':'));
}
@Test
void splitList() {
String[] list = Strings.splitList("1, 2, 3");
assertEquals(3, list.length);
list = Strings.splitList("");
assertEquals(1, list.length);
list = Strings.splitList(null);
assertEquals(0, list.length);
}
@Test
void testQuote() {
assertEquals("'Q'", Strings.quote("Q"));
}
@Test
void testToLowerCase() {
assertEquals("a", Strings.toRootLowerCase("A"));
assertEquals("a", Strings.toRootLowerCase("a"));
}
}
|
StringsTest
|
java
|
apache__camel
|
components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsRouteUsingJMSXGroupTest.java
|
{
"start": 1783,
"end": 4566
}
|
class ____ extends AbstractJMSTest {
public static final int POOL_SIZE = 1;
@Order(2)
@RegisterExtension
public static CamelContextExtension camelContextExtension = new DefaultCamelContextExtension();
private static final Logger LOG = LoggerFactory.getLogger(JmsRouteUsingJMSXGroupTest.class);
protected CamelContext context;
protected ProducerTemplate template;
protected ConsumerTemplate consumer;
private ExecutorService executor;
@Test
public void testNoConcurrentProducersJMSXGroupID() throws Exception {
doSendMessages(1);
}
@Test
public void testConcurrentProducersJMSXGroupID() throws Exception {
doSendMessages(10);
}
private void doSendMessages(int files) throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(files * 2);
getMockEndpoint("mock:result").expectsNoDuplicates(body());
for (int i = 0; i < files; i++) {
final int index = i;
executor.submit(() -> {
template.sendBodyAndHeader("direct:start", "IBM: " + index, JMS_X_GROUP_ID, "IBM");
template.sendBodyAndHeader("direct:start", "SUN: " + index, JMS_X_GROUP_ID, "SUN");
return null;
});
}
MockEndpoint.assertIsSatisfied(context, 20, TimeUnit.SECONDS);
}
@BeforeEach
void setupExecutor() {
executor = Executors.newFixedThreadPool(POOL_SIZE);
}
@AfterEach
void cleanupExecutor() {
executor.shutdown();
try {
final boolean finished = executor.awaitTermination(1, TimeUnit.SECONDS);
if (!finished) {
LOG.debug("Executor tasks did not terminate within the timeout (shutdown will be forced)");
}
} catch (InterruptedException e) {
executor.shutdownNow();
}
}
@Override
protected String getComponentName() {
return "jms";
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("jms:queue:JmsRouteUsingJMSXGroupTest");
from("jms:queue:JmsRouteUsingJMSXGroupTest?concurrentConsumers=2").to("log:foo?showHeaders=false")
.to("mock:result");
}
};
}
@Override
public CamelContextExtension getCamelContextExtension() {
return camelContextExtension;
}
@BeforeEach
void setUpRequirements() {
context = camelContextExtension.getContext();
template = camelContextExtension.getProducerTemplate();
consumer = camelContextExtension.getConsumerTemplate();
}
}
|
JmsRouteUsingJMSXGroupTest
|
java
|
google__auto
|
value/src/test/java/com/google/auto/value/processor/AutoBuilderCompilationTest.java
|
{
"start": 10887,
"end": 11193
}
|
enum ____");
}
@Test
public void autoBuilderPrivate() {
JavaFileObject javaFileObject =
JavaFileObjects.forSourceLines(
"foo.bar.Baz",
"package foo.bar;",
"",
"import com.google.auto.value.AutoBuilder;",
"",
"public
|
Baz
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/error/future/ShouldNotBeCompleted.java
|
{
"start": 820,
"end": 1255
}
|
class ____ extends BasicErrorMessageFactory {
private static final String SHOULD_NOT_BE_COMPLETED = "%nExpecting%n <%s>%nnot to be completed.%n" + Warning.WARNING;
public static ErrorMessageFactory shouldNotBeCompleted(CompletableFuture<?> actual) {
return new ShouldNotBeCompleted(actual);
}
private ShouldNotBeCompleted(CompletableFuture<?> actual) {
super(SHOULD_NOT_BE_COMPLETED, actual);
}
}
|
ShouldNotBeCompleted
|
java
|
lettuce-io__lettuce-core
|
src/test/java/io/lettuce/core/search/RedisJsonIndexingResp2IntegrationTests.java
|
{
"start": 430,
"end": 981
}
|
class ____ {@link RedisJsonIndexingIntegrationTests} and runs all the same tests but using the RESP2 protocol
* instead of the default RESP3 protocol.
* <p>
* The tests verify that Redis JSON indexing functionality works correctly with both RESP2 and RESP3 protocols, ensuring
* backward compatibility and protocol-agnostic behavior.
* <p>
* Based on the Redis documentation tutorial:
* <a href="https://redis.io/docs/latest/develop/interact/search-and-query/indexing/">...</a>
*
* @author Tihomir Mateev
*/
@Tag(INTEGRATION_TEST)
public
|
extends
|
java
|
apache__camel
|
core/camel-core-model/src/main/java/org/apache/camel/model/ExecutorServiceAwareDefinition.java
|
{
"start": 994,
"end": 1889
}
|
interface ____<Type extends ProcessorDefinition<?>> {
/**
* Setting the executor service for executing
*
* @param executorService the executor service
* @return the builder
*/
Type executorService(ExecutorService executorService);
/**
* Setting the executor service for executing
*
* @param executorService reference for a {@link java.util.concurrent.ExecutorService} to lookup in the
* {@link org.apache.camel.spi.Registry}
* @return the builder
*/
Type executorService(String executorService);
/**
* Gets the executor service for executing
*/
ExecutorService getExecutorServiceBean();
/**
* Gets a reference id to lookup the executor service from the registry
*/
String getExecutorServiceRef();
}
|
ExecutorServiceAwareDefinition
|
java
|
mockito__mockito
|
mockito-core/src/testFixtures/java/org/mockitoutil/ClassLoaders.java
|
{
"start": 5976,
"end": 7289
}
|
class ____ fields
for (Field field : task.getClass().getDeclaredFields()) {
Field declaredField = taskClassReloaded.getDeclaredField(field.getName());
int modifiers = declaredField.getModifiers();
if (Modifier.isStatic(modifiers) && Modifier.isFinal(modifiers)) {
// Skip static final fields (e.g. jacoco fields)
// otherwise IllegalAccessException (can be bypassed with Unsafe though)
// We may also miss coverage data.
continue;
}
if (declaredField.getType() == field.getType()) { // don't copy this
MemberAccessor accessor = Plugins.getMemberAccessor();
accessor.set(declaredField, reloaded, accessor.get(field, task));
}
}
return reloaded;
} catch (ClassNotFoundException e) {
throw new IllegalStateException(e);
} catch (IllegalAccessException e) {
throw new IllegalStateException(e);
} catch (NoSuchFieldException e) {
throw new IllegalStateException(e);
}
}
}
public static
|
compatible
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/TypeParameterShadowingTest.java
|
{
"start": 7877,
"end": 8425
}
|
class ____ {
@SuppressWarnings("TypeParameterShadowing")
<T> void doSomething() {}
void doSomethingElse(T3 t) {
this.<T3>doSomething();
}
}
MethodInner myInner = null;
}
}
""")
.doTest();
}
@Test
public void refactorCheckForExisting() {
refactoring
.addInputLines(
"in/Test.java",
"""
package foo.bar;
|
MethodInner
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactory.java
|
{
"start": 11301,
"end": 13192
}
|
class ____ {
private final String[] addresses;
private final int[] ports;
private final boolean ssl;
public LDAPServers(String[] urls) {
ssl = secureUrls(urls);
addresses = new String[urls.length];
ports = new int[urls.length];
for (int i = 0; i < urls.length; i++) {
try {
LDAPURL url = new LDAPURL(urls[i]);
addresses[i] = url.getHost();
ports[i] = url.getPort();
} catch (LDAPException e) {
throw new IllegalArgumentException("unable to parse configured LDAP url [" + urls[i] + "]", e);
}
}
}
public String[] addresses() {
return addresses;
}
public int[] ports() {
return ports;
}
public boolean ssl() {
return ssl;
}
/**
* @param ldapUrls URLS in the form of "ldap://..." or "ldaps://..."
*/
private static boolean secureUrls(String[] ldapUrls) {
if (ldapUrls.length == 0) {
return true;
}
final boolean allSecure = Arrays.stream(ldapUrls).allMatch(s -> STARTS_WITH_LDAPS.matcher(s).find());
final boolean allClear = Arrays.stream(ldapUrls).allMatch(s -> STARTS_WITH_LDAP.matcher(s).find());
if (allSecure == false && allClear == false) {
// No mixing is allowed because we use the same socketfactory
throw new IllegalArgumentException(
"configured LDAP protocols are not all equal (ldaps://.. and ldap://..): ["
+ Strings.arrayToCommaDelimitedString(ldapUrls)
+ "]"
);
}
return allSecure;
}
}
}
|
LDAPServers
|
java
|
apache__flink
|
flink-kubernetes/src/main/java/org/apache/flink/kubernetes/kubeclient/resources/KubernetesTooOldResourceVersionException.java
|
{
"start": 973,
"end": 1326
}
|
class ____ extends FlinkException {
private static final long serialVersionUID = 1L;
public KubernetesTooOldResourceVersionException(Throwable cause) {
super(cause);
}
public KubernetesTooOldResourceVersionException(String message, Throwable cause) {
super(message, cause);
}
}
|
KubernetesTooOldResourceVersionException
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/convert/support/GenericConversionServiceTests.java
|
{
"start": 26824,
"end": 26899
}
|
class ____ implements MyInterface {
}
private static
|
MyInterfaceImplementer
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/processor/SpringValidateSimpleTest.java
|
{
"start": 1039,
"end": 1296
}
|
class ____ extends ValidateSimpleTest {
@Override
protected CamelContext createCamelContext() throws Exception {
return createSpringCamelContext(this, "org/apache/camel/spring/processor/ValidateSimpleTest.xml");
}
}
|
SpringValidateSimpleTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/sagemaker/SageMakerClientTests.java
|
{
"start": 2238,
"end": 7869
}
|
class ____ extends ESTestCase {
private static final InvokeEndpointRequest REQUEST = InvokeEndpointRequest.builder().build();
private static final InvokeEndpointWithResponseStreamRequest STREAM_REQUEST = InvokeEndpointWithResponseStreamRequest.builder().build();
private SageMakerRuntimeAsyncClient awsClient;
private CacheLoader<SageMakerClient.RegionAndSecrets, SageMakerRuntimeAsyncClient> clientFactory;
private SageMakerClient client;
private ThreadPool threadPool;
@Before
public void setUp() throws Exception {
super.setUp();
threadPool = createThreadPool(inferenceUtilityExecutors());
awsClient = mock();
clientFactory = spy(new CacheLoader<>() {
public SageMakerRuntimeAsyncClient load(SageMakerClient.RegionAndSecrets key) {
return awsClient;
}
});
client = new SageMakerClient(clientFactory, threadPool);
}
@After
public void shutdown() throws IOException {
terminate(threadPool);
}
public void testInvoke() throws Exception {
var expectedResponse = InvokeEndpointResponse.builder().build();
when(awsClient.invokeEndpoint(any(InvokeEndpointRequest.class))).thenReturn(CompletableFuture.completedFuture(expectedResponse));
var listener = invoke(TimeValue.THIRTY_SECONDS);
verify(clientFactory, times(1)).load(any());
verify(listener, times(1)).onResponse(eq(expectedResponse));
}
private static SageMakerClient.RegionAndSecrets regionAndSecrets() {
return new SageMakerClient.RegionAndSecrets(
"us-east-1",
new AwsSecretSettings(new SecureString("access"), new SecureString("secrets"))
);
}
private ActionListener<InvokeEndpointResponse> invoke(TimeValue timeout) throws InterruptedException {
var latch = new CountDownLatch(1);
ActionListener<InvokeEndpointResponse> listener = spy(ActionListener.noop());
client.invoke(regionAndSecrets(), REQUEST, timeout, ActionListener.runAfter(listener, latch::countDown));
assertTrue("Timed out waiting for invoke call", latch.await(5, TimeUnit.SECONDS));
return listener;
}
public void testInvokeCache() throws Exception {
when(awsClient.invokeEndpoint(any(InvokeEndpointRequest.class))).thenReturn(
CompletableFuture.completedFuture(InvokeEndpointResponse.builder().build())
);
invoke(TimeValue.THIRTY_SECONDS);
invoke(TimeValue.THIRTY_SECONDS);
verify(clientFactory, times(1)).load(any());
}
public void testInvokeTimeout() throws Exception {
when(awsClient.invokeEndpoint(any(InvokeEndpointRequest.class))).thenReturn(new CompletableFuture<>());
var listener = invoke(TimeValue.timeValueMillis(10));
verify(clientFactory, times(1)).load(any());
verifyTimeout(listener);
}
private static void verifyTimeout(ActionListener<?> listener) {
verify(listener, times(1)).onFailure(assertArg(e -> assertThat(e.getMessage(), equalTo("Request timed out after [10ms]"))));
}
public void testInvokeStream() throws Exception {
SdkPublisher<ResponseStream> publisher = mockPublisher();
var listener = invokeStream(TimeValue.THIRTY_SECONDS);
verify(publisher, never()).subscribe(ArgumentMatchers.<Subscriber<ResponseStream>>any());
verify(listener, times(1)).onResponse(assertArg(stream -> stream.responseStream().subscribe(mock())));
verify(publisher, times(1)).subscribe(ArgumentMatchers.<Subscriber<ResponseStream>>any());
}
private SdkPublisher<ResponseStream> mockPublisher() {
SdkPublisher<ResponseStream> publisher = mock();
doAnswer(ans -> {
InvokeEndpointWithResponseStreamResponseHandler handler = ans.getArgument(1);
handler.responseReceived(InvokeEndpointWithResponseStreamResponse.builder().build());
handler.onEventStream(publisher);
return CompletableFuture.completedFuture((Void) null);
}).when(awsClient).invokeEndpointWithResponseStream(any(InvokeEndpointWithResponseStreamRequest.class), any());
return publisher;
}
private ActionListener<SageMakerClient.SageMakerStream> invokeStream(TimeValue timeout) throws Exception {
var latch = new CountDownLatch(1);
ActionListener<SageMakerClient.SageMakerStream> listener = spy(ActionListener.noop());
client.invokeStream(regionAndSecrets(), STREAM_REQUEST, timeout, ActionListener.runAfter(listener, latch::countDown));
assertTrue("Timed out waiting for invoke call", latch.await(5, TimeUnit.SECONDS));
return listener;
}
public void testInvokeStreamCache() throws Exception {
mockPublisher();
invokeStream(TimeValue.THIRTY_SECONDS);
invokeStream(TimeValue.THIRTY_SECONDS);
verify(clientFactory, times(1)).load(any());
}
public void testInvokeStreamTimeout() throws Exception {
when(awsClient.invokeEndpointWithResponseStream(any(InvokeEndpointWithResponseStreamRequest.class), any())).thenReturn(
new CompletableFuture<>()
);
var listener = invokeStream(TimeValue.timeValueMillis(10));
verify(clientFactory, times(1)).load(any());
verifyTimeout(listener);
}
public void testClose() throws Exception {
// load cache
mockPublisher();
invokeStream(TimeValue.THIRTY_SECONDS);
// clear cache
client.close();
verify(awsClient, times(1)).close();
}
}
|
SageMakerClientTests
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-guava-tests/src/test/java/org/assertj/tests/guava/api/RangeAssert_isNotEmpty_Test.java
|
{
"start": 1021,
"end": 1974
}
|
class ____ {
@Test
public void should_fail_if_actual_is_null() {
// GIVEN
Range<Integer> actual = null;
// WHEN
Throwable throwable = catchThrowable(() -> assertThat(actual).isNotEmpty());
// THEN
assertThat(throwable).isInstanceOf(AssertionError.class)
.hasMessage(actualIsNull());
}
@Test
public void should_fail_when_range_is_empty() {
// GIVEN
final Range<Integer> actual = Range.openClosed(1, 1);
// WHEN
Throwable throwable = catchThrowable(() -> assertThat(actual).isNotEmpty());
// THEN
assertThat(throwable).isInstanceOf(AssertionError.class)
.hasMessage("%nExpecting actual not to be empty".formatted());
}
@Test
public void should_pass_if_range_is_not_empty() throws Exception {
// GIVEN
final Range<Integer> actual = Range.closed(1, 10);
// THEN
assertThat(actual).isNotEmpty();
}
}
|
RangeAssert_isNotEmpty_Test
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/EqualsIncompatibleTypeTest.java
|
{
"start": 1908,
"end": 3605
}
|
class ____ extends C {}
void checkEqualsCC1C2C3(C c, C1 c1, C2 c2, C3 c3) {
// BUG: Diagnostic contains: incompatible types
c3.equals(c1);
// BUG: Diagnostic contains: incompatible types
c3.equals(c2);
// BUG: Diagnostic contains: incompatible types
c1.equals(c3);
// BUG: Diagnostic contains: incompatible types
c2.equals(c3);
}
void checkStaticEqualsCC1C2C3(C c, C1 c1, C2 c2, C3 c3) {
// BUG: Diagnostic contains: incompatible types
java.util.Objects.equals(c3, c1);
// BUG: Diagnostic contains: incompatible types
java.util.Objects.equals(c3, c2);
// BUG: Diagnostic contains: incompatible types
java.util.Objects.equals(c1, c3);
// BUG: Diagnostic contains: incompatible types
java.util.Objects.equals(c2, c3);
}
void checkGuavaStaticEqualsCC1C2C3(C c, C1 c1, C2 c2, C3 c3) {
// BUG: Diagnostic contains: incompatible types
com.google.common.base.Objects.equal(c3, c1);
// BUG: Diagnostic contains: incompatible types
com.google.common.base.Objects.equal(c3, c2);
// BUG: Diagnostic contains: incompatible types
com.google.common.base.Objects.equal(c1, c3);
// BUG: Diagnostic contains: incompatible types
com.google.common.base.Objects.equal(c2, c3);
}
void checkPrimitiveEquals(int a, long b) {
// BUG: Diagnostic contains: incompatible types
java.util.Objects.equals(a, b);
// BUG: Diagnostic contains: incompatible types
java.util.Objects.equals(b, a);
// BUG: Diagnostic contains: incompatible types
com.google.common.base.Objects.equal(a, b);
// BUG: Diagnostic contains: incompatible types
com.google.common.base.Objects.equal(b, a);
}
|
C3
|
java
|
quarkusio__quarkus
|
extensions/resteasy-classic/resteasy-client/deployment/src/test/java/io/quarkus/restclient/configuration/RestClientOverrideRuntimeConfigTest.java
|
{
"start": 888,
"end": 5003
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest().setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class)
.addClasses(EchoResource.class, EchoClient.class, RestClientBuildTimeConfigBuilderCustomizer.class)
.addAsServiceProvider("io.smallrye.config.SmallRyeConfigBuilderCustomizer",
"io.quarkus.restclient.configuration.RestClientBuildTimeConfigBuilderCustomizer"));
@Inject
@RestClient
EchoClient echoClient;
@Inject
SmallRyeConfig config;
@Inject
RestClientsConfig restClientsConfig;
@Test
void overrideConfig() {
// Build time property recording
Optional<ConfigSource> runtimeValues = config.getConfigSource("Runtime Values");
assertTrue(runtimeValues.isPresent());
assertTrue(runtimeValues.get().getPropertyNames()
.contains("io.quarkus.restclient.configuration.EchoClient/mp-rest/url"));
assertEquals("http://nohost",
runtimeValues.get().getValue("io.quarkus.restclient.configuration.EchoClient/mp-rest/url"));
assertTrue(StreamSupport.stream(config.getPropertyNames().spliterator(), false).anyMatch(
property -> property.equals("quarkus.rest-client.\"io.quarkus.restclient.configuration.EchoClient\".url")));
// Override MP Build time property with Quarkus property
ConfigValue mpValue = config.getConfigValue("io.quarkus.restclient.configuration.EchoClient/mp-rest/url");
// Fallbacks from runtime to the override build time value
ConfigValue quarkusValue = config
.getConfigValue("quarkus.rest-client.\"io.quarkus.restclient.configuration.EchoClient\".url");
assertEquals(mpValue.getValue(), quarkusValue.getValue());
assertEquals("RestClientRuntimeConfigSource", quarkusValue.getConfigSourceName());
// There is no relocate for MP names, so it keeps the same name
assertEquals("io.quarkus.restclient.configuration.EchoClient/mp-rest/url", mpValue.getName());
// We use the Quarkus name, because that is the one that has priority
assertEquals("quarkus.rest-client.\"io.quarkus.restclient.configuration.EchoClient\".url", quarkusValue.getName());
assertTrue(restClientsConfig.clients().containsKey("io.quarkus.restclient.configuration.EchoClient"));
Optional<String> url = restClientsConfig.clients().get("io.quarkus.restclient.configuration.EchoClient").url();
assertTrue(url.isPresent());
assertEquals(url.get(), mpValue.getValue());
assertEquals(url.get(), quarkusValue.getValue());
// overrides nohost -> localhost so the invoke succeeds
assertEquals("Hi", echoClient.echo("Hi"));
}
@Test
void buildTime() {
Set<String> properties = StreamSupport.stream(config.getPropertyNames().spliterator(), false).collect(toSet());
// MP/mp-rest/url - This one exists at build time
assertTrue(properties.contains("BT-MP/mp-rest/url"));
assertEquals("from-mp", config.getConfigValue("BT-MP/mp-rest/url").getValue());
// quarkus.rest-client.MP.url - Is not set, and it is not recorded
assertFalse(properties.contains("quarkus.rest-client.BT-MP.url"));
// Both properties exist
assertTrue(properties.contains("BT-QUARKUS-MP/mp-rest/url"));
assertTrue(properties.contains("quarkus.rest-client.BT-QUARKUS-MP.url"));
// There is no relocate for the MP property (only fallback), so each will get their own value
ConfigValue mpValue = config.getConfigValue("BT-QUARKUS-MP/mp-rest/url");
assertEquals("BT-QUARKUS-MP/mp-rest/url", mpValue.getName());
assertEquals("from-mp", mpValue.getValue());
ConfigValue quarkusValue = config.getConfigValue("quarkus.rest-client.BT-QUARKUS-MP.url");
assertEquals("quarkus.rest-client.BT-QUARKUS-MP.url", quarkusValue.getName());
assertEquals("from-quarkus", quarkusValue.getValue());
}
}
|
RestClientOverrideRuntimeConfigTest
|
java
|
quarkusio__quarkus
|
extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/MicrometerTimedInterceptor.java
|
{
"start": 6686,
"end": 7112
}
|
class ____ extends Sample {
private final Timer.Sample sample;
public TimerSample(Timed timed, Tags commonTags) {
super(timed, commonTags);
this.sample = Timer.start(meterRegistry);
}
@Override
void stop(String exceptionClass) {
record(timed, sample, exceptionClass, Tags.concat(commonTags, timed.extraTags()));
}
}
final
|
TimerSample
|
java
|
apache__flink
|
flink-datastream/src/test/java/org/apache/flink/datastream/impl/attribute/StreamingJobGraphGeneratorWithAttributeTest.java
|
{
"start": 11338,
"end": 11799
}
|
class ____
implements TwoOutputStreamProcessFunction<Integer, Integer, Integer> {
@Override
public void processRecord(
Integer record,
Collector<Integer> output1,
Collector<Integer> output2,
TwoOutputPartitionedContext<Integer, Integer> ctx) {
output1.collect(record + 1);
output2.collect(record - 1);
}
}
}
|
TestTwoOutputProcessFunction
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/collect/IteratorsTest.java
|
{
"start": 21537,
"end": 21837
}
|
class ____<E> implements Iterable<E> {
final List<E> elements;
int modCount = 0;
PickyIterable(E... elements) {
this.elements = new ArrayList<E>(asList(elements));
}
@Override
public Iterator<E> iterator() {
return new PickyIterator();
}
final
|
PickyIterable
|
java
|
apache__camel
|
components/camel-jms/src/test/java/org/apache/camel/component/jms/integration/JmsSplitterParallelIT.java
|
{
"start": 1484,
"end": 3559
}
|
class ____ extends AbstractJMSTest {
@Order(2)
@RegisterExtension
public static CamelContextExtension camelContextExtension = new DefaultCamelContextExtension();
protected CamelContext context;
protected ProducerTemplate template;
protected ConsumerTemplate consumer;
protected String getUri() {
return "activemq:queue:JmsSplitterParallelTest?useMessageIDAsCorrelationID=true";
}
@Test
public void testSplitParallel() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("A,B,C,D,E");
getMockEndpoint("mock:reply").expectedBodiesReceivedInAnyOrder("Bye A", "Bye B", "Bye C", "Bye D", "Bye E");
getMockEndpoint("mock:split").expectedBodiesReceivedInAnyOrder("Bye A", "Bye B", "Bye C", "Bye D", "Bye E");
template.sendBody("direct:start", "A,B,C,D,E");
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected String getComponentName() {
return "activemq";
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.split(body().tokenize(",")).parallelProcessing()
.to("log:before")
.to(ExchangePattern.InOut, getUri())
.to("log:after")
.to("mock:split")
.end()
.to("mock:result");
from(getUri())
.transform(body().prepend("Bye "))
.to("mock:reply");
}
};
}
@Override
public CamelContextExtension getCamelContextExtension() {
return camelContextExtension;
}
@BeforeEach
void setUpRequirements() {
context = camelContextExtension.getContext();
template = camelContextExtension.getProducerTemplate();
consumer = camelContextExtension.getConsumerTemplate();
}
}
|
JmsSplitterParallelIT
|
java
|
spring-projects__spring-security
|
config/src/main/java/org/springframework/security/config/annotation/web/reactive/EnableWebFluxSecurity.java
|
{
"start": 1081,
"end": 1365
}
|
class ____ have Spring Security WebFlux
* support added. Users can then create one or more {@link ServerHttpSecurity}
* {@code Bean} instances.
*
* A minimal configuration can be found below:
*
* <pre class="code">
* @Configuration
* @EnableWebFluxSecurity
* public
|
to
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/EventEndpointBuilderFactory.java
|
{
"start": 11315,
"end": 12793
}
|
interface ____ {
/**
* Spring Event (camel-spring)
* Listen for Spring Application Events.
*
* Category: messaging
* Since: 1.4
* Maven coordinates: org.apache.camel:camel-spring
*
* Syntax: <code>spring-event:name</code>
*
* Path parameter: name
* Name of endpoint
*
* @param path name
* @return the dsl builder
*/
default EventEndpointBuilder springEvent(String path) {
return EventEndpointBuilderFactory.endpointBuilder("spring-event", path);
}
/**
* Spring Event (camel-spring)
* Listen for Spring Application Events.
*
* Category: messaging
* Since: 1.4
* Maven coordinates: org.apache.camel:camel-spring
*
* Syntax: <code>spring-event:name</code>
*
* Path parameter: name
* Name of endpoint
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path name
* @return the dsl builder
*/
default EventEndpointBuilder springEvent(String componentName, String path) {
return EventEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
static EventEndpointBuilder endpointBuilder(String componentName, String path) {
|
EventBuilders
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/constructor/arrayinjection/B.java
|
{
"start": 745,
"end": 917
}
|
class ____ {
private A[] all;
@Inject
public B(A[] all) {
this.all = all;
}
public List<A> getAll() {
return Arrays.asList(all);
}
}
|
B
|
java
|
apache__dubbo
|
dubbo-plugin/dubbo-security/src/test/java/org/apache/dubbo/security/cert/DubboCertProviderTest.java
|
{
"start": 1228,
"end": 6864
}
|
class ____ {
@Test
void testEnable() {
AtomicReference<DubboCertManager> reference = new AtomicReference<>();
try (MockedConstruction<DubboCertManager> construction =
Mockito.mockConstruction(DubboCertManager.class, (mock, context) -> {
reference.set(mock);
})) {
FrameworkModel frameworkModel = new FrameworkModel();
DubboCertProvider provider = new DubboCertProvider(frameworkModel);
Mockito.when(reference.get().isConnected()).thenReturn(true);
Assertions.assertTrue(provider.isSupport(null));
Mockito.when(reference.get().isConnected()).thenReturn(false);
Assertions.assertFalse(provider.isSupport(null));
frameworkModel.destroy();
}
}
@Test
void testEnable1() {
ClassLoader originClassLoader = Thread.currentThread().getContextClassLoader();
ClassLoader newClassLoader = new ClassLoader(originClassLoader) {
@Override
public Class<?> loadClass(String name) throws ClassNotFoundException {
if (name.startsWith("io.grpc.Channel")) {
throw new ClassNotFoundException("Test");
}
return super.loadClass(name);
}
};
Thread.currentThread().setContextClassLoader(newClassLoader);
try (MockedConstruction<DubboCertManager> construction =
Mockito.mockConstruction(DubboCertManager.class, (mock, context) -> {
// ignore
})) {
FrameworkModel frameworkModel = new FrameworkModel();
DubboCertProvider provider = new DubboCertProvider(frameworkModel);
Assertions.assertFalse(provider.isSupport(null));
frameworkModel.destroy();
}
Thread.currentThread().setContextClassLoader(originClassLoader);
}
@Test
void testEnable2() {
ClassLoader originClassLoader = Thread.currentThread().getContextClassLoader();
ClassLoader newClassLoader = new ClassLoader(originClassLoader) {
@Override
public Class<?> loadClass(String name) throws ClassNotFoundException {
if (name.startsWith("org.bouncycastle.pkcs.jcajce.JcaPKCS10CertificationRequestBuilder")) {
throw new ClassNotFoundException("Test");
}
return super.loadClass(name);
}
};
Thread.currentThread().setContextClassLoader(newClassLoader);
try (MockedConstruction<DubboCertManager> construction =
Mockito.mockConstruction(DubboCertManager.class, (mock, context) -> {
// ignore
})) {
FrameworkModel frameworkModel = new FrameworkModel();
DubboCertProvider provider = new DubboCertProvider(frameworkModel);
Assertions.assertFalse(provider.isSupport(null));
frameworkModel.destroy();
}
Thread.currentThread().setContextClassLoader(originClassLoader);
}
@Test
void getProviderConnectionConfigTest() {
AtomicReference<DubboCertManager> reference = new AtomicReference<>();
try (MockedConstruction<DubboCertManager> construction =
Mockito.mockConstruction(DubboCertManager.class, (mock, context) -> {
reference.set(mock);
})) {
FrameworkModel frameworkModel = new FrameworkModel();
DubboCertProvider provider = new DubboCertProvider(frameworkModel);
Assertions.assertNull(provider.getProviderConnectionConfig(null));
CertPair certPair = new CertPair("privateKey", "publicKey", "trustCerts", 12345);
Mockito.when(reference.get().generateCert()).thenReturn(certPair);
ProviderCert providerConnectionConfig = provider.getProviderConnectionConfig(null);
Assertions.assertArrayEquals("privateKey".getBytes(), providerConnectionConfig.getPrivateKey());
Assertions.assertArrayEquals("publicKey".getBytes(), providerConnectionConfig.getKeyCertChain());
Assertions.assertArrayEquals("trustCerts".getBytes(), providerConnectionConfig.getTrustCert());
Assertions.assertEquals(AuthPolicy.NONE, providerConnectionConfig.getAuthPolicy());
frameworkModel.destroy();
}
}
@Test
void getConsumerConnectionConfigTest() {
AtomicReference<DubboCertManager> reference = new AtomicReference<>();
try (MockedConstruction<DubboCertManager> construction =
Mockito.mockConstruction(DubboCertManager.class, (mock, context) -> {
reference.set(mock);
})) {
FrameworkModel frameworkModel = new FrameworkModel();
DubboCertProvider provider = new DubboCertProvider(frameworkModel);
Assertions.assertNull(provider.getConsumerConnectionConfig(null));
CertPair certPair = new CertPair("privateKey", "publicKey", "trustCerts", 12345);
Mockito.when(reference.get().generateCert()).thenReturn(certPair);
Cert connectionConfig = provider.getConsumerConnectionConfig(null);
Assertions.assertArrayEquals("privateKey".getBytes(), connectionConfig.getPrivateKey());
Assertions.assertArrayEquals("publicKey".getBytes(), connectionConfig.getKeyCertChain());
Assertions.assertArrayEquals("trustCerts".getBytes(), connectionConfig.getTrustCert());
frameworkModel.destroy();
}
}
}
|
DubboCertProviderTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/type/AbstractNamedEnumTest.java
|
{
"start": 2850,
"end": 3279
}
|
enum ____ not exported" );
String type = tableInfo.getString( 6 );
assertEquals( getDataTypeForNamedOrdinalEnum( "SkyType" ), type );
assertFalse( tableInfo.next() );
}
} ) );
}
protected abstract String normalizeNameForQueryingMetadata(String name);
protected abstract String getDataTypeForNamedEnum(String namedEnum);
protected abstract String getDataTypeForNamedOrdinalEnum(String namedEnum);
public
|
type
|
java
|
mapstruct__mapstruct
|
processor/src/main/java/org/mapstruct/ap/internal/model/dependency/GraphAnalyzer.java
|
{
"start": 599,
"end": 3298
}
|
class ____ {
private final Map<String, Node> nodes;
private final Set<List<String>> cycles;
private final Stack<Node> currentPath;
private int nextTraversalSequence = 0;
private GraphAnalyzer(Map<String, Node> nodes) {
this.nodes = nodes;
cycles = new HashSet<>();
currentPath = new Stack<>();
}
public static GraphAnalyzerBuilder builder() {
return new GraphAnalyzerBuilder();
}
public static GraphAnalyzerBuilder withNode(String name, String... descendants) {
return builder().withNode( name, descendants );
}
/**
* Performs a full traversal of the graph, detecting potential cycles and calculates the full list of descendants of
* the nodes.
*/
private void analyze() {
for ( Node node : nodes.values() ) {
depthFirstSearch( node );
}
}
/**
* Returns the traversal sequence number of the given node. The ascending order of the traversal sequence numbers of
* multiple nodes represents the depth-first traversal order of those nodes.
* <p>
* <b>Note</b>: The traversal sequence numbers will only be complete if the graph contains no cycles.
*
* @param name the node name to get the traversal sequence number for
* @return the traversal sequence number, or {@code -1} if the node doesn't exist or the node was not visited (in
* case of cycles).
*/
public int getTraversalSequence(String name) {
Node node = nodes.get( name );
return node != null ? node.getTraversalSequence() : -1;
}
public Set<List<String>> getCycles() {
return cycles;
}
private void depthFirstSearch(Node node) {
if ( node.isProcessed() ) {
return;
}
currentPath.push( node );
// the node is on the stack already -> cycle
if ( node.isVisited() ) {
cycles.add( getCurrentCycle( node ) );
currentPath.pop();
return;
}
node.setVisited( true );
for ( Node descendant : node.getDescendants() ) {
depthFirstSearch( descendant );
}
node.setTraversalSequence( nextTraversalSequence++ );
currentPath.pop();
}
private List<String> getCurrentCycle(Node start) {
List<String> cycle = new ArrayList<>();
boolean inCycle = false;
for ( Node n : currentPath ) {
if ( !inCycle && n.equals( start ) ) {
inCycle = true;
}
if ( inCycle ) {
cycle.add( n.getName() );
}
}
return cycle;
}
public static
|
GraphAnalyzer
|
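The record above detects cycles with a depth-first search that tracks the current traversal path. A self-contained sketch of the same idea, independent of the MapStruct classes (all names below are hypothetical, and it returns only the first cycle found rather than collecting all of them):

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class CycleFinderSketch {
    public static List<String> findCycle(Map<String, List<String>> graph) {
        Set<String> processed = new HashSet<>(); // fully explored nodes
        Set<String> onPath = new HashSet<>();    // nodes on the current DFS path
        Deque<String> path = new ArrayDeque<>();
        for (String node : graph.keySet()) {
            List<String> cycle = dfs(node, graph, processed, onPath, path);
            if (cycle != null) {
                return cycle;
            }
        }
        return List.of(); // acyclic
    }

    private static List<String> dfs(String node, Map<String, List<String>> graph,
            Set<String> processed, Set<String> onPath, Deque<String> path) {
        if (processed.contains(node)) {
            return null;
        }
        if (onPath.contains(node)) {
            // Back edge: the cycle is the path suffix starting at `node`.
            List<String> snapshot = new ArrayList<>(path);
            return snapshot.subList(snapshot.indexOf(node), snapshot.size());
        }
        onPath.add(node);
        path.addLast(node);
        for (String next : graph.getOrDefault(node, List.of())) {
            List<String> cycle = dfs(next, graph, processed, onPath, path);
            if (cycle != null) {
                return cycle; // abort early once a cycle is found
            }
        }
        path.removeLast();
        onPath.remove(node);
        processed.add(node);
        return null;
    }
}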
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/RestEndpointComponentBuilderFactory.java
|
{
"start": 1851,
"end": 8861
}
|
interface ____ extends ComponentBuilder<RestComponent> {
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default RestEndpointComponentBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* The Camel Rest component to use for the consumer REST transport, such
* as jetty, servlet, undertow. If no component has been explicitly
* configured, then Camel will lookup if there is a Camel component that
* integrates with the Rest DSL, or if a
* org.apache.camel.spi.RestConsumerFactory is registered in the
* registry. If either one is found, then that is being used.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param consumerComponentName the value to set
* @return the dsl builder
*/
default RestEndpointComponentBuilder consumerComponentName(java.lang.String consumerComponentName) {
doSetProperty("consumerComponentName", consumerComponentName);
return this;
}
/**
* The swagger api doc resource to use. The resource is loaded from
* classpath by default and must be in JSON format.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param apiDoc the value to set
* @return the dsl builder
*/
default RestEndpointComponentBuilder apiDoc(java.lang.String apiDoc) {
doSetProperty("apiDoc", apiDoc);
return this;
}
/**
* Host and port of HTTP service to use (override host in swagger
* schema).
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param host the value to set
* @return the dsl builder
*/
default RestEndpointComponentBuilder host(java.lang.String host) {
doSetProperty("host", host);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default RestEndpointComponentBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* The Camel Rest component to use for the producer REST transport, such
* as http, undertow. If no component has been explicitly configured,
* then Camel will lookup if there is a Camel component that integrates
* with the Rest DSL, or if a org.apache.camel.spi.RestProducerFactory
* is registered in the registry. If either one is found, then that is
* being used.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param producerComponentName the value to set
* @return the dsl builder
*/
default RestEndpointComponentBuilder producerComponentName(java.lang.String producerComponentName) {
doSetProperty("producerComponentName", producerComponentName);
return this;
}
/**
* Whether autowiring is enabled. This is used for automatic autowiring
* options (the option must be marked as autowired) by looking up in the
* registry to find if there is a single instance of matching type,
* which then gets configured on the component. This can be used for
* automatic configuring JDBC data sources, JMS connection factories,
* AWS Clients, etc.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param autowiredEnabled the value to set
* @return the dsl builder
*/
default RestEndpointComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
doSetProperty("autowiredEnabled", autowiredEnabled);
return this;
}
/**
* To use a custom org.apache.camel.spi.HeaderFilterStrategy to filter
* header to and from Camel message.
*
* The option is a:
* <code>org.apache.camel.spi.HeaderFilterStrategy</code>
* type.
*
* Group: filter
*
* @param headerFilterStrategy the value to set
* @return the dsl builder
*/
default RestEndpointComponentBuilder headerFilterStrategy(org.apache.camel.spi.HeaderFilterStrategy headerFilterStrategy) {
doSetProperty("headerFilterStrategy", headerFilterStrategy);
return this;
}
}
|
RestEndpointComponentBuilder
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/converters/table/SqlAlterTableSchemaAddConverter.java
|
{
"start": 1348,
"end": 1715
}
|
class ____
extends SqlAlterTableSchemaConverter<SqlAlterTableAdd> {
@Override
protected SchemaConverter createSchemaConverter(
SqlAlterTableSchema alterTableSchema,
ResolvedCatalogTable oldTable,
ConvertContext context) {
return new SchemaAddConverter(oldTable, context);
}
}
|
SqlAlterTableSchemaAddConverter
|
java
|
google__guava
|
android/guava/src/com/google/common/collect/AbstractMapBasedMultimap.java
|
{
"start": 21492,
"end": 23812
}
|
class ____ extends WrappedSortedSet implements NavigableSet<V> {
WrappedNavigableSet(
@ParametricNullness K key, NavigableSet<V> delegate, @Nullable WrappedCollection ancestor) {
super(key, delegate, ancestor);
}
@Override
NavigableSet<V> getSortedSetDelegate() {
return (NavigableSet<V>) super.getSortedSetDelegate();
}
@Override
public @Nullable V lower(@ParametricNullness V v) {
return getSortedSetDelegate().lower(v);
}
@Override
public @Nullable V floor(@ParametricNullness V v) {
return getSortedSetDelegate().floor(v);
}
@Override
public @Nullable V ceiling(@ParametricNullness V v) {
return getSortedSetDelegate().ceiling(v);
}
@Override
public @Nullable V higher(@ParametricNullness V v) {
return getSortedSetDelegate().higher(v);
}
@Override
public @Nullable V pollFirst() {
return Iterators.pollNext(iterator());
}
@Override
public @Nullable V pollLast() {
return Iterators.pollNext(descendingIterator());
}
private NavigableSet<V> wrap(NavigableSet<V> wrapped) {
return new WrappedNavigableSet(key, wrapped, (getAncestor() == null) ? this : getAncestor());
}
@Override
public NavigableSet<V> descendingSet() {
return wrap(getSortedSetDelegate().descendingSet());
}
@Override
public Iterator<V> descendingIterator() {
return new WrappedIterator(getSortedSetDelegate().descendingIterator());
}
@Override
public NavigableSet<V> subSet(
@ParametricNullness V fromElement,
boolean fromInclusive,
@ParametricNullness V toElement,
boolean toInclusive) {
return wrap(
getSortedSetDelegate().subSet(fromElement, fromInclusive, toElement, toInclusive));
}
@Override
public NavigableSet<V> headSet(@ParametricNullness V toElement, boolean inclusive) {
return wrap(getSortedSetDelegate().headSet(toElement, inclusive));
}
@Override
public NavigableSet<V> tailSet(@ParametricNullness V fromElement, boolean inclusive) {
return wrap(getSortedSetDelegate().tailSet(fromElement, inclusive));
}
}
/** List decorator that stays in sync with the multimap values for a key. */
@WeakOuter
private
|
WrappedNavigableSet
|
java
|
apache__spark
|
common/network-common/src/main/java/org/apache/spark/network/protocol/Encoders.java
|
{
"start": 4379,
"end": 5146
}
|
class ____ {
public static int encodedLength(String[] strings) {
int totalLength = 4;
for (String s : strings) {
totalLength += Strings.encodedLength(s);
}
return totalLength;
}
public static void encode(ByteBuf buf, String[] strings) {
buf.writeInt(strings.length);
for (String s : strings) {
Strings.encode(buf, s);
}
}
public static String[] decode(ByteBuf buf) {
int numStrings = buf.readInt();
String[] strings = new String[numStrings];
for (int i = 0; i < strings.length; i ++) {
strings[i] = Strings.decode(buf);
}
return strings;
}
}
/** Integer arrays are encoded with their length followed by integers. */
public static
|
StringArrays
|
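The trailing comment in the record above introduces an integer-array codec. A plausible sketch of that counterpart following the same length-prefix layout (an illustration of the pattern, not necessarily Spark's actual code):

import io.netty.buffer.ByteBuf;

public class IntArraysSketch {
    public static int encodedLength(int[] ints) {
        return 4 + 4 * ints.length; // 4-byte length prefix, then 4 bytes per int
    }
    public static void encode(ByteBuf buf, int[] ints) {
        buf.writeInt(ints.length);
        for (int i : ints) {
            buf.writeInt(i);
        }
    }
    public static int[] decode(ByteBuf buf) {
        int numInts = buf.readInt();
        int[] ints = new int[numInts];
        for (int i = 0; i < ints.length; i++) {
            ints[i] = buf.readInt();
        }
        return ints;
    }
}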
java
|
apache__camel
|
components/camel-test/camel-test-spring-junit5/src/test/java/org/apache/camel/test/spring/CamelSpringTestSupportTest.java
|
{
"start": 1232,
"end": 2122
}
|
class ____ {
@Test
public void testReplacement() throws IOException {
String input = "<camel id='{{myCamelContext}}'>\n" +
" <bean class='{{fooClass}}'/>\n" +
"</camel>\n";
Resource io = new ByteArrayResource(input.getBytes(StandardCharsets.UTF_8));
Map<String, String> props = new HashMap<>();
props.put("myCamelContext", "camel-context-id");
Resource tr = new CamelSpringTestSupport.TranslatedResource(io, props);
byte[] buf = new byte[1024];
int l = tr.getInputStream().read(buf);
String output = new String(buf, 0, l, StandardCharsets.UTF_8);
assertEquals("""
<camel id='camel-context-id'>
<bean class='{{fooClass}}'/>
</camel>
""",
output);
}
}
|
CamelSpringTestSupportTest
|
java
|
spring-projects__spring-boot
|
module/spring-boot-actuator/src/main/java/org/springframework/boot/actuate/info/Info.java
|
{
"start": 2476,
"end": 3357
}
|
class ____ {
private final Map<String, Object> content;
public Builder() {
this.content = new LinkedHashMap<>();
}
/**
* Record detail using given {@code key} and {@code value}.
* @param key the detail key
* @param value the detail value
* @return this {@link Builder} instance
*/
public Builder withDetail(String key, Object value) {
this.content.put(key, value);
return this;
}
/**
* Record several details.
* @param details the details
* @return this {@link Builder} instance
* @see #withDetail(String, Object)
*/
public Builder withDetails(Map<String, Object> details) {
this.content.putAll(details);
return this;
}
/**
* Create a new {@link Info} instance based on the state of this builder.
* @return a new {@link Info} instance
*/
public Info build() {
return new Info(this);
}
}
}
|
Builder
|
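A short usage sketch for the builder record above, assuming the enclosing class is Spring Boot's actuator Info with its public getDetails() accessor; the detail values are made up:

import java.util.Map;
import org.springframework.boot.actuate.info.Info;

public class InfoBuilderDemo {
    public static void main(String[] args) {
        Info info = new Info.Builder()
                .withDetail("version", "1.0.0")          // single detail
                .withDetails(Map.of("commit", "abc123")) // bulk details
                .build();
        // LinkedHashMap preserves insertion order: {version=1.0.0, commit=abc123}
        System.out.println(info.getDetails());
    }
}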
java
|
apache__dubbo
|
dubbo-test/dubbo-test-check/src/main/java/org/apache/dubbo/test/check/registrycenter/Processor.java
|
{
"start": 1067,
"end": 1333
}
|
interface ____ {
/**
* Process the command with the global context.
*
* @param context the global context.
* @throws DubboTestException when any exception occurred.
*/
void process(Context context) throws DubboTestException;
}
|
Processor
|
java
|
spring-projects__spring-security
|
core/src/main/java/org/springframework/security/jackson2/UnmodifiableMapDeserializer.java
|
{
"start": 1272,
"end": 1918
}
|
class ____ extends JsonDeserializer<Map<?, ?>> {
@Override
public Map<?, ?> deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException {
ObjectMapper mapper = (ObjectMapper) jp.getCodec();
JsonNode node = mapper.readTree(jp);
Map<String, Object> result = new LinkedHashMap<>();
if (node != null && node.isObject()) {
Iterable<Map.Entry<String, JsonNode>> fields = node::fields;
for (Map.Entry<String, JsonNode> field : fields) {
result.put(field.getKey(), mapper.readValue(field.getValue().traverse(mapper), Object.class));
}
}
return Collections.unmodifiableMap(result);
}
}
|
UnmodifiableMapDeserializer
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/bug/Bug_for_BlankRain_Issue_502.java
|
{
"start": 189,
"end": 1079
}
|
class ____ extends TestCase {
public void test_for_issue() throws Exception {
People a1 = new People();
a1.set姓名("A");
a1.set类型("B");
a1.set状态("C");
a1.set满意度("D");
a1.set统计("E");
a1.set时间("F");
String text = JSON.toJSONString(a1);
Assert.assertEquals("{\"姓名\":\"A\",\"时间\":\"F\",\"满意度\":\"D\",\"状态\":\"C\",\"类型\":\"B\",\"统计\":\"E\"}", text);
System.out.println(text);
People a2 = JSON.parseObject(text, People.class);
Assert.assertEquals(a1.get姓名(), a2.get姓名());
Assert.assertEquals(a1.get类型(), a2.get类型());
Assert.assertEquals(a1.get状态(), a2.get状态());
Assert.assertEquals(a1.get满意度(), a2.get满意度());
Assert.assertEquals(a1.get统计(), a2.get统计());
Assert.assertEquals(a1.get时间(), a2.get时间());
}
public static
|
Bug_for_BlankRain_Issue_502
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/configproperties/eachbeanparameter/EachPropertyPropertiesDefault.java
|
{
"start": 388,
"end": 1045
}
|
class ____ {
private String name;
private String url;
private int serverPort;
@ConfigurationInject
public EachPropertyPropertiesDefault(@Parameter String name) {
this.name = name;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public int getServerPort() {
return serverPort;
}
public void setServerPort(int serverPort) {
this.serverPort = serverPort;
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
}
|
EachPropertyPropertiesDefault
|
java
|
apache__kafka
|
connect/runtime/src/main/java/org/apache/kafka/connect/util/ConcreteSubClassValidator.java
|
{
"start": 1490,
"end": 1827
}
|
class ____'t be found; no point in performing follow-up validation
return;
}
Class<?> cls = (Class<?>) value;
Utils.ensureConcreteSubclass(expectedSuperClass, cls);
}
@Override
public String toString() {
return "A concrete subclass of " + expectedSuperClass.getName();
}
}
|
couldn
|
java
|
apache__camel
|
dsl/camel-jbang/camel-jbang-plugin-kubernetes/src/main/java/org/apache/camel/dsl/jbang/core/commands/kubernetes/ClusterType.java
|
{
"start": 939,
"end": 1497
}
|
enum ____ {
KUBERNETES,
OPENSHIFT,
KIND,
K3S,
MINIKUBE;
public static ClusterType fromName(String name) {
return Arrays.stream(values())
.filter(ct -> ct.name().equalsIgnoreCase(name))
.findFirst().orElseThrow(() -> new IllegalArgumentException("Unknown cluster type: %s".formatted(name)));
}
public boolean isEqualTo(String clusterType) {
if (clusterType == null) {
return false;
}
return this.name().equalsIgnoreCase(clusterType);
}
}
|
ClusterType
|
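A brief usage sketch for the enum record above (import of the Camel JBang ClusterType assumed; the "bogus" input is made up):

public class ClusterTypeDemo {
    public static void main(String[] args) {
        ClusterType ct = ClusterType.fromName("openshift"); // case-insensitive lookup
        System.out.println(ct);                        // OPENSHIFT
        System.out.println(ct.isEqualTo("OpenShift")); // true
        System.out.println(ct.isEqualTo(null));        // false: null-safe
        try {
            ClusterType.fromName("bogus");
        } catch (IllegalArgumentException e) {
            System.err.println(e.getMessage()); // Unknown cluster type: bogus
        }
    }
}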
java
|
netty__netty
|
testsuite/src/main/java/io/netty/testsuite/transport/sctp/AbstractSctpTest.java
|
{
"start": 1066,
"end": 1754
}
|
class ____ extends AbstractComboTestsuiteTest<ServerBootstrap, Bootstrap> {
@Override
protected List<TestsuitePermutation.BootstrapComboFactory<ServerBootstrap, Bootstrap>> newFactories() {
return SctpTestPermutation.sctpChannel();
}
@Override
protected void configure(ServerBootstrap serverBootstrap, Bootstrap bootstrap, ByteBufAllocator allocator) {
serverBootstrap.localAddress(new InetSocketAddress(NetUtil.LOCALHOST, 0));
serverBootstrap.option(ChannelOption.ALLOCATOR, allocator);
serverBootstrap.childOption(ChannelOption.ALLOCATOR, allocator);
bootstrap.option(ChannelOption.ALLOCATOR, allocator);
}
}
|
AbstractSctpTest
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableSampleTest.java
|
{
"start": 1310,
"end": 15991
}
|
class ____ extends RxJavaTest {
private TestScheduler scheduler;
private Scheduler.Worker innerScheduler;
private Subscriber<Long> subscriber;
private Subscriber<Object> subscriber2;
@Before
// due to mocking
public void before() {
scheduler = new TestScheduler();
innerScheduler = scheduler.createWorker();
subscriber = TestHelper.mockSubscriber();
subscriber2 = TestHelper.mockSubscriber();
}
@Test
public void sample() {
Flowable<Long> source = Flowable.unsafeCreate(new Publisher<Long>() {
@Override
public void subscribe(final Subscriber<? super Long> subscriber1) {
subscriber1.onSubscribe(new BooleanSubscription());
innerScheduler.schedule(new Runnable() {
@Override
public void run() {
subscriber1.onNext(1L);
}
}, 1, TimeUnit.SECONDS);
innerScheduler.schedule(new Runnable() {
@Override
public void run() {
subscriber1.onNext(2L);
}
}, 2, TimeUnit.SECONDS);
innerScheduler.schedule(new Runnable() {
@Override
public void run() {
subscriber1.onComplete();
}
}, 3, TimeUnit.SECONDS);
}
});
Flowable<Long> sampled = source.sample(400L, TimeUnit.MILLISECONDS, scheduler);
sampled.subscribe(subscriber);
InOrder inOrder = inOrder(subscriber);
scheduler.advanceTimeTo(800L, TimeUnit.MILLISECONDS);
verify(subscriber, never()).onNext(any(Long.class));
verify(subscriber, never()).onComplete();
verify(subscriber, never()).onError(any(Throwable.class));
scheduler.advanceTimeTo(1200L, TimeUnit.MILLISECONDS);
inOrder.verify(subscriber, times(1)).onNext(1L);
verify(subscriber, never()).onNext(2L);
verify(subscriber, never()).onComplete();
verify(subscriber, never()).onError(any(Throwable.class));
scheduler.advanceTimeTo(1600L, TimeUnit.MILLISECONDS);
inOrder.verify(subscriber, never()).onNext(1L);
verify(subscriber, never()).onNext(2L);
verify(subscriber, never()).onComplete();
verify(subscriber, never()).onError(any(Throwable.class));
scheduler.advanceTimeTo(2000L, TimeUnit.MILLISECONDS);
inOrder.verify(subscriber, never()).onNext(1L);
inOrder.verify(subscriber, times(1)).onNext(2L);
verify(subscriber, never()).onComplete();
verify(subscriber, never()).onError(any(Throwable.class));
scheduler.advanceTimeTo(3000L, TimeUnit.MILLISECONDS);
inOrder.verify(subscriber, never()).onNext(1L);
inOrder.verify(subscriber, never()).onNext(2L);
verify(subscriber, times(1)).onComplete();
verify(subscriber, never()).onError(any(Throwable.class));
}
@Test
public void sampleWithSamplerNormal() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> sampler = PublishProcessor.create();
Flowable<Integer> m = source.sample(sampler);
m.subscribe(subscriber2);
source.onNext(1);
source.onNext(2);
sampler.onNext(1);
source.onNext(3);
source.onNext(4);
sampler.onNext(2);
source.onComplete();
sampler.onNext(3);
InOrder inOrder = inOrder(subscriber2);
inOrder.verify(subscriber2, never()).onNext(1);
inOrder.verify(subscriber2, times(1)).onNext(2);
inOrder.verify(subscriber2, never()).onNext(3);
inOrder.verify(subscriber2, times(1)).onNext(4);
inOrder.verify(subscriber2, times(1)).onComplete();
verify(subscriber, never()).onError(any(Throwable.class));
}
@Test
public void sampleWithSamplerNoDuplicates() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> sampler = PublishProcessor.create();
Flowable<Integer> m = source.sample(sampler);
m.subscribe(subscriber2);
source.onNext(1);
source.onNext(2);
sampler.onNext(1);
sampler.onNext(1);
source.onNext(3);
source.onNext(4);
sampler.onNext(2);
sampler.onNext(2);
source.onComplete();
sampler.onNext(3);
InOrder inOrder = inOrder(subscriber2);
inOrder.verify(subscriber2, never()).onNext(1);
inOrder.verify(subscriber2, times(1)).onNext(2);
inOrder.verify(subscriber2, never()).onNext(3);
inOrder.verify(subscriber2, times(1)).onNext(4);
inOrder.verify(subscriber2, times(1)).onComplete();
verify(subscriber, never()).onError(any(Throwable.class));
}
@Test
public void sampleWithSamplerTerminatingEarly() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> sampler = PublishProcessor.create();
Flowable<Integer> m = source.sample(sampler);
m.subscribe(subscriber2);
source.onNext(1);
source.onNext(2);
sampler.onNext(1);
sampler.onComplete();
source.onNext(3);
source.onNext(4);
InOrder inOrder = inOrder(subscriber2);
inOrder.verify(subscriber2, never()).onNext(1);
inOrder.verify(subscriber2, times(1)).onNext(2);
inOrder.verify(subscriber2, times(1)).onComplete();
inOrder.verify(subscriber2, never()).onNext(any());
verify(subscriber, never()).onError(any(Throwable.class));
}
@Test
public void sampleWithSamplerEmitAndTerminate() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> sampler = PublishProcessor.create();
Flowable<Integer> m = source.sample(sampler);
m.subscribe(subscriber2);
source.onNext(1);
source.onNext(2);
sampler.onNext(1);
source.onNext(3);
source.onComplete();
sampler.onNext(2);
sampler.onComplete();
InOrder inOrder = inOrder(subscriber2);
inOrder.verify(subscriber2, never()).onNext(1);
inOrder.verify(subscriber2, times(1)).onNext(2);
inOrder.verify(subscriber2, never()).onNext(3);
inOrder.verify(subscriber2, times(1)).onComplete();
inOrder.verify(subscriber2, never()).onNext(any());
verify(subscriber, never()).onError(any(Throwable.class));
}
@Test
public void sampleWithSamplerEmptySource() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> sampler = PublishProcessor.create();
Flowable<Integer> m = source.sample(sampler);
m.subscribe(subscriber2);
source.onComplete();
sampler.onNext(1);
InOrder inOrder = inOrder(subscriber2);
inOrder.verify(subscriber2, times(1)).onComplete();
verify(subscriber2, never()).onNext(any());
verify(subscriber, never()).onError(any(Throwable.class));
}
@Test
public void sampleWithSamplerSourceThrows() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> sampler = PublishProcessor.create();
Flowable<Integer> m = source.sample(sampler);
m.subscribe(subscriber2);
source.onNext(1);
source.onError(new RuntimeException("Forced failure!"));
sampler.onNext(1);
InOrder inOrder = inOrder(subscriber2);
inOrder.verify(subscriber2, times(1)).onError(any(Throwable.class));
verify(subscriber2, never()).onNext(any());
verify(subscriber, never()).onComplete();
}
@Test
public void sampleWithSamplerThrows() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> sampler = PublishProcessor.create();
Flowable<Integer> m = source.sample(sampler);
m.subscribe(subscriber2);
source.onNext(1);
sampler.onNext(1);
sampler.onError(new RuntimeException("Forced failure!"));
InOrder inOrder = inOrder(subscriber2);
inOrder.verify(subscriber2, times(1)).onNext(1);
inOrder.verify(subscriber2, times(1)).onError(any(RuntimeException.class));
verify(subscriber, never()).onComplete();
}
@Test
public void sampleUnsubscribe() {
final Subscription s = mock(Subscription.class);
Flowable<Integer> f = Flowable.unsafeCreate(
new Publisher<Integer>() {
@Override
public void subscribe(Subscriber<? super Integer> subscriber) {
subscriber.onSubscribe(s);
}
}
);
f.throttleLast(1, TimeUnit.MILLISECONDS).subscribe().dispose();
verify(s).cancel();
}
@Test
public void dispose() {
TestHelper.checkDisposed(PublishProcessor.create().sample(1, TimeUnit.SECONDS, new TestScheduler()));
TestHelper.checkDisposed(PublishProcessor.create().sample(Flowable.never()));
}
@Test
public void error() {
Flowable.error(new TestException())
.sample(1, TimeUnit.SECONDS)
.test()
.assertFailure(TestException.class);
}
@Test
public void backpressureOverflow() {
BehaviorProcessor.createDefault(1)
.sample(1, TimeUnit.MILLISECONDS)
.test(0L)
.awaitDone(5, TimeUnit.SECONDS)
.assertFailure(MissingBackpressureException.class);
}
@Test
public void backpressureOverflowWithOtherPublisher() {
PublishProcessor<Integer> pp1 = PublishProcessor.create();
PublishProcessor<Integer> pp2 = PublishProcessor.create();
TestSubscriber<Integer> ts = pp1
.sample(pp2)
.test(0L);
pp1.onNext(1);
pp2.onNext(2);
ts.assertFailure(MissingBackpressureException.class);
assertFalse(pp1.hasSubscribers());
assertFalse(pp2.hasSubscribers());
}
@Test
public void emitLastTimed() {
Flowable.just(1)
.sample(1, TimeUnit.DAYS, true)
.test()
.assertResult(1);
}
@Test
public void emitLastTimedEmpty() {
Flowable.empty()
.sample(1, TimeUnit.DAYS, true)
.test()
.assertResult();
}
@Test
public void emitLastTimedCustomScheduler() {
Flowable.just(1)
.sample(1, TimeUnit.DAYS, Schedulers.single(), true)
.test()
.assertResult(1);
}
@Test
public void emitLastTimedRunCompleteRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final TestScheduler scheduler = new TestScheduler();
final PublishProcessor<Integer> pp = PublishProcessor.create();
TestSubscriber<Integer> ts = pp.sample(1, TimeUnit.SECONDS, scheduler, true)
.test();
pp.onNext(1);
Runnable r1 = new Runnable() {
@Override
public void run() {
pp.onComplete();
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
scheduler.advanceTimeBy(1, TimeUnit.SECONDS);
}
};
TestHelper.race(r1, r2);
ts.assertResult(1);
}
}
@Test
public void emitLastOther() {
Flowable.just(1)
.sample(Flowable.timer(1, TimeUnit.DAYS), true)
.test()
.assertResult(1);
}
@Test
public void emitLastOtherEmpty() {
Flowable.empty()
.sample(Flowable.timer(1, TimeUnit.DAYS), true)
.test()
.assertResult();
}
@Test
public void emitLastOtherRunCompleteRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final PublishProcessor<Integer> pp = PublishProcessor.create();
final PublishProcessor<Integer> sampler = PublishProcessor.create();
TestSubscriber<Integer> ts = pp.sample(sampler, true)
.test();
pp.onNext(1);
Runnable r1 = new Runnable() {
@Override
public void run() {
pp.onComplete();
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
sampler.onNext(1);
}
};
TestHelper.race(r1, r2);
ts.assertResult(1);
}
}
@Test
public void emitLastOtherCompleteCompleteRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final PublishProcessor<Integer> pp = PublishProcessor.create();
final PublishProcessor<Integer> sampler = PublishProcessor.create();
TestSubscriber<Integer> ts = pp.sample(sampler, true).test();
pp.onNext(1);
Runnable r1 = new Runnable() {
@Override
public void run() {
pp.onComplete();
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
sampler.onComplete();
}
};
TestHelper.race(r1, r2);
ts.assertResult(1);
}
}
@Test
public void doubleOnSubscribe() {
TestHelper.checkDoubleOnSubscribeFlowable(new Function<Flowable<Object>, Flowable<Object>>() {
@Override
public Flowable<Object> apply(Flowable<Object> f)
throws Exception {
return f.sample(1, TimeUnit.SECONDS);
}
});
TestHelper.checkDoubleOnSubscribeFlowable(new Function<Flowable<Object>, Flowable<Object>>() {
@Override
public Flowable<Object> apply(Flowable<Object> f)
throws Exception {
return f.sample(PublishProcessor.create());
}
});
}
@Test
public void badRequest() {
TestHelper.assertBadRequestReported(PublishProcessor.create()
.sample(PublishProcessor.create()));
}
@Test
public void badRequestTimed() {
TestHelper.assertBadRequestReported(PublishProcessor.create()
.sample(1, TimeUnit.MINUTES));
}
}
|
FlowableSampleTest
|
java
|
netty__netty
|
example/src/main/java/io/netty/example/udt/echo/rendezvous/MsgEchoPeerHandler.java
|
{
"start": 1123,
"end": 2159
}
|
class ____ extends SimpleChannelInboundHandler<UdtMessage> {
private final UdtMessage message;
public MsgEchoPeerHandler(final int messageSize) {
super(false);
final ByteBuf byteBuf = Unpooled.buffer(messageSize);
for (int i = 0; i < byteBuf.capacity(); i++) {
byteBuf.writeByte((byte) i);
}
message = new UdtMessage(byteBuf);
}
@Override
public void channelActive(final ChannelHandlerContext ctx) {
System.err.println("ECHO active " + NioUdtProvider.socketUDT(ctx.channel()).toStringOptions());
ctx.writeAndFlush(message);
}
@Override
public void channelRead0(ChannelHandlerContext ctx, UdtMessage message) {
ctx.write(message);
}
@Override
public void channelReadComplete(ChannelHandlerContext ctx) {
ctx.flush();
}
@Override
public void exceptionCaught(final ChannelHandlerContext ctx, final Throwable cause) {
cause.printStackTrace();
ctx.close();
}
}
|
MsgEchoPeerHandler
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/util/FatalExitExceptionHandler.java
|
{
"start": 1262,
"end": 1991
}
|
class ____ implements Thread.UncaughtExceptionHandler {
private static final Logger LOG = LoggerFactory.getLogger(FatalExitExceptionHandler.class);
public static final FatalExitExceptionHandler INSTANCE = new FatalExitExceptionHandler();
public static final int EXIT_CODE = -17;
@Override
public void uncaughtException(Thread t, Throwable e) {
try {
LOG.error(
"FATAL: Thread '{}' produced an uncaught exception. Stopping the process...",
t.getName(),
e);
ThreadUtils.errorLogThreadDump(LOG);
} finally {
FlinkSecurityManager.forceProcessExit(EXIT_CODE);
}
}
}
|
FatalExitExceptionHandler
|
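The record above is a fail-fast Thread.UncaughtExceptionHandler. A minimal sketch of the same pattern using only JDK calls (the -17 exit code mirrors the record; FlinkSecurityManager.forceProcessExit is replaced by Runtime.halt for illustration):

public class FailFastHandlerDemo {
    public static void main(String[] args) {
        // Install a JVM-wide fallback for threads without a dedicated handler.
        Thread.setDefaultUncaughtExceptionHandler((t, e) -> {
            System.err.println("FATAL: thread '" + t.getName() + "' died: " + e);
            Runtime.getRuntime().halt(-17); // exit immediately, skipping shutdown hooks
        });
        new Thread(() -> {
            throw new RuntimeException("boom"); // triggers the handler above
        }).start();
    }
}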
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/function/FailableToIntFunction.java
|
{
"start": 1141,
"end": 1878
}
|
interface ____<T, E extends Throwable> {
/** NOP singleton */
@SuppressWarnings("rawtypes")
FailableToIntFunction NOP = t -> 0;
/**
* Gets the NOP singleton.
*
* @param <T> the type of the argument to the function
* @param <E> The kind of thrown exception or error.
* @return The NOP singleton.
*/
@SuppressWarnings("unchecked")
static <T, E extends Throwable> FailableToIntFunction<T, E> nop() {
return NOP;
}
/**
* Applies this function to the given arguments.
*
* @param t the first function argument
* @return the function result
* @throws E Thrown when the function fails.
*/
int applyAsInt(T t) throws E;
}
|
FailableToIntFunction
|
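A usage sketch for the record above, showing why the throws-declaring functional interface exists: the lambda below throws a checked IOException, which java.util.function.ToIntFunction would not allow (import of org.apache.commons.lang3.function.FailableToIntFunction assumed):

import java.io.IOException;

public class FailableDemo {
    public static void main(String[] args) throws IOException {
        FailableToIntFunction<String, IOException> parseNonEmpty = s -> {
            if (s.isEmpty()) {
                throw new IOException("empty input"); // checked exception from a lambda
            }
            return Integer.parseInt(s);
        };
        System.out.println(parseNonEmpty.applyAsInt("42")); // 42
    }
}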
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/ext/javatime/ser/MonthDaySerTest.java
|
{
"start": 1815,
"end": 3795
}
|
class ____ {
@JsonFormat(shape = JsonFormat.Shape.ARRAY)
public MonthDay value;
public ShapeArrayBean() { }
public ShapeArrayBean(MonthDay v) { value = v; }
}
@Test
public void testSerialization01() throws Exception
{
assertEquals("\"--01-17\"",
MAPPER.writeValueAsString(MonthDay.of(Month.JANUARY, 17)));
}
@Test
public void testSerialization02() throws Exception
{
assertEquals("\"--08-21\"",
MAPPER.writeValueAsString(MonthDay.of(Month.AUGUST, 21)));
}
@Test
public void testSerializationWithTypeInfo01() throws Exception
{
final ObjectMapper mapper = mapperBuilder()
.addMixIn(TemporalAccessor.class, MockObjectConfiguration.class)
.build();
MonthDay monthDay = MonthDay.of(Month.NOVEMBER, 5);
String value = mapper.writeValueAsString(monthDay);
assertEquals("[\"" + MonthDay.class.getName() + "\",\"--11-05\"]", value);
}
// ShapeInt Test
@Test
public void testSerializationWithShapeInt() throws Exception
{
// One with shape
String json = MAPPER.writeValueAsString(new ShapeIntWrapper(MonthDay.of(Month.MARCH, 17)));
assertEquals("{\"value\":[3,17]}", json);
// One without shape
json = MAPPER.writeValueAsString(new NoShapeIntWrapper(MonthDay.of(Month.MARCH, 17)));
assertEquals("{\"value\":\"--03-17\"}", json);
}
@Test
public void testSerializationWithFrLocale() throws Exception
{
String json = MAPPER.writeValueAsString(new FrBean(MonthDay.of(Month.MARCH, 17)));
assertEquals("{\"value\":\"mars-17\"}", json);
}
@Test
public void testSerializationWithShapeArray() throws Exception
{
String json = MAPPER.writeValueAsString(new ShapeArrayBean(MonthDay.of(Month.DECEMBER, 31)));
assertEquals("{\"value\":[12,31]}", json);
}
}
|
ShapeArrayBean
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockito/internal/debugging/LoggingListenerTest.java
|
{
"start": 329,
"end": 5243
}
|
class ____ extends TestBase {
@Test
public void may_not_have_any_information() {
// given
LoggingListener listener = new LoggingListener(true);
// expect
assertEquals("", listener.getStubbingInfo());
}
@Test
public void informs_about_unused_stubs() {
// given
LoggingListener listener = new LoggingListener(false);
// when
listener.foundUnusedStub(invocationAt("at com.FooTest:30"));
listener.foundUnusedStub(invocationAt("at com.FooTest:32"));
// then
assertEquals(
"[Mockito] Additional stubbing information (see javadoc for StubbingInfo class):\n"
+ "[Mockito]\n"
+ "[Mockito] Unused stubbing (perhaps can be removed from the test?):\n"
+ "[Mockito]\n"
+ "[Mockito] 1. at com.FooTest:30\n"
+ "[Mockito] 2. at com.FooTest:32",
listener.getStubbingInfo());
}
@Test
public void calculates_indexes_for_clean_output() {
assertEquals(1, LoggingListener.indexOfNextPair(0));
assertEquals(2, LoggingListener.indexOfNextPair(2));
assertEquals(3, LoggingListener.indexOfNextPair(4));
assertEquals(4, LoggingListener.indexOfNextPair(6));
}
@Test
public void informs_about_unused_stubs_due_arg_mismatch() {
// given
LoggingListener listener = new LoggingListener(false);
// when
listener.foundStubCalledWithDifferentArgs(
invocationAt("at com.FooTest:20"), invocationMatcherAt("at com.Foo:100"));
listener.foundStubCalledWithDifferentArgs(
invocationAt("at com.FooTest:21"), invocationMatcherAt("at com.Foo:121"));
// then
assertEquals(
"[Mockito] Additional stubbing information (see javadoc for StubbingInfo class):\n"
+ "[Mockito]\n"
+ "[Mockito] Argument mismatch between stubbing and actual invocation (is stubbing correct in the test?):\n"
+ "[Mockito]\n"
+ "[Mockito] 1. Stubbed at com.FooTest:20\n"
+ "[Mockito] Invoked at com.Foo:100\n"
+ "[Mockito] 2. Stubbed at com.FooTest:21\n"
+ "[Mockito] Invoked at com.Foo:121",
listener.getStubbingInfo());
}
@Test
public void informs_about_various_kinds_of_stubs() {
// given
LoggingListener listener = new LoggingListener(true);
// when
listener.foundUnusedStub(invocationAt("at com.FooTest:30"));
listener.foundStubCalledWithDifferentArgs(
invocationAt("at com.FooTest:20"), invocationMatcherAt("at com.Foo:100"));
listener.foundUnstubbed(invocationMatcherAt("at com.Foo:96"));
// then
assertEquals(
"[Mockito] Additional stubbing information (see javadoc for StubbingInfo class):\n"
+ "[Mockito]\n"
+ "[Mockito] Argument mismatch between stubbing and actual invocation (is stubbing correct in the test?):\n"
+ "[Mockito]\n"
+ "[Mockito] 1. Stubbed at com.FooTest:20\n"
+ "[Mockito] Invoked at com.Foo:100\n"
+ "[Mockito]\n"
+ "[Mockito] Unused stubbing (perhaps can be removed from the test?):\n"
+ "[Mockito]\n"
+ "[Mockito] 1. at com.FooTest:30\n"
+ "[Mockito]\n"
+ "[Mockito] Un-stubbed method invocations (perhaps missing stubbing in the test?):\n"
+ "[Mockito]\n"
+ "[Mockito] 1. at com.Foo:96",
listener.getStubbingInfo());
}
@Test
public void hides_unstubbed() {
// given
LoggingListener listener = new LoggingListener(false);
// when
listener.foundUnstubbed(new InvocationBuilder().toInvocationMatcher());
// then
assertEquals("", listener.getStubbingInfo());
}
@Test
public void informs_about_unstubbed() {
// given
LoggingListener listener = new LoggingListener(true);
// when
listener.foundUnstubbed(invocationMatcherAt("com.Foo:20"));
// then
assertEquals(
"[Mockito] Additional stubbing information (see javadoc for StubbingInfo class):\n"
+ "[Mockito]\n"
+ "[Mockito] Un-stubbed method invocations (perhaps missing stubbing in the test?):\n"
+ "[Mockito]\n"
+ "[Mockito] 1. com.Foo:20",
listener.getStubbingInfo());
}
}
|
LoggingListenerTest
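Note: the assertions in calculates_indexes_for_clean_output pin the index arithmetic down completely. A minimal sketch consistent with those assertions (the real implementation may differ in form):

// 1-based ordinal of the pair starting at flat index i in a list that stores
// (stubbing, invocation) entries back to back: 0 -> 1, 2 -> 2, 4 -> 3, 6 -> 4.
static int indexOfNextPair(int flatIndex) {
    return flatIndex / 2 + 1;
}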
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/distributed/TestCentralizedOpportunisticContainerAllocator.java
|
{
"start": 2931,
"end": 26458
}
|
class ____ {
private static final Logger LOG = LoggerFactory.getLogger(
TestCentralizedOpportunisticContainerAllocator.class);
private static final int GB = 1024;
private CentralizedOpportunisticContainerAllocator allocator = null;
private OpportunisticContainerContext oppCntxt = null;
private static final Priority PRIORITY_NORMAL = Priority.newInstance(1);
private static final Resource CAPABILITY_1GB =
Resources.createResource(GB);
private static final ResourceBlacklistRequest EMPTY_BLACKLIST_REQUEST =
ResourceBlacklistRequest.newInstance(
new ArrayList<>(), new ArrayList<>());
@BeforeEach
public void setup() {
    // creating a dummy master key to be used for container creation.
final MasterKey mKey = new MasterKey() {
@Override
public int getKeyId() {
return 1;
}
@Override
public void setKeyId(int keyId) {}
@Override
public ByteBuffer getBytes() {
return ByteBuffer.allocate(8);
}
@Override
public void setBytes(ByteBuffer bytes) {}
};
    // creating a dummy tokenSecretManager to be used for container
    // creation.
BaseContainerTokenSecretManager secMan =
new BaseContainerTokenSecretManager(new Configuration()) {
@Override
public MasterKey getCurrentKey() {
return mKey;
}
@Override
public byte[] createPassword(ContainerTokenIdentifier identifier) {
return new byte[]{1, 2};
}
};
allocator = new CentralizedOpportunisticContainerAllocator(secMan);
oppCntxt = new OpportunisticContainerContext();
oppCntxt.getAppParams().setMinResource(Resource.newInstance(1024, 1));
oppCntxt.getAppParams().setIncrementResource(Resource.newInstance(512, 1));
oppCntxt.getAppParams().setMaxResource(Resource.newInstance(1024, 10));
}
/**
   * Tests allocation of an Opportunistic container from a single application.
* @throws Exception
*/
@Test
public void testSimpleAllocation() throws Exception {
List<ResourceRequest> reqs =
Collections.singletonList(createResourceRequest(1, "*", 1));
ApplicationAttemptId appAttId = ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0L, 1), 1);
allocator.setNodeQueueLoadMonitor(createNodeQueueLoadMonitor(1, 2, 100));
List<Container> containers = allocator.allocateContainers(
EMPTY_BLACKLIST_REQUEST, reqs, appAttId, oppCntxt, 1L, "user");
assertEquals(1, containers.size());
assertEquals(0, oppCntxt.getOutstandingOpReqs().size());
}
/**
   * Tests that Opportunistic containers are not allocated on blacklisted
   * nodes.
* @throws Exception
*/
@Test
public void testBlacklistRejection() throws Exception {
ResourceBlacklistRequest blacklistRequest =
ResourceBlacklistRequest.newInstance(
Arrays.asList("h1", "h2"), new ArrayList<>());
List<ResourceRequest> reqs =
Collections.singletonList(createResourceRequest(1, "*", 1));
ApplicationAttemptId appAttId = ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0L, 1), 1);
allocator.setNodeQueueLoadMonitor(createNodeQueueLoadMonitor(2, 2, 100));
List<Container> containers = allocator.allocateContainers(
blacklistRequest, reqs, appAttId, oppCntxt, 1L, "user");
assertEquals(0, containers.size());
assertEquals(1, oppCntxt.getOutstandingOpReqs().size());
}
/**
   * Tests that allocation of Opportunistic containers is spread out.
* @throws Exception
*/
@Test
public void testRoundRobinSimpleAllocation() throws Exception {
List<ResourceRequest> reqs =
Arrays.asList(
createResourceRequest(1, ResourceRequest.ANY, 1),
createResourceRequest(2, ResourceRequest.ANY, 1),
createResourceRequest(3, ResourceRequest.ANY, 1));
ApplicationAttemptId appAttId = ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0L, 1), 1);
allocator.setNodeQueueLoadMonitor(createNodeQueueLoadMonitor(3, 2, 3));
List<Container> containers = allocator.allocateContainers(
EMPTY_BLACKLIST_REQUEST, reqs, appAttId, oppCntxt, 1L, "user");
LOG.info("Containers: {}", containers);
Set<String> allocatedNodes = new HashSet<>();
for (Container c : containers) {
allocatedNodes.add(c.getNodeId().toString());
}
assertTrue(allocatedNodes.contains("h1:1234"));
assertTrue(allocatedNodes.contains("h2:1234"));
assertTrue(allocatedNodes.contains("h3:1234"));
assertEquals(3, containers.size());
}
/**
* Tests allocation of node local Opportunistic container requests.
* @throws Exception
*/
@Test
public void testNodeLocalAllocation() throws Exception {
List<ResourceRequest> reqs =
Arrays.asList(
createResourceRequest(1, ResourceRequest.ANY, 1),
createResourceRequest(2, "/r1", 1),
createResourceRequest(2, "h1", 1),
createResourceRequest(2, ResourceRequest.ANY, 1),
createResourceRequest(3, "/r1", 1),
createResourceRequest(3, "h1", 1),
createResourceRequest(3, ResourceRequest.ANY, 1));
ApplicationAttemptId appAttId = ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0L, 1), 1);
allocator.setNodeQueueLoadMonitor(createNodeQueueLoadMonitor(3, 2, 5));
List<Container> containers = allocator.allocateContainers(
EMPTY_BLACKLIST_REQUEST, reqs, appAttId, oppCntxt, 1L, "user");
LOG.info("Containers: {}", containers);
// all 3 containers should be allocated.
assertEquals(3, containers.size());
    // containers with allocation ids 2 and 3 should be allocated on node h1
for (Container c : containers) {
if (c.getAllocationRequestId() == 2 || c.getAllocationRequestId() == 3) {
assertEquals("h1:1234", c.getNodeId().toString());
}
}
}
/**
* Tests node local allocation of Opportunistic container requests with
   * the same allocation request id.
* @throws Exception
*/
@Test
public void testNodeLocalAllocationSameSchedulerKey() throws Exception {
List<ResourceRequest> reqs =
Arrays.asList(
createResourceRequest(2, "/r1", 2),
createResourceRequest(2, "h1", 2),
createResourceRequest(2, ResourceRequest.ANY, 2));
ApplicationAttemptId appAttId = ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0L, 1), 1);
allocator.setNodeQueueLoadMonitor(createNodeQueueLoadMonitor(3, 2, 5));
List<Container> containers = allocator.allocateContainers(
EMPTY_BLACKLIST_REQUEST, reqs, appAttId, oppCntxt, 1L, "user");
LOG.info("Containers: {}", containers);
Set<String> allocatedHosts = new HashSet<>();
for (Container c : containers) {
allocatedHosts.add(c.getNodeId().toString());
}
assertEquals(2, containers.size());
assertTrue(allocatedHosts.contains("h1:1234"));
assertFalse(allocatedHosts.contains("h2:1234"));
assertFalse(allocatedHosts.contains("h3:1234"));
}
/**
* Tests rack local allocation of Opportunistic container requests.
* @throws Exception
*/
@Test
public void testSimpleRackLocalAllocation() throws Exception {
List<ResourceRequest> reqs =
Arrays.asList(
createResourceRequest(2, "/r1", 1),
createResourceRequest(2, "h4", 1),
createResourceRequest(2, ResourceRequest.ANY, 1));
ApplicationAttemptId appAttId = ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0L, 1), 1);
NodeQueueLoadMonitor selector = createNodeQueueLoadMonitor(
Arrays.asList("h1", "h2", "h3"), Arrays.asList("/r2", "/r1", "/r3"),
Arrays.asList(2, 2, 2), Arrays.asList(5, 5, 5));
allocator.setNodeQueueLoadMonitor(selector);
List<Container> containers = allocator.allocateContainers(
EMPTY_BLACKLIST_REQUEST, reqs, appAttId, oppCntxt, 1L, "user");
Set<String> allocatedHosts = new HashSet<>();
for (Container c : containers) {
allocatedHosts.add(c.getNodeId().toString());
}
assertTrue(allocatedHosts.contains("h2:1234"));
assertFalse(allocatedHosts.contains("h3:1234"));
assertFalse(allocatedHosts.contains("h4:1234"));
assertEquals(1, containers.size());
}
/**
* Tests that allocation of rack local Opportunistic container requests
   * is spread out.
* @throws Exception
*/
@Test
public void testRoundRobinRackLocalAllocation() throws Exception {
List<ResourceRequest> reqs =
Arrays.asList(
createResourceRequest(1, "/r1", 1),
createResourceRequest(1, "h5", 1),
createResourceRequest(1, ResourceRequest.ANY, 1),
createResourceRequest(2, "/r1", 1),
createResourceRequest(2, "h5", 1),
createResourceRequest(2, ResourceRequest.ANY, 1));
ApplicationAttemptId appAttId = ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0L, 1), 1);
NodeQueueLoadMonitor selector = createNodeQueueLoadMonitor(
Arrays.asList("h1", "h2", "h3", "h4"),
Arrays.asList("/r2", "/r1", "/r3", "/r1"),
Arrays.asList(4, 4, 4, 4), Arrays.asList(5, 5, 5, 5));
allocator.setNodeQueueLoadMonitor(selector);
List<Container> containers = allocator.allocateContainers(
EMPTY_BLACKLIST_REQUEST, reqs, appAttId, oppCntxt, 1L, "user");
Set<String> allocatedHosts = new HashSet<>();
for (Container c : containers) {
allocatedHosts.add(c.getNodeId().toString());
}
LOG.info("Containers: {}", containers);
assertTrue(allocatedHosts.contains("h2:1234"));
assertTrue(allocatedHosts.contains("h4:1234"));
assertFalse(allocatedHosts.contains("h1:1234"));
assertFalse(allocatedHosts.contains("h3:1234"));
assertEquals(2, containers.size());
}
/**
* Tests that allocation of rack local Opportunistic container requests
   * with the same allocation request id is spread out.
* @throws Exception
*/
@Test
public void testRoundRobinRackLocalAllocationSameSchedulerKey()
throws Exception {
List<ResourceRequest> reqs =
Arrays.asList(
createResourceRequest(2, "/r1", 2),
createResourceRequest(2, "h5", 2),
createResourceRequest(2, ResourceRequest.ANY, 2));
ApplicationAttemptId appAttId = ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0L, 1), 1);
NodeQueueLoadMonitor selector = createNodeQueueLoadMonitor(
Arrays.asList("h1", "h2", "h3", "h4"),
Arrays.asList("/r2", "/r1", "/r3", "/r1"),
Arrays.asList(4, 4, 4, 4), Arrays.asList(5, 5, 5, 5));
allocator.setNodeQueueLoadMonitor(selector);
List<Container> containers = allocator.allocateContainers(
EMPTY_BLACKLIST_REQUEST, reqs, appAttId, oppCntxt, 1L, "user");
Set<String> allocatedHosts = new HashSet<>();
for (Container c : containers) {
allocatedHosts.add(c.getNodeId().toString());
}
LOG.info("Containers: {}", containers);
assertTrue(allocatedHosts.contains("h2:1234"));
assertTrue(allocatedHosts.contains("h4:1234"));
assertFalse(allocatedHosts.contains("h1:1234"));
assertFalse(allocatedHosts.contains("h3:1234"));
assertEquals(2, containers.size());
}
/**
* Tests off switch allocation of Opportunistic containers.
* @throws Exception
*/
@Test
public void testOffSwitchAllocationWhenNoNodeOrRack() throws Exception {
List<ResourceRequest> reqs =
Arrays.asList(
createResourceRequest(2, "/r3", 2),
createResourceRequest(2, "h6", 2),
createResourceRequest(2, ResourceRequest.ANY, 2));
ApplicationAttemptId appAttId = ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0L, 1), 1);
NodeQueueLoadMonitor selector = createNodeQueueLoadMonitor(
Arrays.asList("h1", "h2", "h3", "h4"),
Arrays.asList("/r2", "/r1", "/r2", "/r1"),
Arrays.asList(4, 4, 4, 4), Arrays.asList(5, 5, 5, 5));
allocator.setNodeQueueLoadMonitor(selector);
List<Container> containers = allocator.allocateContainers(
EMPTY_BLACKLIST_REQUEST, reqs, appAttId, oppCntxt, 1L, "user");
LOG.info("Containers: {}", containers);
assertEquals(2, containers.size());
}
/**
   * Tests allocation of rack local Opportunistic containers with the same
* scheduler key.
* @throws Exception
*/
@Test
public void testLotsOfContainersRackLocalAllocationSameSchedulerKey()
throws Exception {
List<ResourceRequest> reqs =
Arrays.asList(
createResourceRequest(2, "/r1", 1000),
createResourceRequest(2, "h1", 1000),
createResourceRequest(2, ResourceRequest.ANY, 1000));
ApplicationAttemptId appAttId = ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0L, 1), 1);
NodeQueueLoadMonitor selector = createNodeQueueLoadMonitor(
Arrays.asList("h1", "h2", "h3", "h4"),
Arrays.asList("/r1", "/r1", "/r1", "/r2"),
Arrays.asList(0, 0, 0, 0), Arrays.asList(500, 500, 500, 300));
allocator.setNodeQueueLoadMonitor(selector);
List<Container> containers = allocator.allocateContainers(
EMPTY_BLACKLIST_REQUEST, reqs, appAttId, oppCntxt, 1L, "user");
Map<String, Integer> hostsToNumContainerMap = new HashMap<>();
for (Container c : containers) {
String host = c.getNodeId().toString();
int numContainers = 0;
if (hostsToNumContainerMap.containsKey(host)) {
numContainers = hostsToNumContainerMap.get(host);
}
hostsToNumContainerMap.put(host, numContainers + 1);
}
assertEquals(1000, containers.size());
assertEquals(500, hostsToNumContainerMap.get("h1:1234").intValue());
assertFalse(hostsToNumContainerMap.containsKey("h4:1234"));
}
/**
* Tests scheduling of many rack local Opportunistic container requests.
* @throws Exception
*/
@Test
public void testLotsOfContainersRackLocalAllocation()
throws Exception {
List<ResourceRequest> reqs = new ArrayList<>();
// add 100 container requests.
for (int i = 0; i < 100; i++) {
reqs.add(createResourceRequest(i + 1, ResourceRequest.ANY, 1));
reqs.add(createResourceRequest(i + 1, "h5", 1));
reqs.add(createResourceRequest(i + 1, "/r1", 1));
}
ApplicationAttemptId appAttId = ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0L, 1), 1);
NodeQueueLoadMonitor selector = createNodeQueueLoadMonitor(
Arrays.asList("h1", "h2", "h3", "h4"),
Arrays.asList("/r1", "/r1", "/r1", "/r2"),
Arrays.asList(0, 0, 0, 0), Arrays.asList(500, 500, 500, 300));
allocator.setNodeQueueLoadMonitor(selector);
    List<Container> containers = allocator.allocateContainers(
        EMPTY_BLACKLIST_REQUEST, reqs, appAttId, oppCntxt, 1L, "user");
assertEquals(100, containers.size());
}
/**
* Tests maximum number of opportunistic containers that can be allocated in
   * an AM heartbeat.
* @throws Exception
*/
@Test
public void testMaxAllocationsPerAMHeartbeat() throws Exception {
allocator.setMaxAllocationsPerAMHeartbeat(2);
List<ResourceRequest> reqs = Arrays.asList(
createResourceRequest(2, "/r3", 3),
createResourceRequest(2, "h6", 3),
createResourceRequest(2, ResourceRequest.ANY, 3));
ApplicationAttemptId appAttId = ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0L, 1), 1);
allocator.setNodeQueueLoadMonitor(createNodeQueueLoadMonitor(3, 2, 5));
List<Container> containers = allocator.allocateContainers(
EMPTY_BLACKLIST_REQUEST, reqs, appAttId, oppCntxt, 1L, "user");
LOG.info("Containers: {}", containers);
    // Although capacity is present, only 2 containers should be allocated
// as max allocation per AM heartbeat is set to 2.
assertEquals(2, containers.size());
containers = allocator.allocateContainers(
EMPTY_BLACKLIST_REQUEST, new ArrayList<>(), appAttId,
oppCntxt, 1L, "user");
LOG.info("Containers: {}", containers);
// Remaining 1 container should be allocated.
assertEquals(1, containers.size());
}
/**
* Tests maximum opportunistic container allocation per AM heartbeat for
   * allocation requests with different scheduler keys.
* @throws Exception
*/
@Test
public void testMaxAllocationsPerAMHeartbeatDifferentSchedKey()
throws Exception {
allocator.setMaxAllocationsPerAMHeartbeat(2);
List<ResourceRequest> reqs =
Arrays.asList(
createResourceRequest(1, ResourceRequest.ANY, 1),
createResourceRequest(2, "h6", 2),
createResourceRequest(3, "/r3", 2));
ApplicationAttemptId appAttId = ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0L, 1), 1);
allocator.setNodeQueueLoadMonitor(createNodeQueueLoadMonitor(3, 2, 5));
List<Container> containers = allocator.allocateContainers(
EMPTY_BLACKLIST_REQUEST, reqs, appAttId, oppCntxt, 1L, "user");
LOG.info("Containers: {}", containers);
    // Although capacity is present, only 2 containers should be allocated
// as max allocation per AM heartbeat is set to 2.
assertEquals(2, containers.size());
containers = allocator.allocateContainers(
EMPTY_BLACKLIST_REQUEST, new ArrayList<>(), appAttId,
oppCntxt, 1L, "user");
LOG.info("Containers: {}", containers);
// 2 more containers should be allocated from pending allocation requests.
assertEquals(2, containers.size());
containers = allocator.allocateContainers(
EMPTY_BLACKLIST_REQUEST, new ArrayList<>(), appAttId,
oppCntxt, 1L, "user");
LOG.info("Containers: {}", containers);
// Remaining 1 container should be allocated.
assertEquals(1, containers.size());
}
/**
* Tests maximum opportunistic container allocation per AM heartbeat when
* limit is set to -1.
* @throws Exception
*/
@Test
public void testMaxAllocationsPerAMHeartbeatWithNoLimit() throws Exception {
allocator.setMaxAllocationsPerAMHeartbeat(-1);
List<ResourceRequest> reqs = new ArrayList<>();
final int numContainers = 20;
for (int i = 0; i < numContainers; i++) {
reqs.add(createResourceRequest(i + 1, "h1", 1));
}
ApplicationAttemptId appAttId = ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0L, 1), 1);
allocator.setNodeQueueLoadMonitor(createNodeQueueLoadMonitor(3, 2, 500));
List<Container> containers = allocator.allocateContainers(
EMPTY_BLACKLIST_REQUEST, reqs, appAttId, oppCntxt, 1L, "user");
// all containers should be allocated in single heartbeat.
assertEquals(numContainers, containers.size());
}
/**
* Tests maximum opportunistic container allocation per AM heartbeat when
   * limit is set to a higher value.
* @throws Exception
*/
@Test
public void testMaxAllocationsPerAMHeartbeatWithHighLimit()
throws Exception {
allocator.setMaxAllocationsPerAMHeartbeat(100);
final int numContainers = 20;
List<ResourceRequest> reqs = new ArrayList<>();
for (int i = 0; i < numContainers; i++) {
reqs.add(createResourceRequest(i + 1, "h1", 1));
}
ApplicationAttemptId appAttId = ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0L, 1), 1);
allocator.setNodeQueueLoadMonitor(createNodeQueueLoadMonitor(3, 2, 500));
List<Container> containers = allocator.allocateContainers(
EMPTY_BLACKLIST_REQUEST, reqs, appAttId, oppCntxt, 1L, "user");
// all containers should be allocated in single heartbeat.
assertEquals(numContainers, containers.size());
}
/**
* Test opportunistic container allocation latency metrics.
* @throws Exception
*/
@Test
public void testAllocationLatencyMetrics() throws Exception {
oppCntxt = spy(oppCntxt);
OpportunisticSchedulerMetrics metrics =
mock(OpportunisticSchedulerMetrics.class);
when(oppCntxt.getOppSchedulerMetrics()).thenReturn(metrics);
List<ResourceRequest> reqs = Arrays.asList(
createResourceRequest(2, "/r3", 2),
createResourceRequest(2, "h6", 2),
createResourceRequest(2, ResourceRequest.ANY, 2));
ApplicationAttemptId appAttId = ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0L, 1), 1);
allocator.setNodeQueueLoadMonitor(createNodeQueueLoadMonitor(3, 2, 5));
List<Container> containers = allocator.allocateContainers(
EMPTY_BLACKLIST_REQUEST, reqs, appAttId, oppCntxt, 1L, "user");
LOG.info("Containers: {}", containers);
assertEquals(2, containers.size());
    // a latency entry should be recorded for each allocated container.
verify(metrics, times(2)).addAllocateOLatencyEntry(anyLong());
}
private NodeQueueLoadMonitor createNodeQueueLoadMonitor(int numNodes,
int queueLength, int queueCapacity) {
NodeQueueLoadMonitor selector = new NodeQueueLoadMonitor(
NodeQueueLoadMonitor.LoadComparator.QUEUE_LENGTH);
for (int i = 1; i <= numNodes; ++i) {
RMNode node = createRMNode("h" + i, 1234, queueLength, queueCapacity);
selector.addNode(null, node);
selector.updateNode(node);
}
selector.computeTask.run();
return selector;
}
private NodeQueueLoadMonitor createNodeQueueLoadMonitor(List<String> hosts,
List<String> racks, List<Integer> queueLengths,
List<Integer> queueCapacities) {
NodeQueueLoadMonitor selector = new NodeQueueLoadMonitor(
NodeQueueLoadMonitor.LoadComparator.QUEUE_LENGTH);
for (int i = 0; i < hosts.size(); ++i) {
RMNode node = createRMNode(hosts.get(i), 1234, racks.get(i),
queueLengths.get(i), queueCapacities.get(i));
selector.addNode(null, node);
selector.updateNode(node);
}
selector.computeTask.run();
return selector;
}
private ResourceRequest createResourceRequest(int allocationId,
String location, int numContainers) {
return ResourceRequest.newBuilder()
.allocationRequestId(allocationId)
.priority(PRIORITY_NORMAL)
.resourceName(location)
.capability(CAPABILITY_1GB)
.relaxLocality(true)
.numContainers(numContainers)
.executionType(ExecutionType.OPPORTUNISTIC).build();
}
private RMNode createRMNode(String host, int port, int queueLength,
int queueCapacity) {
return createRMNode(host, port, "default", queueLength,
queueCapacity);
}
private RMNode createRMNode(String host, int port, String rack,
int queueLength, int queueCapacity) {
RMNode node1 = mock(RMNode.class);
NodeId nID1 = new TestNodeQueueLoadMonitor.FakeNodeId(host, port);
when(node1.getHostName()).thenReturn(host);
when(node1.getRackName()).thenReturn(rack);
when(node1.getNodeID()).thenReturn(nID1);
when(node1.getState()).thenReturn(NodeState.RUNNING);
OpportunisticContainersStatus status1 =
mock(OpportunisticContainersStatus.class);
when(status1.getEstimatedQueueWaitTime())
.thenReturn(-1);
when(status1.getWaitQueueLength())
.thenReturn(queueLength);
when(status1.getOpportQueueCapacity())
.thenReturn(queueCapacity);
when(node1.getOpportunisticContainersStatus()).thenReturn(status1);
return node1;
}
}
|
TestCentralizedOpportunisticContainerAllocator
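Note: every locality test above expresses one logical ask as three ResourceRequests sharing a single allocation-request id: the preferred host, its rack, and ANY as the off-switch fallback. A minimal sketch of that triple, reusing the test's own createResourceRequest helper (host and rack names are illustrative):

int allocationId = 2;
List<ResourceRequest> ask = Arrays.asList(
    createResourceRequest(allocationId, "h1", 1),                 // host-local
    createResourceRequest(allocationId, "/r1", 1),                // rack-local
    createResourceRequest(allocationId, ResourceRequest.ANY, 1)); // off-switch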
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/checkreturnvalue/CanIgnoreReturnValueSuggesterTest.java
|
{
"start": 25637,
"end": 26352
}
|
class ____ {
public Client setFoo(String... args) {
return setFoo(Arrays.asList(args));
}
public Client setFoos(String... args) {
return this.setFoo(Arrays.asList(args));
}
@CanIgnoreReturnValue
public Client setFoo(List<String> args) {
return this;
}
}
""")
.addOutputLines(
"Client.java",
"""
package com.google.frobber;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.util.Arrays;
import java.util.List;
public final
|
Client
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/BaseSessionEventListener.java
|
{
"start": 213,
"end": 439
}
|
class ____ developing
* {@code SessionEventListener} implementations.
*
* @author Steve Ebersole
*
* @deprecated Just implement {@link SessionEventListener} directly.
*/
@Deprecated(since = "7", forRemoval = true)
public
|
for
|
java
|
quarkusio__quarkus
|
integration-tests/main/src/test/java/io/quarkus/it/main/DefaultMethodTestCase.java
|
{
"start": 165,
"end": 246
}
|
class ____ implements DefaultMethodInterface {
@Nested
|
DefaultMethodTestCase
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListener.java
|
{
"start": 1983,
"end": 5665
}
|
class ____ implements SearchOperationListener {
private final SecurityContext securityContext;
private final AuditTrailService auditTrailService;
public SecuritySearchOperationListener(SecurityContext securityContext, AuditTrailService auditTrail) {
this.securityContext = securityContext;
this.auditTrailService = auditTrail;
}
/**
* Adds the {@link Authentication} to the {@link ScrollContext}
*/
@Override
public void onNewScrollContext(ReaderContext readerContext) {
readerContext.putInContext(AuthenticationField.AUTHENTICATION_KEY, securityContext.getAuthentication());
// store the DLS and FLS permissions of the initial search request that created the scroll
// this is then used to assert the DLS/FLS permission for the scroll search action
securityContext.copyIndicesAccessControlToReaderContext(readerContext);
}
/**
* Checks for the {@link ReaderContext} if it exists and compares the {@link Authentication}
* object from the scroll context with the current authentication context
*/
@Override
public void validateReaderContext(ReaderContext readerContext, TransportRequest request) {
if (readerContext.scrollContext() != null) {
final Authentication originalAuth = readerContext.getFromContext(AuthenticationField.AUTHENTICATION_KEY);
if (false == securityContext.canIAccessResourcesCreatedBy(originalAuth)) {
auditAccessDenied(request);
throw new SearchContextMissingException(readerContext.id());
}
// piggyback on context validation to assert the DLS/FLS permissions on the thread context of the scroll search handler
if (null == INDICES_PERMISSIONS_VALUE.get(securityContext.getThreadContext())) {
// fill in the DLS and FLS permissions for the scroll search action from the scroll context
securityContext.copyIndicesAccessControlFromReaderContext(readerContext);
}
}
}
@Override
public void onPreFetchPhase(SearchContext searchContext) {
ensureIndicesAccessControlForScrollThreadContext(searchContext);
}
@Override
public void onPreQueryPhase(SearchContext searchContext) {
ensureIndicesAccessControlForScrollThreadContext(searchContext);
}
void ensureIndicesAccessControlForScrollThreadContext(SearchContext searchContext) {
if (searchContext.readerContext().scrollContext() != null) {
IndicesAccessControl threadIndicesAccessControl = INDICES_PERMISSIONS_VALUE.get(securityContext.getThreadContext());
if (null == threadIndicesAccessControl) {
throw new ElasticsearchSecurityException(
"Unexpected null indices access control for search context ["
+ searchContext.id()
+ "] for request ["
+ searchContext.request().getDescription()
+ "] with source ["
+ searchContext.source()
+ "]"
);
}
}
}
private void auditAccessDenied(TransportRequest request) {
auditTrailService.get()
.accessDenied(
AuditUtil.extractRequestId(securityContext.getThreadContext()),
securityContext.getAuthentication(),
ORIGINATING_ACTION_VALUE.get(securityContext.getThreadContext()),
request,
AUTHORIZATION_INFO_VALUE.get(securityContext.getThreadContext())
);
}
}
|
SecuritySearchOperationListener
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java
|
{
"start": 5320,
"end": 5819
}
|
class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final EvalOperator.ExpressionEvaluator.Factory field;
public Factory(EvalOperator.ExpressionEvaluator.Factory field) {
this.field = field;
}
@Override
public MvAvgUnsignedLongEvaluator get(DriverContext context) {
return new MvAvgUnsignedLongEvaluator(field.get(context), context);
}
@Override
public String toString() {
return "MvAvg[field=" + field + "]";
}
}
}
|
Factory
|
java
|
elastic__elasticsearch
|
build-tools/src/main/java/org/elasticsearch/gradle/util/FileUtils.java
|
{
"start": 699,
"end": 3507
}
|
class ____ {
/**
* Like {@link java.io.File#mkdirs()}, except throws an informative error if a dir cannot be created.
*
     * @param dir The dir to create, including any nonexistent parent dirs.
*/
public static void mkdirs(File dir) {
dir = dir.getAbsoluteFile();
if (dir.isDirectory()) {
return;
}
if (dir.exists() && dir.isDirectory() == false) {
throw new UncheckedIOException(String.format("Cannot create directory '%s' as it already exists, but is not a directory", dir));
}
List<File> toCreate = new LinkedList<File>();
File parent = dir.getParentFile();
while (parent.exists() == false) {
toCreate.add(parent);
parent = parent.getParentFile();
}
Collections.reverse(toCreate);
for (File parentDirToCreate : toCreate) {
if (parentDirToCreate.isDirectory()) {
continue;
}
File parentDirToCreateParent = parentDirToCreate.getParentFile();
if (parentDirToCreateParent.isDirectory() == false) {
throw new UncheckedIOException(
String.format(
"Cannot create parent directory '%s' when creating directory '%s' as '%s' is not a directory",
parentDirToCreate,
dir,
parentDirToCreateParent
)
);
}
if (parentDirToCreate.mkdir() == false && parentDirToCreate.isDirectory() == false) {
throw new UncheckedIOException(
String.format("Failed to create parent directory '%s' when creating directory '%s'", parentDirToCreate, dir)
);
}
}
if (dir.mkdir() == false && dir.isDirectory() == false) {
throw new UncheckedIOException(String.format("Failed to create directory '%s'", dir));
}
}
public static String read(File file, String encoding) {
try {
return org.apache.commons.io.FileUtils.readFileToString(file, encoding);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
public static List<String> readLines(File file, String encoding) {
try {
return org.apache.commons.io.FileUtils.readLines(file, encoding);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
public static void write(File outputFile, CharSequence content, String encoding) {
try {
org.apache.commons.io.FileUtils.write(outputFile, content, encoding);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
}
|
FileUtils
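Note: a minimal usage sketch of the helpers above (the path is illustrative); mkdirs is idempotent and fails with an UncheckedIOException that names the offending directory:

File outDir = new File("build/reports");
FileUtils.mkdirs(outDir);                 // no-op if the directory already exists
FileUtils.write(new File(outDir, "summary.txt"), "ok", "UTF-8");
List<String> lines = FileUtils.readLines(new File(outDir, "summary.txt"), "UTF-8");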
|
java
|
spring-projects__spring-framework
|
spring-web/src/test/java/org/springframework/http/codec/json/Jackson2JsonDecoderTests.java
|
{
"start": 2541,
"end": 13802
}
|
class ____ extends AbstractDecoderTests<Jackson2JsonDecoder> {
private final Pojo pojo1 = new Pojo("f1", "b1");
private final Pojo pojo2 = new Pojo("f2", "b2");
public Jackson2JsonDecoderTests() {
super(new Jackson2JsonDecoder());
}
@Override
@Test
public void canDecode() {
assertThat(decoder.canDecode(ResolvableType.forClass(Pojo.class), APPLICATION_JSON)).isTrue();
assertThat(decoder.canDecode(ResolvableType.forClass(Pojo.class), APPLICATION_NDJSON)).isTrue();
assertThat(decoder.canDecode(ResolvableType.forClass(Pojo.class), null)).isTrue();
assertThat(decoder.canDecode(ResolvableType.forClass(String.class), null)).isFalse();
assertThat(decoder.canDecode(ResolvableType.forClass(Pojo.class), APPLICATION_XML)).isFalse();
assertThat(this.decoder.canDecode(ResolvableType.forClass(Pojo.class),
new MediaType("application", "json", StandardCharsets.UTF_8))).isTrue();
assertThat(this.decoder.canDecode(ResolvableType.forClass(Pojo.class),
new MediaType("application", "json", StandardCharsets.US_ASCII))).isTrue();
assertThat(this.decoder.canDecode(ResolvableType.forClass(Pojo.class),
new MediaType("application", "json", StandardCharsets.ISO_8859_1))).isTrue();
}
@Test
void canDecodeWithObjectMapperRegistrationForType() {
MediaType halJsonMediaType = MediaType.parseMediaType("application/hal+json");
MediaType halFormsJsonMediaType = MediaType.parseMediaType("application/prs.hal-forms+json");
assertThat(decoder.canDecode(ResolvableType.forClass(Pojo.class), halJsonMediaType)).isTrue();
assertThat(decoder.canDecode(ResolvableType.forClass(Pojo.class), MediaType.APPLICATION_JSON)).isTrue();
assertThat(decoder.canDecode(ResolvableType.forClass(Pojo.class), halFormsJsonMediaType)).isTrue();
assertThat(decoder.canDecode(ResolvableType.forClass(Map.class), MediaType.APPLICATION_JSON)).isTrue();
decoder.registerObjectMappersForType(Pojo.class, map -> {
map.put(halJsonMediaType, new ObjectMapper());
map.put(MediaType.APPLICATION_JSON, new ObjectMapper());
});
assertThat(decoder.canDecode(ResolvableType.forClass(Pojo.class), halJsonMediaType)).isTrue();
assertThat(decoder.canDecode(ResolvableType.forClass(Pojo.class), MediaType.APPLICATION_JSON)).isTrue();
assertThat(decoder.canDecode(ResolvableType.forClass(Pojo.class), halFormsJsonMediaType)).isFalse();
assertThat(decoder.canDecode(ResolvableType.forClass(Map.class), MediaType.APPLICATION_JSON)).isTrue();
}
@Test // SPR-15866
void canDecodeWithProvidedMimeType() {
MimeType textJavascript = new MimeType("text", "javascript", StandardCharsets.UTF_8);
Jackson2JsonDecoder decoder = new Jackson2JsonDecoder(new ObjectMapper(), textJavascript);
assertThat(decoder.getDecodableMimeTypes()).isEqualTo(Collections.singletonList(textJavascript));
}
@Test
void decodableMimeTypesIsImmutable() {
MimeType textJavascript = new MimeType("text", "javascript", StandardCharsets.UTF_8);
Jackson2JsonDecoder decoder = new Jackson2JsonDecoder(new ObjectMapper(), textJavascript);
assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() ->
decoder.getMimeTypes().add(new MimeType("text", "ecmascript")));
}
@Test
void decodableMimeTypesWithObjectMapperRegistration() {
MimeType mimeType1 = MediaType.parseMediaType("application/hal+json");
MimeType mimeType2 = new MimeType("text", "javascript", StandardCharsets.UTF_8);
Jackson2JsonDecoder decoder = new Jackson2JsonDecoder(new ObjectMapper(), mimeType2);
decoder.registerObjectMappersForType(Pojo.class, map -> map.put(mimeType1, new ObjectMapper()));
assertThat(decoder.getDecodableMimeTypes(ResolvableType.forClass(Pojo.class)))
.containsExactly(mimeType1);
}
@Override
@Test
protected void decode() {
Flux<DataBuffer> input = Flux.concat(
stringBuffer("[{\"bar\":\"b1\",\"foo\":\"f1\"},"),
stringBuffer("{\"bar\":\"b2\",\"foo\":\"f2\"}]"));
testDecodeAll(input, Pojo.class, step -> step
.expectNext(pojo1)
.expectNext(pojo2)
.verifyComplete());
}
@Override
@Test
protected void decodeToMono() {
Flux<DataBuffer> input = Flux.concat(
stringBuffer("[{\"bar\":\"b1\",\"foo\":\"f1\"},"),
stringBuffer("{\"bar\":\"b2\",\"foo\":\"f2\"}]"));
ResolvableType elementType = ResolvableType.forClassWithGenerics(List.class, Pojo.class);
testDecodeToMonoAll(input, elementType, step -> step
.expectNext(Arrays.asList(new Pojo("f1", "b1"), new Pojo("f2", "b2")))
.expectComplete()
.verify(), null, null);
}
@Test
void decodeToFluxWithListElements() {
Flux<DataBuffer> input = Flux.concat(
stringBuffer("[{\"bar\":\"b1\",\"foo\":\"f1\"},{\"bar\":\"b2\",\"foo\":\"f2\"}]"),
stringBuffer("[{\"bar\":\"b3\",\"foo\":\"f3\"},{\"bar\":\"b4\",\"foo\":\"f4\"}]"));
ResolvableType elementType = ResolvableType.forClassWithGenerics(List.class, Pojo.class);
testDecodeAll(input, elementType,
step -> step
.expectNext(List.of(pojo1, pojo2))
.expectNext(List.of(new Pojo("f3", "b3"), new Pojo("f4", "b4")))
.verifyComplete(),
MimeTypeUtils.APPLICATION_JSON,
Collections.emptyMap());
}
@Test
void decodeEmptyArrayToFlux() {
Flux<DataBuffer> input = Flux.from(stringBuffer("[]"));
testDecode(input, Pojo.class, StepVerifier.LastStep::verifyComplete);
}
@Test
void fieldLevelJsonView() {
Flux<DataBuffer> input = Flux.from(stringBuffer(
"{\"withView1\" : \"with\", \"withView2\" : \"with\", \"withoutView\" : \"without\"}"));
ResolvableType elementType = ResolvableType.forClass(JacksonViewBean.class);
Map<String, Object> hints = Map.of(
org.springframework.http.codec.json.Jackson2CodecSupport.JSON_VIEW_HINT, MyJacksonView1.class);
testDecode(input, elementType, step -> step
.consumeNextWith(value -> {
JacksonViewBean bean = (JacksonViewBean) value;
assertThat(bean.getWithView1()).isEqualTo("with");
assertThat(bean.getWithView2()).isNull();
assertThat(bean.getWithoutView()).isNull();
}), null, hints);
}
@Test
void classLevelJsonView() {
Flux<DataBuffer> input = Flux.from(stringBuffer(
"{\"withView1\" : \"with\", \"withView2\" : \"with\", \"withoutView\" : \"without\"}"));
ResolvableType elementType = ResolvableType.forClass(JacksonViewBean.class);
Map<String, Object> hints = Map.of(
org.springframework.http.codec.json.Jackson2CodecSupport.JSON_VIEW_HINT, MyJacksonView3.class);
testDecode(input, elementType, step -> step
.consumeNextWith(value -> {
JacksonViewBean bean = (JacksonViewBean) value;
assertThat(bean.getWithoutView()).isEqualTo("without");
assertThat(bean.getWithView1()).isNull();
assertThat(bean.getWithView2()).isNull();
})
.verifyComplete(), null, hints);
}
@Test
void invalidData() {
Flux<DataBuffer> input = Flux.from(stringBuffer("{\"foofoo\": \"foofoo\", \"barbar\": \"barbar\""));
testDecode(input, Pojo.class, step -> step.verifyError(DecodingException.class));
}
@Test // gh-22042
void decodeWithNullLiteral() {
Flux<Object> result = this.decoder.decode(Flux.concat(stringBuffer("null")),
ResolvableType.forType(Pojo.class), MediaType.APPLICATION_JSON, Collections.emptyMap());
StepVerifier.create(result).expectComplete().verify();
}
@Test // gh-27511
void noDefaultConstructor() {
Flux<DataBuffer> input = Flux.from(stringBuffer("{\"property1\":\"foo\",\"property2\":\"bar\"}"));
testDecode(input, BeanWithNoDefaultConstructor.class, step -> step
.consumeNextWith(o -> {
assertThat(o.getProperty1()).isEqualTo("foo");
assertThat(o.getProperty2()).isEqualTo("bar");
})
.verifyComplete()
);
}
@Test
void codecException() {
Flux<DataBuffer> input = Flux.from(stringBuffer("["));
ResolvableType elementType = ResolvableType.forClass(BeanWithNoDefaultConstructor.class);
Flux<Object> flux = new Jackson2JsonDecoder().decode(input, elementType, null, Collections.emptyMap());
StepVerifier.create(flux).verifyError(CodecException.class);
}
@Test // SPR-15975
void customDeserializer() {
Mono<DataBuffer> input = stringBuffer("{\"test\": 1}");
testDecode(input, TestObject.class, step -> step
.consumeNextWith(o -> assertThat(o.getTest()).isEqualTo(1))
.verifyComplete()
);
}
@Test
void bigDecimalFlux() {
Flux<DataBuffer> input = stringBuffer("[ 1E+2 ]").flux();
testDecode(input, BigDecimal.class, step -> step
.expectNext(new BigDecimal("1E+2"))
.verifyComplete()
);
}
@Test
@SuppressWarnings("unchecked")
void decodeNonUtf8Encoding() {
Mono<DataBuffer> input = stringBuffer("{\"foo\":\"bar\"}", StandardCharsets.UTF_16);
ResolvableType type = ResolvableType.forType(new ParameterizedTypeReference<Map<String, String>>() {});
testDecode(input, type, step -> step
.assertNext(value -> assertThat((Map<String, String>) value).containsEntry("foo", "bar"))
.verifyComplete(),
MediaType.parseMediaType("application/json; charset=utf-16"),
null);
}
@Test
@SuppressWarnings("unchecked")
void decodeNonUnicode() {
Flux<DataBuffer> input = Flux.concat(stringBuffer("{\"føø\":\"bår\"}", StandardCharsets.ISO_8859_1));
ResolvableType type = ResolvableType.forType(new ParameterizedTypeReference<Map<String, String>>() {});
testDecode(input, type, step -> step
.assertNext(o -> assertThat((Map<String, String>) o).containsEntry("føø", "bår"))
.verifyComplete(),
MediaType.parseMediaType("application/json; charset=iso-8859-1"),
null);
}
@Test
@SuppressWarnings("unchecked")
void decodeMonoNonUtf8Encoding() {
Mono<DataBuffer> input = stringBuffer("{\"foo\":\"bar\"}", StandardCharsets.UTF_16);
ResolvableType type = ResolvableType.forType(new ParameterizedTypeReference<Map<String, String>>() {});
testDecodeToMono(input, type, step -> step
.assertNext(value -> assertThat((Map<String, String>) value).containsEntry("foo", "bar"))
.verifyComplete(),
MediaType.parseMediaType("application/json; charset=utf-16"),
null);
}
@Test
@SuppressWarnings("unchecked")
void decodeAscii() {
Flux<DataBuffer> input = Flux.concat(stringBuffer("{\"foo\":\"bar\"}", StandardCharsets.US_ASCII));
ResolvableType type = ResolvableType.forType(new ParameterizedTypeReference<Map<String, String>>() {});
testDecode(input, type, step -> step
.assertNext(value -> assertThat((Map<String, String>) value).containsEntry("foo", "bar"))
.verifyComplete(),
MediaType.parseMediaType("application/json; charset=us-ascii"),
null);
}
@Test
void cancelWhileDecoding() {
Flux<DataBuffer> input = Flux.just(
stringBuffer("[{\"bar\":\"b1\",\"foo\":\"f1\"},").block(),
stringBuffer("{\"bar\":\"b2\",\"foo\":\"f2\"}]").block());
testDecodeCancel(input, ResolvableType.forClass(Pojo.class), null, null);
}
private Mono<DataBuffer> stringBuffer(String value) {
return stringBuffer(value, StandardCharsets.UTF_8);
}
private Mono<DataBuffer> stringBuffer(String value, Charset charset) {
return Mono.defer(() -> {
byte[] bytes = value.getBytes(charset);
DataBuffer buffer = this.bufferFactory.allocateBuffer(bytes.length);
buffer.write(bytes);
return Mono.just(buffer);
});
}
@SuppressWarnings("unused")
private static
|
Jackson2JsonDecoderTests
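Note: outside the test harness the decoder is driven the same way testDecode drives it. A minimal sketch, assuming a plain Pojo bean and Spring's shared buffer factory:

Jackson2JsonDecoder decoder = new Jackson2JsonDecoder();
DataBuffer buffer = DefaultDataBufferFactory.sharedInstance
        .wrap("{\"foo\":\"f1\",\"bar\":\"b1\"}".getBytes(StandardCharsets.UTF_8));
Flux<Object> decoded = decoder.decode(Flux.just(buffer),
        ResolvableType.forClass(Pojo.class), MediaType.APPLICATION_JSON, Collections.emptyMap());
decoded.subscribe(System.out::println); // emits one Pojo("f1", "b1")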
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/context/aot/GeneratedMapUtils.java
|
{
"start": 1015,
"end": 1145
}
|
class ____ {
private GeneratedMapUtils() {
}
/**
* Load a generated map.
* @param className the name of the
|
GeneratedMapUtils
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecyclePolicyItem.java
|
{
"start": 973,
"end": 1206
}
|
class ____ a special wrapper almost exactly like the
* {@link SnapshotLifecyclePolicyMetadata}, however, it elides the headers to ensure that they
* are not leaked to the user since they may contain sensitive information.
*/
public
|
is
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAddStripedBlocks.java
|
{
"start": 3439,
"end": 21693
}
|
class ____ {
private final ErasureCodingPolicy ecPolicy =
StripedFileTestUtil.getDefaultECPolicy();
private final short dataBlocks = (short) ecPolicy.getNumDataUnits();
private final short parityBlocks = (short) ecPolicy.getNumParityUnits();
private final int cellSize = ecPolicy.getCellSize();
private final short groupSize = (short) (ecPolicy.getNumDataUnits() +
ecPolicy.getNumParityUnits());
private MiniDFSCluster cluster;
private DistributedFileSystem dfs;
@BeforeEach
public void setup() throws IOException {
HdfsConfiguration conf = new HdfsConfiguration();
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(groupSize).build();
cluster.waitActive();
dfs = cluster.getFileSystem();
dfs.enableErasureCodingPolicy(ecPolicy.getName());
dfs.getClient().setErasureCodingPolicy("/", ecPolicy.getName());
}
@AfterEach
public void tearDown() {
if (cluster != null) {
cluster.shutdown();
cluster = null;
}
}
/**
   * Checks that the scheduled block size on each DN storage is correctly updated.
*/
@Test
public void testBlockScheduledUpdate() throws Exception {
final FSNamesystem fsn = cluster.getNamesystem();
final Path foo = new Path("/foo");
try (FSDataOutputStream out = dfs.create(foo, true)) {
DFSStripedOutputStream sout = (DFSStripedOutputStream) out.getWrappedStream();
writeAndFlushStripedOutputStream(sout, DFS_BYTES_PER_CHECKSUM_DEFAULT);
// make sure the scheduled block size has been updated for each DN storage
// in NN
final List<DatanodeDescriptor> dnList = new ArrayList<>();
fsn.getBlockManager().getDatanodeManager().fetchDatanodes(dnList, null, false);
for (DatanodeDescriptor dn : dnList) {
assertEquals(1, dn.getBlocksScheduled());
}
}
// we have completed the file, force the DN to flush IBR
for (DataNode dn : cluster.getDataNodes()) {
DataNodeTestUtils.triggerBlockReport(dn);
}
// check the scheduled block size again
final List<DatanodeDescriptor> dnList = new ArrayList<>();
fsn.getBlockManager().getDatanodeManager().fetchDatanodes(dnList, null, false);
for (DatanodeDescriptor dn : dnList) {
assertEquals(0, dn.getBlocksScheduled());
}
}
/**
   * Makes sure the IDs of striped blocks do not conflict.
*/
@Test
public void testAllocateBlockId() throws Exception {
Path testPath = new Path("/testfile");
    // create a file, which allocates a new block
DFSTestUtil.writeFile(dfs, testPath, "hello, world!");
LocatedBlocks lb = dfs.getClient().getLocatedBlocks(testPath.toString(), 0);
final long firstId = lb.get(0).getBlock().getBlockId();
// delete the file
dfs.delete(testPath, true);
// allocate a new block, and make sure the new block's id does not conflict
// with the previous one
DFSTestUtil.writeFile(dfs, testPath, "hello again");
lb = dfs.getClient().getLocatedBlocks(testPath.toString(), 0);
final long secondId = lb.get(0).getBlock().getBlockId();
assertEquals(firstId + HdfsServerConstants.MAX_BLOCKS_IN_GROUP, secondId);
}
private static void writeAndFlushStripedOutputStream(
DFSStripedOutputStream out, int chunkSize) throws IOException {
// FSOutputSummer.BUFFER_NUM_CHUNKS == 9
byte[] toWrite = new byte[chunkSize * 9 + 1];
out.write(toWrite);
DFSTestUtil.flushInternal(out);
}
@Test
@Timeout(value = 60)
public void testAddStripedBlock() throws Exception {
final Path file = new Path("/file1");
// create an empty file
FSDataOutputStream out = null;
try {
out = dfs.create(file, (short) 1);
writeAndFlushStripedOutputStream(
(DFSStripedOutputStream) out.getWrappedStream(),
DFS_BYTES_PER_CHECKSUM_DEFAULT);
FSDirectory fsdir = cluster.getNamesystem().getFSDirectory();
INodeFile fileNode = fsdir.getINode4Write(file.toString()).asFile();
BlockInfo[] blocks = fileNode.getBlocks();
assertEquals(1, blocks.length);
assertTrue(blocks[0].isStriped());
checkStripedBlockUC((BlockInfoStriped) fileNode.getLastBlock(), true);
// restart NameNode to check editlog
cluster.restartNameNode(true);
fsdir = cluster.getNamesystem().getFSDirectory();
fileNode = fsdir.getINode4Write(file.toString()).asFile();
blocks = fileNode.getBlocks();
assertEquals(1, blocks.length);
assertTrue(blocks[0].isStriped());
checkStripedBlockUC((BlockInfoStriped) fileNode.getLastBlock(), false);
// save namespace, restart namenode, and check
dfs = cluster.getFileSystem();
dfs.setSafeMode(SafeModeAction.ENTER);
dfs.saveNamespace();
dfs.setSafeMode(SafeModeAction.LEAVE);
cluster.restartNameNode(true);
fsdir = cluster.getNamesystem().getFSDirectory();
fileNode = fsdir.getINode4Write(file.toString()).asFile();
blocks = fileNode.getBlocks();
assertEquals(1, blocks.length);
assertTrue(blocks[0].isStriped());
checkStripedBlockUC((BlockInfoStriped) fileNode.getLastBlock(), false);
} finally {
IOUtils.cleanupWithLogger(null, out);
}
}
private void checkStripedBlockUC(BlockInfoStriped block,
boolean checkReplica) {
assertEquals(0, block.numNodes());
assertFalse(block.isComplete());
assertEquals(dataBlocks, block.getDataBlockNum());
assertEquals(parityBlocks, block.getParityBlockNum());
assertEquals(0,
block.getBlockId() & HdfsServerConstants.BLOCK_GROUP_INDEX_MASK);
assertEquals(HdfsServerConstants.BlockUCState.UNDER_CONSTRUCTION,
block.getBlockUCState());
if (checkReplica) {
assertEquals(groupSize,
block.getUnderConstructionFeature().getNumExpectedLocations());
DatanodeStorageInfo[] storages = block.getUnderConstructionFeature()
.getExpectedStorageLocations();
for (DataNode dn : cluster.getDataNodes()) {
assertTrue(includeDataNode(dn.getDatanodeId(), storages));
}
}
}
private boolean includeDataNode(DatanodeID dn, DatanodeStorageInfo[] storages) {
for (DatanodeStorageInfo storage : storages) {
if (storage.getDatanodeDescriptor().equals(dn)) {
return true;
}
}
return false;
}
@Test
public void testGetLocatedStripedBlocks() throws Exception {
final Path file = new Path("/file1");
// create an empty file
FSDataOutputStream out = null;
try {
out = dfs.create(file, (short) 1);
writeAndFlushStripedOutputStream(
(DFSStripedOutputStream) out.getWrappedStream(),
DFS_BYTES_PER_CHECKSUM_DEFAULT);
FSDirectory fsdir = cluster.getNamesystem().getFSDirectory();
INodeFile fileNode = fsdir.getINode4Write(file.toString()).asFile();
BlockInfoStriped lastBlk = (BlockInfoStriped) fileNode.getLastBlock();
DatanodeInfo[] expectedDNs = DatanodeStorageInfo.toDatanodeInfos(
lastBlk.getUnderConstructionFeature().getExpectedStorageLocations());
byte[] indices = lastBlk.getUnderConstructionFeature().getBlockIndices();
LocatedBlocks blks = dfs.getClient().getLocatedBlocks(file.toString(), 0L);
assertEquals(1, blks.locatedBlockCount());
LocatedBlock lblk = blks.get(0);
assertTrue(lblk instanceof LocatedStripedBlock);
DatanodeInfo[] datanodes = lblk.getLocations();
byte[] blockIndices = ((LocatedStripedBlock) lblk).getBlockIndices();
assertEquals(groupSize, datanodes.length);
assertEquals(groupSize, blockIndices.length);
assertArrayEquals(indices, blockIndices);
assertArrayEquals(expectedDNs, datanodes);
} finally {
IOUtils.cleanupWithLogger(null, out);
}
}
/**
* Test BlockInfoStripedUnderConstruction#addReplicaIfNotPresent in different
* scenarios.
*/
@Test
public void testAddUCReplica() throws Exception {
final Path file = new Path("/file1");
final List<String> storageIDs = new ArrayList<>();
// create an empty file
FSDataOutputStream out = null;
try {
out = dfs.create(file, (short) 1);
// 1. create the UC striped block
FSDirectory fsdir = cluster.getNamesystem().getFSDirectory();
INodeFile fileNode = fsdir.getINode4Write(file.toString()).asFile();
cluster.getNamesystem().getAdditionalBlock(file.toString(),
fileNode.getId(), dfs.getClient().getClientName(), null, null, null, null);
BlockInfo lastBlock = fileNode.getLastBlock();
DatanodeStorageInfo[] locs = lastBlock.getUnderConstructionFeature()
.getExpectedStorageLocations();
byte[] indices = lastBlock.getUnderConstructionFeature().getBlockIndices();
assertEquals(groupSize, locs.length);
assertEquals(groupSize, indices.length);
// 2. mimic incremental block reports and make sure the uc-replica list in
// the BlockInfoUCStriped is correct
int i = 0;
for (DataNode dn : cluster.getDataNodes()) {
final Block block = new Block(lastBlock.getBlockId() + i++,
0, lastBlock.getGenerationStamp());
DatanodeStorage storage = new DatanodeStorage(UUID.randomUUID().toString());
storageIDs.add(storage.getStorageID());
StorageReceivedDeletedBlocks[] reports = DFSTestUtil
.makeReportForReceivedBlock(block, BlockStatus.RECEIVING_BLOCK,
storage);
for (StorageReceivedDeletedBlocks report : reports) {
cluster.getNamesystem().processIncrementalBlockReport(
dn.getDatanodeId(), report);
}
}
// make sure lastBlock is correct and the storages have been updated
locs = lastBlock.getUnderConstructionFeature().getExpectedStorageLocations();
indices = lastBlock.getUnderConstructionFeature().getBlockIndices();
assertEquals(groupSize, locs.length);
assertEquals(groupSize, indices.length);
for (DatanodeStorageInfo newstorage : locs) {
assertTrue(storageIDs.contains(newstorage.getStorageID()));
}
} finally {
IOUtils.cleanupWithLogger(null, out);
}
// 3. restart the namenode. mimic the full block reports and check the
// uc-replica list again
cluster.restartNameNode(true);
final String bpId = cluster.getNamesystem().getBlockPoolId();
INodeFile fileNode = cluster.getNamesystem().getFSDirectory()
.getINode4Write(file.toString()).asFile();
BlockInfo lastBlock = fileNode.getLastBlock();
int i = groupSize - 1;
for (DataNode dn : cluster.getDataNodes()) {
String storageID = storageIDs.get(i);
final Block block = new Block(lastBlock.getBlockId() + i--, 0,
lastBlock.getGenerationStamp());
DatanodeStorage storage = new DatanodeStorage(storageID);
List<ReplicaBeingWritten> blocks = new ArrayList<>();
ReplicaBeingWritten replica = new ReplicaBeingWritten(block, null, null,
null);
blocks.add(replica);
BlockListAsLongs bll = BlockListAsLongs.encode(blocks);
StorageBlockReport[] reports = {new StorageBlockReport(storage,
bll)};
cluster.getNameNodeRpc().blockReport(dn.getDNRegistrationForBP(bpId),
bpId, reports, null);
}
DatanodeStorageInfo[] locs = lastBlock.getUnderConstructionFeature()
.getExpectedStorageLocations();
byte[] indices = lastBlock.getUnderConstructionFeature().getBlockIndices();
assertEquals(groupSize, locs.length);
assertEquals(groupSize, indices.length);
for (i = 0; i < groupSize; i++) {
assertEquals(storageIDs.get(i),
locs[groupSize - 1 - i].getStorageID());
assertEquals(groupSize - i - 1, indices[i]);
}
}
@Test
public void testCheckStripedReplicaCorrupt() throws Exception {
final int numBlocks = 4;
final int numStripes = 4;
final Path filePath = new Path("/corrupt");
final FSNamesystem ns = cluster.getNameNode().getNamesystem();
final BlockManager bm = ns.getBlockManager();
DFSTestUtil.createStripedFile(cluster, filePath, null,
numBlocks, numStripes, false);
INodeFile fileNode = ns.getFSDirectory().getINode(filePath.toString()).
asFile();
assertTrue(fileNode.isStriped());
BlockInfo stored = fileNode.getBlocks()[0];
BlockManagerTestUtil.updateState(ns.getBlockManager());
assertEquals(0, ns.getCorruptReplicaBlocks());
// Now send a block report with correct size
DatanodeStorage storage = new DatanodeStorage(UUID.randomUUID().toString());
final Block reported = new Block(stored);
reported.setNumBytes(numStripes * cellSize);
StorageReceivedDeletedBlocks[] reports = DFSTestUtil
.makeReportForReceivedBlock(reported,
ReceivedDeletedBlockInfo.BlockStatus.RECEIVED_BLOCK, storage);
ns.processIncrementalBlockReport(
cluster.getDataNodes().get(0).getDatanodeId(), reports[0]);
BlockManagerTestUtil.updateState(ns.getBlockManager());
assertEquals(0, ns.getCorruptReplicaBlocks());
// Now send a block report with wrong size
reported.setBlockId(stored.getBlockId() + 1);
reported.setNumBytes(numStripes * cellSize - 1);
reports = DFSTestUtil.makeReportForReceivedBlock(reported,
ReceivedDeletedBlockInfo.BlockStatus.RECEIVED_BLOCK, storage);
ns.processIncrementalBlockReport(
cluster.getDataNodes().get(1).getDatanodeId(), reports[0]);
BlockManagerTestUtil.updateState(ns.getBlockManager());
assertEquals(1, ns.getCorruptReplicaBlocks());
// Now send a parity block report with correct size
reported.setBlockId(stored.getBlockId() + dataBlocks);
reported.setNumBytes(numStripes * cellSize);
reports = DFSTestUtil.makeReportForReceivedBlock(reported,
ReceivedDeletedBlockInfo.BlockStatus.RECEIVED_BLOCK, storage);
ns.processIncrementalBlockReport(
cluster.getDataNodes().get(2).getDatanodeId(), reports[0]);
BlockManagerTestUtil.updateState(ns.getBlockManager());
assertEquals(1, ns.getCorruptReplicaBlocks());
// Now send a parity block report with wrong size
reported.setBlockId(stored.getBlockId() + dataBlocks);
reported.setNumBytes(numStripes * cellSize + 1);
reports = DFSTestUtil.makeReportForReceivedBlock(reported,
ReceivedDeletedBlockInfo.BlockStatus.RECEIVED_BLOCK, storage);
ns.processIncrementalBlockReport(
cluster.getDataNodes().get(3).getDatanodeId(), reports[0]);
BlockManagerTestUtil.updateState(ns.getBlockManager());
// the total number of corrupted block info is still 1
assertEquals(1, ns.getCorruptECBlockGroups());
assertEquals(1, ns.getCorruptReplicaBlocks());
assertEquals(0, ns.getCorruptReplicatedBlocks());
// 2 internal blocks corrupted
assertEquals(2, bm.getCorruptReplicas(stored).size());
// Now change the size of stored block, and test verifying the last
// block size
stored.setNumBytes(stored.getNumBytes() + 10);
reported.setBlockId(stored.getBlockId() + dataBlocks + 2);
reported.setNumBytes(numStripes * cellSize);
reports = DFSTestUtil.makeReportForReceivedBlock(reported,
ReceivedDeletedBlockInfo.BlockStatus.RECEIVED_BLOCK, storage);
ns.processIncrementalBlockReport(
cluster.getDataNodes().get(4).getDatanodeId(), reports[0]);
BlockManagerTestUtil.updateState(ns.getBlockManager());
assertEquals(1, ns.getCorruptReplicaBlocks());
assertEquals(3, bm.getCorruptReplicas(stored).size());
// Now send a parity block report with correct size based on adjusted
// size of stored block
    // Now the stored block has numStripes full stripes + a cell + 10 bytes
stored.setNumBytes(stored.getNumBytes() + cellSize);
reported.setBlockId(stored.getBlockId());
reported.setNumBytes((numStripes + 1) * cellSize);
reports = DFSTestUtil.makeReportForReceivedBlock(reported,
ReceivedDeletedBlockInfo.BlockStatus.RECEIVED_BLOCK, storage);
ns.processIncrementalBlockReport(
cluster.getDataNodes().get(0).getDatanodeId(), reports[0]);
BlockManagerTestUtil.updateState(ns.getBlockManager());
assertEquals(1, ns.getCorruptReplicaBlocks());
assertEquals(3, bm.getCorruptReplicas(stored).size());
reported.setBlockId(stored.getBlockId() + 1);
reported.setNumBytes(numStripes * cellSize + 10);
reports = DFSTestUtil.makeReportForReceivedBlock(reported,
ReceivedDeletedBlockInfo.BlockStatus.RECEIVED_BLOCK, storage);
ns.processIncrementalBlockReport(
cluster.getDataNodes().get(0).getDatanodeId(), reports[0]);
BlockManagerTestUtil.updateState(ns.getBlockManager());
assertEquals(1, ns.getCorruptReplicaBlocks());
assertEquals(3, bm.getCorruptReplicas(stored).size());
reported.setBlockId(stored.getBlockId() + dataBlocks);
reported.setNumBytes((numStripes + 1) * cellSize);
reports = DFSTestUtil.makeReportForReceivedBlock(reported,
ReceivedDeletedBlockInfo.BlockStatus.RECEIVED_BLOCK, storage);
ns.processIncrementalBlockReport(
cluster.getDataNodes().get(2).getDatanodeId(), reports[0]);
BlockManagerTestUtil.updateState(ns.getBlockManager());
assertEquals(1, ns.getCorruptReplicaBlocks());
assertEquals(3, bm.getCorruptReplicas(stored).size());
}
@Test
public void testStripedFlagInBlockLocation() throws IOException {
Path replicated = new Path("/blockLocation/replicated");
try (FSDataOutputStream out =
dfs.createFile(replicated).replicate().recursive().build()) {
out.write("this is a replicated file".getBytes());
}
BlockLocation[] locations = dfs.getFileBlockLocations(replicated, 0, 100);
assertEquals(1, locations.length,
"There should be exactly one Block present");
assertFalse(locations[0].isStriped(), "The file is Striped");
Path striped = new Path("/blockLocation/striped");
try (FSDataOutputStream out = dfs.createFile(striped).recursive().build()) {
out.write("this is a striped file".getBytes());
}
locations = dfs.getFileBlockLocations(striped, 0, 100);
assertEquals(1, locations.length,
"There should be exactly one Block present");
assertTrue(locations[0].isStriped(), "The file is not Striped");
}
}
|
TestAddStripedBlocks
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/JavaLangClashTest.java
|
{
"start": 1463,
"end": 1750
}
|
class ____ {}
""")
.doTest();
}
@Test
public void positiveTypeParameter() {
testHelper
.addSourceLines(
"java/lang/Foo.java",
"""
package java.lang;
// BUG: Diagnostic contains:
public
|
String
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/ErrorProneJavaCompilerTest.java
|
{
"start": 20445,
"end": 20768
}
|
class ____ {
String s = "old-value";
}
""");
}
@Test
public void patchSingleWithBugPatternCustomization() throws IOException {
JavaFileObject fileObject =
createOnDiskFileObject(
"StringConstantWrapper.java",
"""
|
StringConstantWrapper
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/http/server/observation/ServerRequestObservationContext.java
|
{
"start": 1266,
"end": 1790
}
|
class ____ extends RequestReplyReceiverContext<HttpServletRequest, HttpServletResponse> {
private @Nullable String pathPattern;
public ServerRequestObservationContext(HttpServletRequest request, HttpServletResponse response) {
super(HttpServletRequest::getHeader);
setCarrier(request);
setResponse(response);
}
public @Nullable String getPathPattern() {
return this.pathPattern;
}
public void setPathPattern(@Nullable String pathPattern) {
this.pathPattern = pathPattern;
}
}
|
ServerRequestObservationContext
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webflux/src/main/java/org/springframework/cloud/gateway/support/tagsprovider/GatewayHttpTagsProvider.java
|
{
"start": 923,
"end": 2014
}
|
class ____ implements GatewayTagsProvider {
@Override
public Tags apply(ServerWebExchange exchange) {
String outcome = "CUSTOM";
String status = "CUSTOM";
String httpStatusCodeStr = "NA";
String httpMethod = exchange.getRequest().getMethod().name();
// a non-standard HTTP status could be used, so be defensive here:
// it needs to be checked for first, otherwise the delegate response,
// whose status DIDN'T change, will be used
Integer statusInt = null;
HttpStatusCode statusCode = exchange.getResponse().getStatusCode();
if (statusCode != null) {
statusInt = statusCode.value();
if (statusInt != null) {
status = String.valueOf(statusInt);
httpStatusCodeStr = status;
HttpStatus resolved = HttpStatus.resolve(statusInt);
if (resolved != null) {
// this is not a CUSTOM status, so use series here.
outcome = resolved.series().name();
status = resolved.name();
}
}
}
return Tags.of("outcome", outcome, "status", status, "httpStatusCode", httpStatusCodeStr, "httpMethod",
httpMethod);
}
}
|
GatewayHttpTagsProvider
|
java
|
micronaut-projects__micronaut-core
|
http-client-core/src/main/java/io/micronaut/http/client/interceptor/HttpClientIntroductionAdvice.java
|
{
"start": 4266,
"end": 4929
}
|
class ____ implements MethodInterceptor<Object, Object> {
private static final Logger LOG = LoggerFactory.getLogger(HttpClientIntroductionAdvice.class);
/**
* The default Accept-Types.
*/
private static final MediaType[] DEFAULT_ACCEPT_TYPES = {MediaType.APPLICATION_JSON_TYPE};
private final List<ReactiveClientResultTransformer> transformers;
private final HttpClientBinderRegistry binderRegistry;
private final JsonMediaTypeCodec jsonMediaTypeCodec;
private final HttpClientRegistry<?> clientFactory;
private final ConversionService conversionService;
/**
* Constructor for advice
|
HttpClientIntroductionAdvice
|
java
|
apache__camel
|
components/camel-netty-http/src/test/java/org/apache/camel/component/netty/http/rest/RestApiMatchUriNettyTest.java
|
{
"start": 1234,
"end": 2654
}
|
class ____ extends BaseNettyTest {
protected final Logger log = LoggerFactory.getLogger(RestApiMatchUriNettyTest.class);
@Test
public void testApi() {
String out = template.requestBody("netty-http:http://localhost:{{port}}/api-doc", null, String.class);
assertNotNull(out);
log.info(out);
assertTrue(out.contains("\"version\" : \"1.2.3\""));
assertTrue(out.contains("\"title\" : \"The hello rest thing\""));
assertTrue(out.contains("\"/hello/hi/{name}\""));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
restConfiguration().component("netty-http").host("localhost").port(getPort()).apiContextPath("/api-doc")
.endpointProperty("matchOnUriPrefix", "true")
.apiProperty("cors", "true").apiProperty("api.title", "The hello rest thing")
.apiProperty("api.version", "1.2.3");
rest("/hello").consumes("application/json").produces("application/json")
.get("/hi/{name}").description("Saying hi")
.param().name("name").type(RestParamType.path).dataType("string").description("Who is it").endParam()
.to("log:hi");
}
};
}
}
|
RestApiMatchUriNettyTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/IncrementInForLoopAndHeaderTest.java
|
{
"start": 1405,
"end": 4177
}
|
class ____ {
public void basicTest() {
// BUG: Diagnostic contains: increment
for (int i = 0; i < 10; i++) {
i++;
}
}
public void decrement() {
// BUG: Diagnostic contains: increment
for (int i = 0; i < 10; i++) {
i--;
}
}
public void preInc() {
// BUG: Diagnostic contains: increment
for (int i = 0; i < 10; i++) {
--i;
}
}
public void multipleStatements() {
// BUG: Diagnostic contains: increment
for (int i = 0; i < 10; i++) {
--i;
int a = 0;
}
}
public void multipleUpdates() {
// BUG: Diagnostic contains: increment
for (int i = 0, a = 1; i < 10; i++, a++) {
a++;
}
}
public void multipleUpdatesOtherVar() {
// BUG: Diagnostic contains: increment
for (int i = 0, a = 1; i < 10; i++, a++) {
i++;
}
}
public void multipleUpdatesBothVars() {
// BUG: Diagnostic contains: increment
for (int i = 0, a = 1; i < 10; i++, a++) {
a++;
i++;
}
}
public void nestedFor() {
for (int i = 0; i < 10; i++) {
// BUG: Diagnostic contains: increment
for (int a = 0; a < 10; a++) {
a--;
}
}
}
public void nestedForBoth() {
// BUG: Diagnostic contains: increment
for (int i = 0; i < 10; i++) {
i++;
// BUG: Diagnostic contains: increment
for (int a = 0; a < 10; a++) {
a--;
}
}
}
public void expressionStatement() {
// BUG: Diagnostic contains: increment
for (int i = 0; i < 10; i++) i++;
}
}\
""")
.doTest();
}
@Test
public void negativeCases() {
compilationTestHelper
.addSourceLines(
"IncrementInForLoopAndHeaderNegativeCases.java",
"""
package com.google.errorprone.bugpatterns.testdata;
import java.util.List;
/** Created by mariasam on 7/20/17. */
public
|
IncrementInForLoopAndHeaderPositiveCases
|
java
|
apache__camel
|
components/camel-ai/camel-langchain4j-agent-api/src/main/java/org/apache/camel/component/langchain4j/agent/api/AgentConfiguration.java
|
{
"start": 7867,
"end": 7950
}
|
class ____.
*
* @param outputGuardrailClasses array of fully qualified
|
names
|
java
|
spring-projects__spring-boot
|
module/spring-boot-rsocket/src/main/java/org/springframework/boot/rsocket/server/RSocketServerFactory.java
|
{
"start": 872,
"end": 1386
}
|
interface ____ {
/**
* Gets a new fully configured but paused {@link RSocketServer} instance. Clients
* should not be able to connect to the returned server until
* {@link RSocketServer#start()} is called (which happens when the
* {@code ApplicationContext} has been fully refreshed).
* @param socketAcceptor the socket acceptor
* @return a fully configured and started {@link RSocketServer}
* @see RSocketServer#stop()
*/
RSocketServer create(SocketAcceptor socketAcceptor);
}
|
RSocketServerFactory
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/LangChain4jChatEndpointBuilderFactory.java
|
{
"start": 21474,
"end": 22629
}
|
class ____ {
/**
* The internal instance of the builder used to access to all the
* methods representing the name of headers.
*/
private static final LangChain4jChatHeaderNameBuilder INSTANCE = new LangChain4jChatHeaderNameBuilder();
/**
* The prompt Template.
*
* The option is a: {@code String} type.
*
* Group: common
*
* @return the name of the header {@code LangChain4jChatPromptTemplate}.
*/
public String langChain4jChatPromptTemplate() {
return "CamelLangChain4jChatPromptTemplate";
}
/**
* Augmented Data for RAG.
*
* The option is a: {@code String} type.
*
* Group: common
*
* @return the name of the header {@code LangChain4jChatAugmentedData}.
*/
public String langChain4jChatAugmentedData() {
return "CamelLangChain4jChatAugmentedData";
}
}
static LangChain4jChatEndpointBuilder endpointBuilder(String componentName, String path) {
|
LangChain4jChatHeaderNameBuilder
|
java
|
apache__camel
|
components/camel-pulsar/src/main/java/org/apache/camel/component/pulsar/PulsarConsumer.java
|
{
"start": 1870,
"end": 4874
}
|
class ____ extends DefaultConsumer implements Suspendable {
private static final Logger LOGGER = LoggerFactory.getLogger(PulsarConsumer.class);
private final PulsarEndpoint pulsarEndpoint;
private final ConsumerCreationStrategyFactory consumerCreationStrategyFactory;
private Queue<Consumer<byte[]>> pulsarConsumers;
private Queue<ExecutorService> executors;
public PulsarConsumer(PulsarEndpoint pulsarEndpoint, Processor processor) {
super(pulsarEndpoint, processor);
this.pulsarEndpoint = pulsarEndpoint;
this.pulsarConsumers = new ConcurrentLinkedQueue<>();
this.consumerCreationStrategyFactory = ConsumerCreationStrategyFactory.create(this);
this.executors = new ConcurrentLinkedQueue<>();
}
@Override
protected void doStart() throws Exception {
pulsarConsumers = stopConsumers(pulsarConsumers);
Collection<Consumer<byte[]>> consumers = createConsumers(pulsarEndpoint, consumerCreationStrategyFactory);
if (!pulsarEndpoint.getPulsarConfiguration().isMessageListener()) {
executors.addAll(subscribeWithThreadPool(consumers, pulsarEndpoint));
}
pulsarConsumers.addAll(consumers);
}
@Override
protected void doStop() throws PulsarClientException {
executors = stopExecutors(pulsarEndpoint.getCamelContext().getExecutorServiceManager(), executors);
pulsarConsumers = stopConsumers(pulsarConsumers);
}
/**
* Pauses the Pulsar consumers.
*
 * Once paused, a Pulsar consumer does not request any more messages from the broker. However, it may still receive
 * the messages it has already requested, up to `consumerQueueSize`.
*/
@Override
protected void doSuspend() {
pauseConsumers(pulsarConsumers);
}
@Override
protected void doResume() throws Exception {
resumeConsumers(pulsarConsumers);
}
private Collection<Consumer<byte[]>> createConsumers(
final PulsarEndpoint endpoint, final ConsumerCreationStrategyFactory factory)
throws Exception {
ConsumerCreationStrategy strategy = factory.getStrategy(endpoint.getPulsarConfiguration().getSubscriptionType());
return strategy.create(endpoint);
}
private Collection<ExecutorService> subscribeWithThreadPool(
Collection<Consumer<byte[]>> consumers, PulsarEndpoint endpoint) {
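// One fixed-size thread pool per consumer; each of its numThreads workers
// runs the same receive loop against that consumer instance.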
int numThreads = endpoint.getPulsarConfiguration().getNumberOfConsumerThreads();
return consumers.stream().map(consumer -> {
ExecutorService executor = endpoint.getCamelContext().getExecutorServiceManager().newFixedThreadPool(this,
"pulsar-consumer", numThreads);
for (int i = 0; i < numThreads; i++) {
executor.submit(new PulsarConsumerLoop(endpoint, consumer));
}
return executor;
}).collect(Collectors.toList());
}
private
|
PulsarConsumer
|
java
|
netty__netty
|
codec-socks/src/main/java/io/netty/handler/codec/socksx/v4/Socks4CommandRequest.java
|
{
"start": 748,
"end": 1177
}
|
interface ____ extends Socks4Message {
/**
* Returns the type of this request.
*/
Socks4CommandType type();
/**
* Returns the {@code USERID} field of this request.
*/
String userId();
/**
* Returns the {@code DSTIP} field of this request.
*/
String dstAddr();
/**
* Returns the {@code DSTPORT} field of this request.
*/
int dstPort();
}
|
Socks4CommandRequest
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/test/benchmark/encode/ArrayByte1000Encode.java
|
{
"start": 153,
"end": 588
}
|
class ____ extends BenchmarkCase {
private Object object;
public ArrayByte1000Encode(){
super("ArrayByte1000Encode");
byte[] array = new byte[1000];
for (int i = 0; i < array.length; ++i) {
array[i] = (byte) (i % 100);
}
this.object = array;
}
@Override
public void execute(Codec codec) throws Exception {
codec.encode(object);
}
}
|
ArrayByte1000Encode
|
java
|
spring-projects__spring-framework
|
spring-aop/src/test/java/org/springframework/aop/target/LazyCreationTargetSourceTests.java
|
{
"start": 1907,
"end": 2195
}
|
class ____ {
public static int initCount;
public InitCountingBean() {
if (InitCountingBean.class.equals(getClass())) {
// only increment when creating the actual target - not the proxy
initCount++;
}
}
public void doSomething() {
//no-op
}
}
}
|
InitCountingBean
|
java
|
apache__rocketmq
|
tools/src/main/java/org/apache/rocketmq/tools/command/consumer/ConsumerSubCommand.java
|
{
"start": 1629,
"end": 6182
}
|
class ____ implements SubCommand {
public static void main(String[] args) {
System.setProperty(MixAll.NAMESRV_ADDR_PROPERTY, "127.0.0.1:9876");
MQAdminStartup.main(new String[] {new ConsumerSubCommand().commandName(), "-g", "benchmark_consumer"});
}
@Override
public String commandName() {
return "consumer";
}
@Override
public String commandDesc() {
return "Query consumer's connection, status, etc.";
}
@Override
public Options buildCommandlineOptions(Options options) {
Option opt = new Option("g", "consumerGroup", true, "consumer group name");
opt.setRequired(true);
options.addOption(opt);
opt = new Option("s", "jstack", false, "Run jstack command in the consumer progress");
opt.setRequired(false);
options.addOption(opt);
return options;
}
@Override
public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) throws SubCommandException {
DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
try {
defaultMQAdminExt.start();
String group = commandLine.getOptionValue('g').trim();
ConsumerConnection cc = defaultMQAdminExt.examineConsumerConnectionInfo(group);
boolean jstack = commandLine.hasOption('s');
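// Without an explicit clientId (-i), dump the running info of every client connected to the group.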
if (!commandLine.hasOption('i')) {
int i = 1;
long now = System.currentTimeMillis();
final TreeMap<String/* clientId */, ConsumerRunningInfo> criTable =
new TreeMap<>();
for (Connection conn : cc.getConnectionSet()) {
try {
ConsumerRunningInfo consumerRunningInfo =
defaultMQAdminExt.getConsumerRunningInfo(group, conn.getClientId(), jstack);
if (consumerRunningInfo != null) {
criTable.put(conn.getClientId(), consumerRunningInfo);
String filePath = now + "/" + conn.getClientId();
MixAll.string2FileNotSafe(consumerRunningInfo.formatString(), filePath);
System.out.printf("%03d %-40s %-20s %s%n",
i++,
conn.getClientId(),
MQVersion.getVersionDesc(conn.getVersion()),
filePath);
}
} catch (Exception e) {
e.printStackTrace();
}
}
if (!criTable.isEmpty()) {
boolean subSame = ConsumerRunningInfo.analyzeSubscription(criTable);
boolean rebalanceOK = subSame && ConsumerRunningInfo.analyzeRebalance(criTable);
if (subSame) {
System.out.printf("%n%nSame subscription in the same group of consumer");
System.out.printf("%n%nRebalance %s%n", rebalanceOK ? "OK" : "Failed");
Iterator<Entry<String, ConsumerRunningInfo>> it = criTable.entrySet().iterator();
while (it.hasNext()) {
Entry<String, ConsumerRunningInfo> next = it.next();
String result =
ConsumerRunningInfo.analyzeProcessQueue(next.getKey(), next.getValue());
if (result.length() > 0) {
System.out.printf("%s", result);
}
}
} else {
System.out.printf("%n%nWARN: Different subscription in the same group of consumer!!!");
}
}
} else {
String clientId = commandLine.getOptionValue('i').trim();
ConsumerRunningInfo consumerRunningInfo =
defaultMQAdminExt.getConsumerRunningInfo(group, clientId, jstack);
if (consumerRunningInfo != null) {
System.out.printf("%s", consumerRunningInfo.formatString());
}
}
} catch (Exception e) {
throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
} finally {
defaultMQAdminExt.shutdown();
}
}
}
|
ConsumerSubCommand
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/error/ShouldBeSame.java
|
{
"start": 806,
"end": 1404
}
|
class ____ extends BasicErrorMessageFactory {
/**
* Creates a new <code>{@link ShouldBeSame}</code>.
* @param actual the actual value in the failed assertion.
* @param expected the expected value in the failed assertion.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldBeSame(Object actual, Object expected) {
return new ShouldBeSame(actual, expected);
}
private ShouldBeSame(Object actual, Object expected) {
super("%nExpecting actual:%n %s%nand:%n %s%nto refer to the same object", actual, expected);
}
}
|
ShouldBeSame
|
java
|
spring-projects__spring-framework
|
buildSrc/src/main/java/org/springframework/build/architecture/ArchitecturePlugin.java
|
{
"start": 1153,
"end": 2770
}
|
class ____ implements Plugin<Project> {
@Override
public void apply(Project project) {
project.getPlugins().withType(JavaPlugin.class, (javaPlugin) -> registerTasks(project));
}
private void registerTasks(Project project) {
JavaPluginExtension javaPluginExtension = project.getExtensions().getByType(JavaPluginExtension.class);
List<TaskProvider<ArchitectureCheck>> architectureChecks = new ArrayList<>();
for (SourceSet sourceSet : javaPluginExtension.getSourceSets()) {
if (sourceSet.getName().contains("test")) {
// skip test source sets.
continue;
}
TaskProvider<ArchitectureCheck> checkArchitecture = project.getTasks()
.register(taskName(sourceSet), ArchitectureCheck.class,
(task) -> {
task.setClasses(sourceSet.getOutput().getClassesDirs());
task.getResourcesDirectory().set(sourceSet.getOutput().getResourcesDir());
task.dependsOn(sourceSet.getProcessResourcesTaskName());
task.setDescription("Checks the architecture of the classes of the " + sourceSet.getName()
+ " source set.");
task.setGroup(LifecycleBasePlugin.VERIFICATION_GROUP);
});
architectureChecks.add(checkArchitecture);
}
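// Wire every architecture check into the lifecycle "check" task so they run as part of a normal build.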
if (!architectureChecks.isEmpty()) {
TaskProvider<Task> checkTask = project.getTasks().named(LifecycleBasePlugin.CHECK_TASK_NAME);
checkTask.configure((check) -> check.dependsOn(architectureChecks));
}
}
private static String taskName(SourceSet sourceSet) {
return "checkArchitecture"
+ sourceSet.getName().substring(0, 1).toUpperCase()
+ sourceSet.getName().substring(1);
}
}
|
ArchitecturePlugin
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/TestExecutionListenersTests.java
|
{
"start": 11983,
"end": 12157
}
|
interface ____ {
}
@TestExecutionListeners(listeners = QuuxTestExecutionListener.class, inheritListeners = false)
@Retention(RetentionPolicy.RUNTIME)
@
|
MetaInheritedListeners
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/bean/BodyAnnotationToDisambiguateMethodsTest.java
|
{
"start": 2139,
"end": 2547
}
|
class ____ {
public String body;
public void bar(String body) {
fail("bar() called with: " + body);
}
public void foo(@Body String body) {
this.body = body;
LOG.info("foo() method called on {}", this);
}
public void wrongMethod(String body) {
fail("wrongMethod() called with: {}" + body);
}
}
}
|
MyBean
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ImmutableCheckerTest.java
|
{
"start": 25161,
"end": 25696
}
|
class ____ {
// BUG: Diagnostic contains: instantiated with mutable type
final X<Object> x = null;
}
""")
.doTest();
}
@Test
public void containerOf_noSuchType() {
compilationHelper
.addSourceLines(
"X.java",
"""
import com.google.errorprone.annotations.Immutable;
@Immutable(containerOf = "Z")
// BUG: Diagnostic contains: could not find type(s) referenced by containerOf: Z
public
|
Test
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestTests.java
|
{
"start": 1501,
"end": 8413
}
|
class ____ extends AbstractWireSerializingTestCase<RestoreSnapshotRequest> {
private RestoreSnapshotRequest randomState(RestoreSnapshotRequest instance) {
if (randomBoolean()) {
List<String> indices = new ArrayList<>();
int count = randomInt(3) + 1;
for (int i = 0; i < count; ++i) {
indices.add(randomAlphaOfLength(randomInt(3) + 2));
}
instance.indices(indices);
}
if (randomBoolean()) {
List<String> plugins = new ArrayList<>();
int count = randomInt(3) + 1;
for (int i = 0; i < count; ++i) {
plugins.add(randomAlphaOfLength(randomInt(3) + 2));
}
instance.featureStates(plugins);
}
if (randomBoolean()) {
instance.renamePattern(randomUnicodeOfLengthBetween(1, 100));
}
if (randomBoolean()) {
instance.renameReplacement(randomUnicodeOfLengthBetween(1, 100));
}
instance.partial(randomBoolean());
instance.includeAliases(randomBoolean());
instance.quiet(randomBoolean());
if (randomBoolean()) {
Map<String, Object> indexSettings = new HashMap<>();
int count = randomInt(3) + 1;
for (int i = 0; i < count; ++i) {
indexSettings.put(randomAlphaOfLengthBetween(2, 5), randomAlphaOfLengthBetween(2, 5));
}
instance.indexSettings(indexSettings);
}
instance.includeGlobalState(randomBoolean());
if (randomBoolean()) {
instance.indicesOptions(
IndicesOptions.builder()
.concreteTargetOptions(new IndicesOptions.ConcreteTargetOptions(randomBoolean()))
.wildcardOptions(
new IndicesOptions.WildcardOptions(
randomBoolean(),
randomBoolean(),
randomBoolean(),
instance.indicesOptions().ignoreAliases() == false,
randomBoolean()
)
)
.gatekeeperOptions(IndicesOptions.GatekeeperOptions.builder().allowSelectors(false).includeFailureIndices(true).build())
.build()
);
}
instance.waitForCompletion(randomBoolean());
if (randomBoolean()) {
instance.masterNodeTimeout(randomTimeValue());
}
if (randomBoolean()) {
instance.snapshotUuid(randomBoolean() ? null : randomAlphaOfLength(10));
}
return instance;
}
@Override
protected RestoreSnapshotRequest createTestInstance() {
return randomState(new RestoreSnapshotRequest(TEST_REQUEST_TIMEOUT, randomAlphaOfLength(5), randomAlphaOfLength(10)));
}
@Override
protected Writeable.Reader<RestoreSnapshotRequest> instanceReader() {
return RestoreSnapshotRequest::new;
}
@Override
protected RestoreSnapshotRequest mutateInstance(RestoreSnapshotRequest instance) throws IOException {
RestoreSnapshotRequest copy = copyInstance(instance);
// ensure that at least one property is different
copy.repository("copied-" + instance.repository());
return randomState(copy);
}
public void testSource() throws IOException {
RestoreSnapshotRequest original = createTestInstance();
original.snapshotUuid(null); // cannot be set via the REST API
original.quiet(false); // cannot be set via the REST API
XContentBuilder builder = original.toXContent(XContentFactory.jsonBuilder(), new ToXContent.MapParams(Collections.emptyMap()));
Map<String, Object> map;
try (
XContentParser parser = XContentType.JSON.xContent()
.createParser(NamedXContentRegistry.EMPTY, null, BytesReference.bytes(builder).streamInput())
) {
map = parser.mapOrdered();
}
// we will only restore properties from the map that are contained in the request body. All other
// properties are restored from the original (in the actual REST action this is restored from the
// REST path and request parameters).
RestoreSnapshotRequest processed = new RestoreSnapshotRequest(TEST_REQUEST_TIMEOUT, original.repository(), original.snapshot());
processed.masterNodeTimeout(original.masterNodeTimeout());
processed.waitForCompletion(original.waitForCompletion());
processed.source(map);
assertEquals(original, processed);
}
public void testSkipOperatorOnlyWillNotBeSerialised() throws IOException {
RestoreSnapshotRequest original = createTestInstance();
assertFalse(original.skipOperatorOnlyState()); // default is false
if (randomBoolean()) {
original.skipOperatorOnlyState(true);
}
Map<String, Object> map = convertRequestToMap(original);
// It is not serialised as xcontent
assertFalse(map.containsKey("skip_operator_only"));
// Xcontent is not affected by the value of skipOperatorOnlyState
original.skipOperatorOnlyState(original.skipOperatorOnlyState() == false);
assertEquals(map, convertRequestToMap(original));
// Nor does it serialise to streamInput
final BytesStreamOutput streamOutput = new BytesStreamOutput();
original.writeTo(streamOutput);
final RestoreSnapshotRequest deserialized = new RestoreSnapshotRequest(streamOutput.bytes().streamInput());
assertFalse(deserialized.skipOperatorOnlyState());
}
public void testToStringWillIncludeSkipOperatorOnlyState() {
RestoreSnapshotRequest original = createTestInstance();
assertThat(original.toString(), containsString("skipOperatorOnlyState"));
}
public void testRenameReplacementNameTooLong() {
RestoreSnapshotRequest request = createTestInstance();
request.indices("b".repeat(255));
request.renamePattern("b");
request.renameReplacement("1".repeat(randomIntBetween(266, 10_000)));
ActionRequestValidationException validation = request.validate();
assertNotNull(validation);
assertThat(validation.getMessage(), containsString("rename_replacement"));
}
private Map<String, Object> convertRequestToMap(RestoreSnapshotRequest request) throws IOException {
XContentBuilder builder = request.toXContent(XContentFactory.jsonBuilder(), new ToXContent.MapParams(Collections.emptyMap()));
try (
XContentParser parser = XContentType.JSON.xContent()
.createParser(NamedXContentRegistry.EMPTY, null, BytesReference.bytes(builder).streamInput())
) {
return parser.mapOrdered();
}
}
}
|
RestoreSnapshotRequestTests
|
java
|
spring-projects__spring-boot
|
module/spring-boot-r2dbc/src/test/java/org/springframework/boot/r2dbc/autoconfigure/R2dbcInitializationAutoConfigurationTests.java
|
{
"start": 1914,
"end": 5614
}
|
class ____ {
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(R2dbcInitializationAutoConfiguration.class))
.withPropertyValues("spring.r2dbc.generate-unique-name:true");
@Test
void whenNoConnectionFactoryIsAvailableThenAutoConfigurationBacksOff() {
this.contextRunner
.run((context) -> assertThat(context).doesNotHaveBean(AbstractScriptDatabaseInitializer.class));
}
@Test
void whenConnectionFactoryIsAvailableThenR2dbcInitializerIsAutoConfigured() {
this.contextRunner.withConfiguration(AutoConfigurations.of(R2dbcAutoConfiguration.class))
.run((context) -> assertThat(context).hasSingleBean(R2dbcScriptDatabaseInitializer.class));
}
@Test
void whenConnectionFactoryIsAvailableAndModeIsNeverThenInitializerIsNotAutoConfigured() {
this.contextRunner.withConfiguration(AutoConfigurations.of(R2dbcAutoConfiguration.class))
.withPropertyValues("spring.sql.init.mode:never")
.run((context) -> assertThat(context).doesNotHaveBean(AbstractScriptDatabaseInitializer.class));
}
@Test
void whenAnSqlInitializerIsDefinedThenInitializerIsNotAutoConfigured() {
this.contextRunner.withConfiguration(AutoConfigurations.of(R2dbcAutoConfiguration.class))
.withUserConfiguration(SqlDatabaseInitializerConfiguration.class)
.run((context) -> assertThat(context).hasSingleBean(AbstractScriptDatabaseInitializer.class)
.hasBean("customInitializer"));
}
@Test
void whenAnInitializerIsDefinedThenSqlInitializerIsStillAutoConfigured() {
this.contextRunner.withConfiguration(AutoConfigurations.of(R2dbcAutoConfiguration.class))
.withUserConfiguration(DatabaseInitializerConfiguration.class)
.run((context) -> assertThat(context).hasSingleBean(ApplicationScriptDatabaseInitializer.class)
.hasBean("customInitializer"));
}
@Test
void whenBeanIsAnnotatedAsDependingOnDatabaseInitializationThenItDependsOnR2dbcScriptDatabaseInitializer() {
this.contextRunner.withConfiguration(AutoConfigurations.of(R2dbcAutoConfiguration.class))
.withUserConfiguration(DependsOnInitializedDatabaseConfiguration.class)
.run((context) -> {
ConfigurableListableBeanFactory beanFactory = context.getBeanFactory();
BeanDefinition beanDefinition = beanFactory.getBeanDefinition(
"r2dbcInitializationAutoConfigurationTests.DependsOnInitializedDatabaseConfiguration");
assertThat(beanDefinition.getDependsOn()).containsExactlyInAnyOrder("r2dbcScriptDatabaseInitializer");
});
}
@Test
void whenBeanIsAnnotatedAsDependingOnDatabaseInitializationThenItDependsOnDataSourceScriptDatabaseInitializer() {
this.contextRunner.withConfiguration(AutoConfigurations.of(R2dbcAutoConfiguration.class))
.withUserConfiguration(DependsOnInitializedDatabaseConfiguration.class)
.run((context) -> {
ConfigurableListableBeanFactory beanFactory = context.getBeanFactory();
BeanDefinition beanDefinition = beanFactory.getBeanDefinition(
"r2dbcInitializationAutoConfigurationTests.DependsOnInitializedDatabaseConfiguration");
assertThat(beanDefinition.getDependsOn()).containsExactlyInAnyOrder("r2dbcScriptDatabaseInitializer");
});
}
@Test
void whenAConnectionFactoryIsAvailableAndSpringR2dbcIsNotThenAutoConfigurationBacksOff() {
this.contextRunner.withConfiguration(AutoConfigurations.of(R2dbcAutoConfiguration.class))
.withClassLoader(new FilteredClassLoader(DatabasePopulator.class))
.run((context) -> {
assertThat(context).hasSingleBean(ConnectionFactory.class);
assertThat(context).doesNotHaveBean(AbstractScriptDatabaseInitializer.class);
});
}
@Configuration(proxyBeanMethods = false)
static
|
R2dbcInitializationAutoConfigurationTests
|
java
|
apache__maven
|
its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng3558PropertyEscapingTest.java
|
{
"start": 1253,
"end": 2622
}
|
class ____ extends AbstractMavenIntegrationTestCase {
@Test
public void testPropertyEscaping() throws Exception {
File testDir = extractResources("/mng-3558-property-escaping");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.setAutoclean(false);
verifier.deleteDirectory("target");
verifier.addCliArgument("validate");
verifier.execute();
verifier.verifyErrorFreeLog();
// Verify model properties
Properties modelProps = verifier.loadProperties("target/property-values.properties");
assertEquals("${test.value}", modelProps.getProperty("project.properties.escaped.property"));
assertEquals("prefix-${test.value}-suffix", modelProps.getProperty("project.properties.escaped.with.context"));
assertEquals("interpolated-value", modelProps.getProperty("project.properties.normal.property"));
// Verify plugin configuration
Properties configProps = verifier.loadProperties("target/config-values.properties");
assertEquals("${test.value}", configProps.getProperty("mapParam.param"));
assertEquals("prefix-${test.value}-suffix", configProps.getProperty("mapParam.paramWithContext"));
assertEquals("interpolated-value", configProps.getProperty("mapParam.normalParam"));
}
}
|
MavenITmng3558PropertyEscapingTest
|
java
|
quarkusio__quarkus
|
extensions/vertx/runtime/src/main/java/io/quarkus/vertx/runtime/jackson/ByteArrayDeserializer.java
|
{
"start": 971,
"end": 1439
}
|
class ____ extends JsonDeserializer<byte[]> {
@Override
public byte[] deserialize(JsonParser p, DeserializationContext ctxt) throws IOException, JsonProcessingException {
String text = p.getText();
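// BASE64_DECODER is assumed to be a shared java.util.Base64.Decoder declared elsewhere in this class (outside this span).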
try {
return BASE64_DECODER.decode(text);
} catch (IllegalArgumentException e) {
throw new InvalidFormatException(p, "Expected a base64 encoded byte array", text, byte[].class);
}
}
}
|
ByteArrayDeserializer
|
java
|
netty__netty
|
transport-native-kqueue/src/test/java/io/netty/channel/kqueue/KQueueETSocketHalfClosedTest.java
|
{
"start": 905,
"end": 1163
}
|
class ____ extends SocketHalfClosedTest {
@Override
protected List<TestsuitePermutation.BootstrapComboFactory<ServerBootstrap, Bootstrap>> newFactories() {
return KQueueSocketTestPermutation.INSTANCE.socket();
}
}
|
KQueueETSocketHalfClosedTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/query/joinfetch/Comment.java
|
{
"start": 208,
"end": 929
}
|
class ____ {
private String text;
private Item item;
private Calendar timestamp;
private Long id;
public Item getItem() {
return item;
}
public void setItem(Item item) {
this.item = item;
}
public Calendar getTimestamp() {
return timestamp;
}
public void setTimestamp(Calendar timestamp) {
this.timestamp = timestamp;
}
Comment() {}
public Comment(Item item, String comment) {
this.text = comment;
this.item = item;
item.getComments().add(this);
this.timestamp = Calendar.getInstance();
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getText() {
return text;
}
public void setText(String text) {
this.text = text;
}
}
|
Comment
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java
|
{
"start": 3128,
"end": 3173
}
|
class ____ not threadsafe.
* </p>
*/
public
|
are
|