language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
elastic__elasticsearch
|
test/framework/src/main/java/org/elasticsearch/search/DummyQueryBuilder.java
|
{
"start": 1022,
"end": 2325
}
|
class ____ extends AbstractQueryBuilder<DummyQueryBuilder> {
public static final String NAME = "dummy";
public DummyQueryBuilder() {}
public DummyQueryBuilder(StreamInput in) throws IOException {
super(in);
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
// only the superclass has state
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME).endObject();
}
public static DummyQueryBuilder fromXContent(XContentParser parser) throws IOException {
XContentParser.Token token = parser.nextToken();
assert token == XContentParser.Token.END_OBJECT;
return new DummyQueryBuilder();
}
@Override
protected Query doToQuery(SearchExecutionContext context) throws IOException {
return new DummyQuery();
}
@Override
protected int doHashCode() {
return 0;
}
@Override
protected boolean doEquals(DummyQueryBuilder other) {
return true;
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersion.zero();
}
}
|
DummyQueryBuilder
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/urls/Urls_assertHasAuthority_Test.java
|
{
"start": 1044,
"end": 3021
}
|
class ____ extends UrlsBaseTest {
@Test
void should_fail_if_actual_is_null() {
// GIVEN
URL url = null;
String expectedAuthority = "http://www.helloworld.org";
// WHEN
var assertionError = expectAssertionError(() -> urls.assertHasAuthority(info, url, expectedAuthority));
// THEN
then(assertionError).hasMessage(actualIsNull());
}
@Test
void should_pass_if_actual_url_has_the_expected_authority() throws MalformedURLException {
// GIVEN
URL url = new URL("http://www.helloworld.org:8080");
String expectedAuthority = "www.helloworld.org:8080";
// WHEN/THEN
urls.assertHasAuthority(info, url, expectedAuthority);
}
@Test
void should_pass_if_actual_url_with_path_has_the_expected_authority() throws MalformedURLException {
// GIVEN
URL url = new URL("http://www.helloworld.org:8080/pages");
String expectedAuthority = "www.helloworld.org:8080";
// WHEN/THEN
urls.assertHasAuthority(info, url, expectedAuthority);
}
@Test
void should_fail_if_actual_authority_is_not_the_expected_one_because_ports_differ() throws MalformedURLException {
// GIVEN
URL url = new URL("http://example.com:8080/pages/");
String expectedAuthority = "example.com:8888";
// WHEN
var assertionError = expectAssertionError(() -> urls.assertHasAuthority(info, url, expectedAuthority));
// THEN
then(assertionError).hasMessage(shouldHaveAuthority(url, expectedAuthority).create());
}
@Test
void should_fail_if_actual_authority_is_not_the_expected_one_because_hosts_differ() throws MalformedURLException {
// GIVEN
URL url = new URL("http://example.com:8080/pages/");
String expectedAuthority = "example.org:8080";
// WHEN
var assertionError = expectAssertionError(() -> urls.assertHasAuthority(info, url, expectedAuthority));
// THEN
then(assertionError).hasMessage(shouldHaveAuthority(url, expectedAuthority).create());
}
}
|
Urls_assertHasAuthority_Test
|
java
|
quarkusio__quarkus
|
extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/config/build/MetricsBuildConfig.java
|
{
"start": 249,
"end": 904
}
|
interface ____ {
/**
* Enable metrics with OpenTelemetry.
* <p>
* This property is not available in the Open Telemetry SDK. It's Quarkus specific.
* <p>
* Support for OpenTelemetry Metrics will be enabled if this value is true.
* The OpenTelemetry SDK ( {@link io.quarkus.opentelemetry.runtime.config.build.OTelBuildConfig#enabled()} )
* is enabled by default and if disabled, OpenTelemetry Metrics will also be disabled.
*/
@WithDefault("false")
Optional<Boolean> enabled();
/**
* The Metrics exporter to use.
*/
@WithDefault(CDI_VALUE)
List<String> exporter();
}
|
MetricsBuildConfig
|
java
|
jhy__jsoup
|
src/main/java/org/jsoup/select/Evaluator.java
|
{
"start": 20781,
"end": 21446
}
|
class ____ extends Evaluator {
@Override
public boolean matches(Element root, Element element) {
final Element p = element.parent();
if (p==null || p instanceof Document) return false;
int pos = 0;
Element next = p.firstElementChild();
while (next != null) {
if (next.normalName().equals(element.normalName()))
pos++;
if (pos > 1)
break;
next = next.nextElementSibling();
}
return pos == 1;
}
@Override
public String toString() {
return ":only-of-type";
}
}
public static final
|
IsOnlyOfType
|
java
|
apache__camel
|
catalog/camel-route-parser/src/main/java/org/apache/camel/parser/helper/CamelJavaTreeParserHelper.java
|
{
"start": 3109,
"end": 18282
}
|
class ____ {
private final CamelCatalog camelCatalog = new DefaultCamelCatalog(true);
public List<CamelNodeDetails> parseCamelRouteTree(
JavaClassSource clazz, String fullyQualifiedFileName,
MethodSource<JavaClassSource> configureMethod) {
// find any from which is the start of the route
CamelNodeDetailsFactory nodeFactory = CamelNodeDetailsFactory.newInstance();
CamelNodeDetails route = nodeFactory.newNode(null, "route");
if (configureMethod != null) {
MethodDeclaration md = (MethodDeclaration) configureMethod.getInternal();
Block block = md.getBody();
if (block != null) {
for (Object statement : md.getBody().statements()) {
// must be a method call expression
if (statement instanceof ExpressionStatement es) {
Expression exp = es.getExpression();
boolean valid = isFromCamelRoute(exp);
if (valid) {
parseExpression(nodeFactory, fullyQualifiedFileName, clazz, configureMethod, block, exp, route);
}
}
}
}
}
List<CamelNodeDetails> answer = new ArrayList<>();
if (route.getOutputs() == null || route.getOutputs().isEmpty()) {
// okay no routes found
return answer;
}
// now parse the route node and build the correct model/tree structure of the EIPs
// re-create factory as we rebuild the tree
nodeFactory = CamelNodeDetailsFactory.newInstance();
CamelNodeDetails parent = route.getOutputs().get(0);
for (int i = 0; i < route.getOutputs().size(); i++) {
CamelNodeDetails node = route.getOutputs().get(i);
String name = node.getName();
if ("from".equals(name)) {
CamelNodeDetails from = nodeFactory.copyNode(null, "from", node);
from.setFileName(fullyQualifiedFileName);
answer.add(from);
parent = from;
} else if ("routeId".equals(name)) {
// should be set on the parent
parent.setRouteId(node.getRouteId());
} else if ("end".equals(name) || "endParent".equals(name) || "endRest".equals(name)
|| "endDoTry".equals(name)) {
// parent should be grand parent
if (parent.getParent() != null) {
parent = parent.getParent();
}
} else if ("endChoice".equals(name)) {
// we are in a choice block so parent should be the first choice up the parent tree
while (!"from".equals(parent.getName()) && !"choice".equals(parent.getName())) {
if (parent.getParent() != null) {
parent = parent.getParent();
} else {
break;
}
}
} else if ("choice".equals(name)) {
// special for some EIPs
CamelNodeDetails output = nodeFactory.copyNode(parent, name, node);
parent.addOutput(output);
parent = output;
} else if ("when".equals(name) || "otherwise".equals(name)) {
// we are in a choice block so parent should be the first choice up the parent tree
while (!"from".equals(parent.getName()) && !"choice".equals(parent.getName())) {
if (parent.getParent() != null) {
parent = parent.getParent();
} else {
break;
}
}
} else {
boolean hasOutput = hasOutput(name);
if (hasOutput) {
// has output so add as new child node
CamelNodeDetails output = nodeFactory.copyNode(parent, name, node);
parent.addOutput(output);
parent = output;
} else {
// add straight to itself
CamelNodeDetails output = nodeFactory.copyNode(parent, name, node);
parent.addOutput(output);
}
}
}
return answer;
}
private boolean isFromCamelRoute(Expression exp) {
String rootMethodName = null;
// find out if this is from a Camel route (eg from, route etc.)
Expression sub = exp;
while (sub instanceof MethodInvocation) {
sub = ((MethodInvocation) sub).getExpression();
if (sub instanceof MethodInvocation) {
Expression parent = ((MethodInvocation) sub).getExpression();
if (parent == null) {
break;
}
}
}
if (sub instanceof MethodInvocation methodInvocation) {
rootMethodName = methodInvocation.getName().getIdentifier();
} else if (sub instanceof SimpleName simpleName) {
rootMethodName = simpleName.getIdentifier();
} else if (sub == null && exp instanceof MethodInvocation methodInvocation) {
rootMethodName = methodInvocation.getName().getIdentifier();
}
// a route starts either via from or route
return "from".equals(rootMethodName) || "route".equals(rootMethodName);
}
private boolean hasOutput(String name) {
String json = camelCatalog.modelJSonSchema(name);
return JsonMapper.generateEipModel(json).isOutput();
}
private boolean hasInput(String name) {
String json = camelCatalog.modelJSonSchema(name);
return JsonMapper.generateEipModel(json).isInput();
}
private static CamelNodeDetails grandParent(CamelNodeDetails node, String parentName) {
if (node == null) {
return null;
}
if (parentName.equals(node.getName())) {
return node;
} else {
return grandParent(node.getParent(), parentName);
}
}
private void parseExpression(
CamelNodeDetailsFactory nodeFactory, String fullyQualifiedFileName,
JavaClassSource clazz, MethodSource<JavaClassSource> configureMethod, Block block,
Expression exp, CamelNodeDetails node) {
if (exp == null) {
return;
}
if (exp instanceof MethodInvocation mi) {
node = doParseCamelModels(nodeFactory, fullyQualifiedFileName, clazz, configureMethod, block, mi, node);
// if the method was called on another method, then recursive
exp = mi.getExpression();
parseExpression(nodeFactory, fullyQualifiedFileName, clazz, configureMethod, block, exp, node);
}
}
private CamelNodeDetails doParseCamelModels(
CamelNodeDetailsFactory nodeFactory, String fullyQualifiedFileName,
JavaClassSource clazz, MethodSource<JavaClassSource> configureMethod, Block block,
MethodInvocation mi, CamelNodeDetails node) {
String name = mi.getName().getIdentifier();
// special for Java DSL having some endXXX
boolean isEnd = "end".equals(name) || "endChoice".equals(name) || "endDoTry".equals(name)
|| "endParent".equals(name) || "endRest".equals(name);
boolean isRoute = "route".equals(name) || "from".equals(name) || "routeId".equals(name);
// must be an eip model that has either input or output as we only want to track processors (also accept from)
boolean isEip = camelCatalog.findModelNames().contains(name) && (hasInput(name) || hasOutput(name));
// only include if its a known Camel model (dont include languages)
if (isEnd || isRoute || isEip) {
CamelNodeDetails newNode = nodeFactory.newNode(node, name);
// include source code details
int pos = mi.getName().getStartPosition();
int line = findLineNumber(fullyQualifiedFileName, pos);
if (line > -1) {
newNode.setLineNumber(Integer.toString(line));
}
pos = mi.getName().getStartPosition() + mi.getName().getLength();
line = findLineNumber(fullyQualifiedFileName, pos);
if (line > -1) {
newNode.setLineNumberEnd(Integer.toString(line));
}
newNode.setFileName(fullyQualifiedFileName);
newNode.setClassName(clazz.getQualifiedName());
newNode.setMethodName(configureMethod.getName());
if ("routeId".equals(name)) {
// grab the route id
List<?> args = mi.arguments();
if (args != null && !args.isEmpty()) {
// the first argument has the route id
Expression exp = (Expression) args.get(0);
String routeId = getLiteralValue(clazz, block, exp);
if (routeId != null) {
newNode.setRouteId(routeId);
}
}
}
node.addPreliminaryOutput(newNode);
return node;
}
return node;
}
/**
* @deprecated currently not in use
*/
@Deprecated
public static String getLiteralValue(JavaClassSource clazz, Block block, Expression expression) {
// unwrap parenthesis
if (expression instanceof ParenthesizedExpression) {
expression = ((ParenthesizedExpression) expression).getExpression();
}
if (expression instanceof StringLiteral) {
return ((StringLiteral) expression).getLiteralValue();
} else if (expression instanceof BooleanLiteral) {
return String.valueOf(((BooleanLiteral) expression).booleanValue());
} else if (expression instanceof NumberLiteral) {
return ((NumberLiteral) expression).getToken();
} else if (expression instanceof TextBlock textBlock) {
return textBlock.getLiteralValue();
}
// if it's a method invocation then add a dummy value assuming the method invocation will return a valid response
if (expression instanceof MethodInvocation) {
String name = ((MethodInvocation) expression).getName().getIdentifier();
return "{{" + name + "}}";
}
// if it's a qualified name (usually a constant field in another class)
// then add a dummy value as we cannot find the field value in other classes and maybe even outside the
// source code we have access to
if (expression instanceof QualifiedName qn) {
String name = qn.getFullyQualifiedName();
return "{{" + name + "}}";
}
if (expression instanceof SimpleName) {
FieldSource<JavaClassSource> field = ParserCommon.getField(clazz, block, (SimpleName) expression);
if (field != null) {
// is the field annotated with a Camel endpoint
if (field.getAnnotations() != null) {
for (Annotation<JavaClassSource> ann : field.getAnnotations()) {
boolean valid = "org.apache.camel.EndpointInject".equals(ann.getQualifiedName())
|| "org.apache.camel.cdi.Uri".equals(ann.getQualifiedName());
if (valid) {
Expression exp = (Expression) ann.getInternal();
if (exp instanceof SingleMemberAnnotation) {
exp = ((SingleMemberAnnotation) exp).getValue();
} else if (exp instanceof NormalAnnotation) {
List<?> values = ((NormalAnnotation) exp).values();
for (Object value : values) {
MemberValuePair pair = (MemberValuePair) value;
if ("uri".equals(pair.getName().toString())) {
exp = pair.getValue();
break;
}
}
}
if (exp != null) {
return getLiteralValue(clazz, block, exp);
}
}
}
}
// is the field an org.apache.camel.Endpoint type?
return endpointTypeCheck(clazz, block, field);
} else {
// we could not find the field in this class/method, so its maybe from some other super class, so insert a dummy value
final String fieldName = ((SimpleName) expression).getIdentifier();
return "{{" + fieldName + "}}";
}
} else if (expression instanceof InfixExpression ie) {
return getValueFromExpression(clazz, block, ie);
}
return null;
}
private static String getValueFromExpression(JavaClassSource clazz, Block block, InfixExpression ie) {
String answer = null;
// is it a string that is concat together?
if (InfixExpression.Operator.PLUS.equals(ie.getOperator())) {
String val1 = getLiteralValue(clazz, block, ie.getLeftOperand());
String val2 = getLiteralValue(clazz, block, ie.getRightOperand());
// if numeric then we plus the values, otherwise we string concat
boolean numeric = ParserCommon.isNumericOperator(clazz, block, ie.getLeftOperand())
&& ParserCommon.isNumericOperator(clazz, block, ie.getRightOperand());
if (numeric) {
long num1 = val1 != null ? Long.parseLong(val1) : 0;
long num2 = val2 != null ? Long.parseLong(val2) : 0;
answer = Long.toString(num1 + num2);
} else {
answer = (val1 != null ? val1 : "") + (val2 != null ? val2 : "");
}
if (!answer.isEmpty()) {
// include extended when we concat on 2 or more lines
List<?> extended = ie.extendedOperands();
if (extended != null) {
StringBuilder answerBuilder = new StringBuilder(answer);
for (Object ext : extended) {
String val3 = getLiteralValue(clazz, block, (Expression) ext);
if (numeric) {
long num3 = val3 != null ? Long.parseLong(val3) : 0;
long num = Long.parseLong(answerBuilder.toString());
answerBuilder = new StringBuilder(Long.toString(num + num3));
} else {
answerBuilder.append(val3 != null ? val3 : "");
}
}
answer = answerBuilder.toString();
}
}
}
return answer;
}
}
|
CamelJavaTreeParserHelper
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/shell/TestHdfsTextCommand.java
|
{
"start": 1557,
"end": 1658
}
|
class ____ the logic for displaying the binary formats supported
* by the Text command.
*/
public
|
tests
|
java
|
apache__camel
|
components/camel-thrift/src/test/java/org/apache/camel/component/thrift/generated/Calculator.java
|
{
"start": 159468,
"end": 160535
}
|
class ____ extends org.apache.thrift.scheme.TupleScheme<zip_args> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, zip_args struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, zip_args struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
}
}
private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme())
? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
}
}
@SuppressWarnings({ "cast", "rawtypes", "serial", "unchecked", "unused" })
public static
|
zip_argsTupleScheme
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/util/internal/HttpInputStreamUtil.java
|
{
"start": 7604,
"end": 8338
}
|
class ____ {
private byte @Nullable [] bytes = null;
private Status status;
public Result() {
this(Status.ERROR);
}
public Result(final Status status) {
this.status = status;
}
/**
* Returns the data if the status is {@link Status#SUCCESS}.
* <p>
* In any other case the result is {@code null}.
* </p>
* @return The contents of the HTTP response or null if empty.
*/
public @Nullable InputStream getInputStream() {
return bytes != null ? new ByteArrayInputStream(bytes) : null;
}
public Status getStatus() {
return status;
}
}
}
|
Result
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/encoded/ClientWithQueryParamAndEncodedTest.java
|
{
"start": 3645,
"end": 3787
}
|
interface ____ {
@GET
String call(@QueryParam("subQuery") String subQuery);
}
@Encoded
public
|
SubClientWithoutEncoded
|
java
|
apache__rocketmq
|
client/src/test/java/org/apache/rocketmq/client/consumer/rebalance/AllocateMessageQueueByConfigTest.java
|
{
"start": 1079,
"end": 2796
}
|
class ____ extends TestCase {
public void testAllocateMessageQueueByConfig() {
List<String> consumerIdList = createConsumerIdList(2);
List<MessageQueue> messageQueueList = createMessageQueueList(4);
AllocateMessageQueueByConfig allocateStrategy = new AllocateMessageQueueByConfig();
allocateStrategy.setMessageQueueList(messageQueueList);
Map<String, int[]> consumerAllocateQueue = new HashMap<>(consumerIdList.size());
for (String consumerId : consumerIdList) {
List<MessageQueue> queues = allocateStrategy.allocate("", consumerId, messageQueueList, consumerIdList);
int[] queueIds = new int[queues.size()];
for (int i = 0; i < queues.size(); i++) {
queueIds[i] = queues.get(i).getQueueId();
}
consumerAllocateQueue.put(consumerId, queueIds);
}
Assert.assertArrayEquals(new int[] {0, 1, 2, 3}, consumerAllocateQueue.get("CID_PREFIX0"));
Assert.assertArrayEquals(new int[] {0, 1, 2, 3}, consumerAllocateQueue.get("CID_PREFIX1"));
}
private List<String> createConsumerIdList(int size) {
List<String> consumerIdList = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
consumerIdList.add("CID_PREFIX" + i);
}
return consumerIdList;
}
private List<MessageQueue> createMessageQueueList(int size) {
List<MessageQueue> messageQueueList = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
MessageQueue mq = new MessageQueue("topic", "brokerName", i);
messageQueueList.add(mq);
}
return messageQueueList;
}
}
|
AllocateMessageQueueByConfigTest
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KafkaEndpointBuilderFactory.java
|
{
"start": 47893,
"end": 59061
}
|
class ____ of the partition assignment strategy that the client
* will use to distribute partition ownership amongst consumer instances
* when group management is used.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: org.apache.kafka.clients.consumer.RangeAssignor
* Group: consumer
*
* @param partitionAssignor the value to set
* @return the dsl builder
*/
default KafkaEndpointConsumerBuilder partitionAssignor(String partitionAssignor) {
doSetProperty("partitionAssignor", partitionAssignor);
return this;
}
/**
* What to do if kafka threw an exception while polling for new
* messages. Will by default use the value from the component
* configuration unless an explicit value has been configured on the
* endpoint level. DISCARD will discard the message and continue to poll
* the next message. ERROR_HANDLER will use Camel's error handler to
* process the exception, and afterwards continue to poll the next
* message. RECONNECT will re-connect the consumer and try polling the
* message again. RETRY will let the consumer retry poll the same
* message again. STOP will stop the consumer (it has to be manually
* started/restarted if the consumer should be able to consume messages
* again).
*
* The option is a:
* <code>org.apache.camel.component.kafka.PollOnError</code> type.
*
* Default: ERROR_HANDLER
* Group: consumer
*
* @param pollOnError the value to set
* @return the dsl builder
*/
default KafkaEndpointConsumerBuilder pollOnError(org.apache.camel.component.kafka.PollOnError pollOnError) {
doSetProperty("pollOnError", pollOnError);
return this;
}
/**
* What to do if kafka threw an exception while polling for new
* messages. Will by default use the value from the component
* configuration unless an explicit value has been configured on the
* endpoint level. DISCARD will discard the message and continue to poll
* the next message. ERROR_HANDLER will use Camel's error handler to
* process the exception, and afterwards continue to poll the next
* message. RECONNECT will re-connect the consumer and try polling the
* message again. RETRY will let the consumer retry poll the same
* message again. STOP will stop the consumer (it has to be manually
* started/restarted if the consumer should be able to consume messages
* again).
*
* The option will be converted to a
* <code>org.apache.camel.component.kafka.PollOnError</code> type.
*
* Default: ERROR_HANDLER
* Group: consumer
*
* @param pollOnError the value to set
* @return the dsl builder
*/
default KafkaEndpointConsumerBuilder pollOnError(String pollOnError) {
doSetProperty("pollOnError", pollOnError);
return this;
}
/**
* The timeout used when polling the KafkaConsumer.
*
* The option is a: <code>java.lang.Long</code> type.
*
* Default: 5000
* Group: consumer
*
* @param pollTimeoutMs the value to set
* @return the dsl builder
*/
default KafkaEndpointConsumerBuilder pollTimeoutMs(Long pollTimeoutMs) {
doSetProperty("pollTimeoutMs", pollTimeoutMs);
return this;
}
/**
* The timeout used when polling the KafkaConsumer.
*
* The option will be converted to a <code>java.lang.Long</code> type.
*
* Default: 5000
* Group: consumer
*
* @param pollTimeoutMs the value to set
* @return the dsl builder
*/
default KafkaEndpointConsumerBuilder pollTimeoutMs(String pollTimeoutMs) {
doSetProperty("pollTimeoutMs", pollTimeoutMs);
return this;
}
/**
* Whether to eager validate that broker host:port is valid and can be
* DNS resolved to known host during starting this consumer. If the
* validation fails, then an exception is thrown, which makes Camel fail
* fast. Disabling this will postpone the validation after the consumer
* is started, and Camel will keep re-connecting in case of validation
* or DNS resolution error.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: consumer
*
* @param preValidateHostAndPort the value to set
* @return the dsl builder
*/
default KafkaEndpointConsumerBuilder preValidateHostAndPort(boolean preValidateHostAndPort) {
doSetProperty("preValidateHostAndPort", preValidateHostAndPort);
return this;
}
/**
* Whether to eager validate that broker host:port is valid and can be
* DNS resolved to known host during starting this consumer. If the
* validation fails, then an exception is thrown, which makes Camel fail
* fast. Disabling this will postpone the validation after the consumer
* is started, and Camel will keep re-connecting in case of validation
* or DNS resolution error.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: consumer
*
* @param preValidateHostAndPort the value to set
* @return the dsl builder
*/
default KafkaEndpointConsumerBuilder preValidateHostAndPort(String preValidateHostAndPort) {
doSetProperty("preValidateHostAndPort", preValidateHostAndPort);
return this;
}
/**
* Set if KafkaConsumer should read from the beginning or the end on
* startup: SeekPolicy.BEGINNING: read from the beginning.
* SeekPolicy.END: read from the end.
*
* The option is a:
* <code>org.apache.camel.component.kafka.SeekPolicy</code> type.
*
* Group: consumer
*
* @param seekTo the value to set
* @return the dsl builder
*/
default KafkaEndpointConsumerBuilder seekTo(org.apache.camel.component.kafka.SeekPolicy seekTo) {
doSetProperty("seekTo", seekTo);
return this;
}
/**
* Set if KafkaConsumer should read from the beginning or the end on
* startup: SeekPolicy.BEGINNING: read from the beginning.
* SeekPolicy.END: read from the end.
*
* The option will be converted to a
* <code>org.apache.camel.component.kafka.SeekPolicy</code> type.
*
* Group: consumer
*
* @param seekTo the value to set
* @return the dsl builder
*/
default KafkaEndpointConsumerBuilder seekTo(String seekTo) {
doSetProperty("seekTo", seekTo);
return this;
}
/**
* The timeout used to detect failures when using Kafka's group
* management facilities.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 45000
* Group: consumer
*
* @param sessionTimeoutMs the value to set
* @return the dsl builder
*/
default KafkaEndpointConsumerBuilder sessionTimeoutMs(Integer sessionTimeoutMs) {
doSetProperty("sessionTimeoutMs", sessionTimeoutMs);
return this;
}
/**
* The timeout used to detect failures when using Kafka's group
* management facilities.
*
* The option will be converted to a <code>java.lang.Integer</code>
* type.
*
* Default: 45000
* Group: consumer
*
* @param sessionTimeoutMs the value to set
* @return the dsl builder
*/
default KafkaEndpointConsumerBuilder sessionTimeoutMs(String sessionTimeoutMs) {
doSetProperty("sessionTimeoutMs", sessionTimeoutMs);
return this;
}
/**
* This enables the use of a specific Avro reader for use with the in
* multiple Schema registries documentation with Avro Deserializers
* implementation. This option is only available externally (not
* standard Apache Kafka).
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param specificAvroReader the value to set
* @return the dsl builder
*/
default KafkaEndpointConsumerBuilder specificAvroReader(boolean specificAvroReader) {
doSetProperty("specificAvroReader", specificAvroReader);
return this;
}
/**
* This enables the use of a specific Avro reader for use with the in
* multiple Schema registries documentation with Avro Deserializers
* implementation. This option is only available externally (not
* standard Apache Kafka).
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param specificAvroReader the value to set
* @return the dsl builder
*/
default KafkaEndpointConsumerBuilder specificAvroReader(String specificAvroReader) {
doSetProperty("specificAvroReader", specificAvroReader);
return this;
}
/**
* Whether the topic is a pattern (regular expression). This can be used
* to subscribe to dynamic number of topics matching the pattern.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param topicIsPattern the value to set
* @return the dsl builder
*/
default KafkaEndpointConsumerBuilder topicIsPattern(boolean topicIsPattern) {
doSetProperty("topicIsPattern", topicIsPattern);
return this;
}
/**
* Whether the topic is a pattern (regular expression). This can be used
* to subscribe to dynamic number of topics matching the pattern.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param topicIsPattern the value to set
* @return the dsl builder
*/
default KafkaEndpointConsumerBuilder topicIsPattern(String topicIsPattern) {
doSetProperty("topicIsPattern", topicIsPattern);
return this;
}
/**
* Deserializer
|
name
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/test/streaming/runtime/BufferTimeoutITCase.java
|
{
"start": 2310,
"end": 5959
}
|
class ____ extends AbstractTestBaseJUnit4 {
@Rule public final SharedObjects sharedObjects = SharedObjects.create();
/**
* The test verifies that it is possible to disable explicit buffer flushing. It checks that
* OutputFlasher thread would not be started when the task is running. But this doesn't
* guarantee that the unfinished buffers can not be flushed by another events.
*/
@Test
public void testDisablingBufferTimeout() throws Exception {
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(1);
env.setBufferTimeout(-1);
final SharedReference<ArrayList<Integer>> results = sharedObjects.add(new ArrayList<>());
env.fromSource(
createSingleElementIdleSource(),
WatermarkStrategy.<Integer>noWatermarks(),
"v2-src")
.slotSharingGroup("source")
.sinkTo(createResultCollectingSink(results))
.slotSharingGroup("sink");
final JobClient jobClient = env.executeAsync();
CommonTestUtils.waitForAllTaskRunning(
MINI_CLUSTER_RESOURCE.getMiniCluster(), jobClient.getJobID(), false);
assertTrue(
RecordWriter.DEFAULT_OUTPUT_FLUSH_THREAD_NAME + " thread is unexpectedly running",
Thread.getAllStackTraces().keySet().stream()
.noneMatch(
thread ->
thread.getName()
.startsWith(
RecordWriter
.DEFAULT_OUTPUT_FLUSH_THREAD_NAME)));
}
/** Sink V2 that collects results in shared reference. */
private static Sink<Integer> createResultCollectingSink(
SharedReference<ArrayList<Integer>> results) {
return context ->
new SinkWriter<>() {
@Override
public void write(Integer element, Context ctx) {
results.get().add(element);
}
@Override
public void flush(boolean endOfInput) {}
@Override
public void close() {}
};
}
/** Source V2 that emits one element then stays idle (unbounded). */
private static AbstractTestSource<Integer> createSingleElementIdleSource() {
return new AbstractTestSource<Integer>() {
@Override
public SourceReader<Integer, TestSplit> createReader(SourceReaderContext ctx) {
return new TestSourceReader<>(ctx) {
private boolean emitted = false;
@Override
public InputStatus pollNext(ReaderOutput<Integer> out) {
if (!emitted) {
out.collect(1);
emitted = true;
}
return InputStatus.NOTHING_AVAILABLE;
}
};
}
@Override
public SplitEnumerator<TestSplit, Void> createEnumerator(
SplitEnumeratorContext<TestSplit> context) {
return new SingleSplitEnumerator(context);
}
@Override
public Boundedness getBoundedness() {
return Boundedness.CONTINUOUS_UNBOUNDED;
}
};
}
}
|
BufferTimeoutITCase
|
java
|
quarkusio__quarkus
|
independent-projects/bootstrap/app-model/src/main/java/io/quarkus/bootstrap/workspace/DefaultArtifactSources.java
|
{
"start": 133,
"end": 1477
}
|
class ____ implements ArtifactSources, Serializable {
private static final long serialVersionUID = 2053702489268820757L;
private final String classifier;
private final Collection<SourceDir> sources;
private final Collection<SourceDir> resources;
public DefaultArtifactSources(String classifier, Collection<SourceDir> sources, Collection<SourceDir> resources) {
this.classifier = Objects.requireNonNull(classifier, "The classifier is null");
this.sources = sources;
this.resources = resources;
}
@Override
public String getClassifier() {
return classifier;
}
public void addSources(SourceDir src) {
this.sources.add(src);
}
@Override
public Collection<SourceDir> getSourceDirs() {
return sources;
}
public void addResources(SourceDir src) {
this.resources.add(src);
}
@Override
public Collection<SourceDir> getResourceDirs() {
return resources;
}
@Override
public String toString() {
final StringBuilder s = new StringBuilder();
s.append(classifier);
if (s.length() > 0) {
s.append(' ');
}
s.append("sources: ").append(sources);
s.append(" resources: ").append(resources);
return s.toString();
}
}
|
DefaultArtifactSources
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/query/sqm/mutation/internal/temptable/GlobalTemporaryTableInsertStrategy.java
|
{
"start": 1136,
"end": 3202
}
|
class ____ extends GlobalTemporaryTableStrategy implements SqmMultiTableInsertStrategy {
public GlobalTemporaryTableInsertStrategy(EntityMappingType rootEntityDescriptor, RuntimeModelCreationContext runtimeModelCreationContext) {
this(
rootEntityDescriptor,
requireGlobalTemporaryTableStrategy( runtimeModelCreationContext.getDialect() ),
runtimeModelCreationContext
);
}
private GlobalTemporaryTableInsertStrategy(
EntityMappingType rootEntityDescriptor,
TemporaryTableStrategy temporaryTableStrategy,
RuntimeModelCreationContext runtimeModelCreationContext) {
this(
TemporaryTable.createEntityTable(
runtimeModelCreationContext.getMetadata()
.getEntityBinding( rootEntityDescriptor.getEntityName() ),
basename -> temporaryTableStrategy.adjustTemporaryTableName( TemporaryTable.ENTITY_TABLE_PREFIX + basename ),
TemporaryTableKind.GLOBAL,
runtimeModelCreationContext.getDialect(),
runtimeModelCreationContext
),
runtimeModelCreationContext.getSessionFactory()
);
}
public GlobalTemporaryTableInsertStrategy(
TemporaryTable entityTable,
SessionFactoryImplementor sessionFactory) {
super( entityTable, sessionFactory );
}
@Override
public MultiTableHandlerBuildResult buildHandler(SqmInsertStatement<?> sqmInsertStatement, DomainParameterXref domainParameterXref, DomainQueryExecutionContext context) {
final MutableObject<JdbcParameterBindings> firstJdbcParameterBindings = new MutableObject<>();
final InsertHandler multiTableHandler = new TableBasedInsertHandler(
sqmInsertStatement,
domainParameterXref,
getTemporaryTable(),
getTemporaryTableStrategy(),
false,
// generally a global temp table should already track a Connection-specific uid,
// but just in case a particular env needs it...
session -> session.getSessionIdentifier().toString(),
context,
firstJdbcParameterBindings
);
return new MultiTableHandlerBuildResult( multiTableHandler, firstJdbcParameterBindings.get() );
}
}
|
GlobalTemporaryTableInsertStrategy
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_1029/ErroneousIssue1029Mapper.java
|
{
"start": 1395,
"end": 2380
}
|
/**
 * Simple bean exposing regular read/write properties plus two read-only
 * "computed" properties ({@code outdated} and {@code computedMapping}).
 */
class ____ {

    private Long id;
    private LocalDateTime lastUpdated;
    private String knownProp;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getKnownProp() {
        return knownProp;
    }

    public void setKnownProp(String knownProp) {
        this.knownProp = knownProp;
    }

    public LocalDateTime getLastUpdated() {
        return lastUpdated;
    }

    public void setLastUpdated(LocalDateTime lastUpdated) {
        this.lastUpdated = lastUpdated;
    }

    // COMPUTED getters

    /** True once the last update lies more than 30 days in the past. */
    public boolean isOutdated() {
        return ChronoUnit.DAYS.between(lastUpdated, LocalDateTime.now()) > 30;
    }

    /** Always returns a fresh, empty sorted map. */
    public Map<String, Integer> getComputedMapping() {
        return new TreeMap<>();
    }
}
}
|
Deck
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/components/mappedsuperclass/AuditedEmbeddableWithDeclaredDataTest.java
|
{
"start": 889,
"end": 2442
}
|
/**
 * Envers test: when an embeddable extends an audited mapped superclass, both the
 * declared and the inherited fields must be captured in the audit record.
 */
class ____ {
    // Id of the entity persisted in initData(); read back by the test method.
    private long id;
    @BeforeClassTemplate
    public void initData(EntityManagerFactoryScope scope) {
        // Persist one entity with an embeddable value; this creates revision 1.
        this.id = scope.fromTransaction( entityManager -> {
            final EntityWithAuditedEmbeddableWithDeclaredData entity = new EntityWithAuditedEmbeddableWithDeclaredData();
            entity.setName( "Entity 1" );
            entity.setValue( new AuditedEmbeddableWithDeclaredData( 42, "Data" ) );
            entityManager.persist( entity );
            return entity.getId();
        } );
    }
    @Test
    public void testEmbeddableThatExtendsAuditedMappedSuperclass(EntityManagerFactoryScope scope) {
        scope.inEntityManager( entityManager -> {
            final EntityWithAuditedEmbeddableWithDeclaredData entity = entityManager.find(
                    EntityWithAuditedEmbeddableWithDeclaredData.class,
                    id
            );
            final AuditReader auditReader = AuditReaderFactory.get( entityManager );
            // Exactly one revision should exist for the persisted entity.
            final List<Number> revisions = auditReader.getRevisions( EntityWithAuditedEmbeddableWithDeclaredData.class, id );
            assertThat( revisions ).hasSize( 1 );
            final EntityWithAuditedEmbeddableWithDeclaredData entityRevision1 = auditReader.find(
                    EntityWithAuditedEmbeddableWithDeclaredData.class,
                    id,
                    revisions.get( 0 )
            );
            assertThat( entityRevision1.getName() ).isEqualTo( entity.getName() );
            // All fields should be audited because the mapped superclass is annotated
            assertThat( entity.getValue().getCodeart() ).isEqualTo( entityRevision1.getValue().getCodeart() );
            assertThat( entityRevision1.getValue().getCode() ).isEqualTo( 42 );
        } );
    }
}
|
AuditedEmbeddableWithDeclaredDataTest
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java
|
{
"start": 1292,
"end": 14907
}
|
class ____ extends AbstractAllocationDecision {
/** a constant representing no decision taken */
public static final MoveDecision NOT_TAKEN = new MoveDecision(null, null, AllocationDecision.NO_ATTEMPT, null, null, 0);
/** cached decisions so we don't have to recreate objects for common decisions when not in explain mode. */
private static final MoveDecision CACHED_STAY_DECISION = new MoveDecision(
null,
null,
AllocationDecision.NO_ATTEMPT,
Decision.YES,
null,
0
);
private static final MoveDecision CACHED_CANNOT_MOVE_DECISION = new MoveDecision(
null,
null,
AllocationDecision.NO,
Decision.NO,
null,
0
);
@Nullable
private final AllocationDecision canMoveDecision;
@Nullable
private final Decision canRemainDecision;
@Nullable
private final Decision clusterRebalanceDecision;
private final int currentNodeRanking;
private MoveDecision(
DiscoveryNode targetNode,
List<NodeAllocationResult> nodeDecisions,
AllocationDecision canMoveDecision,
Decision canRemainDecision,
Decision clusterRebalanceDecision,
int currentNodeRanking
) {
super(targetNode, nodeDecisions);
this.canMoveDecision = canMoveDecision;
this.canRemainDecision = canRemainDecision;
this.clusterRebalanceDecision = clusterRebalanceDecision;
this.currentNodeRanking = currentNodeRanking;
}
public MoveDecision(StreamInput in) throws IOException {
super(in);
canMoveDecision = in.readOptionalWriteable(AllocationDecision::readFrom);
canRemainDecision = in.readOptionalWriteable(Decision::readFrom);
clusterRebalanceDecision = in.readOptionalWriteable(Decision::readFrom);
currentNodeRanking = in.readVInt();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeOptionalWriteable(canMoveDecision);
out.writeOptionalWriteable(canRemainDecision);
out.writeOptionalWriteable(clusterRebalanceDecision);
out.writeVInt(currentNodeRanking);
}
/**
* Creates a move decision for the shard being able to remain on its current node, so the shard won't
* be forced to move to another node.
*/
public static MoveDecision createRemainYesDecision(Decision canRemainDecision) {
assert canRemainDecision.type() != Type.NO;
assert canRemainDecision.type() != Type.NOT_PREFERRED;
if (canRemainDecision == Decision.YES) {
return CACHED_STAY_DECISION;
}
return new MoveDecision(null, null, AllocationDecision.NO_ATTEMPT, canRemainDecision, null, 0);
}
/**
* Creates a move decision for the shard.
*
* @param canRemainDecision the decision for whether the shard is allowed to remain on its current node
* @param moveDecision the {@link AllocationDecision} for moving the shard to another node
* @param targetNode the node where the shard should move to
* @param nodeDecisions the node-level decisions that comprised the final decision, non-null iff explain is true
* @return the {@link MoveDecision} for moving the shard to another node
*/
public static MoveDecision move(
Decision canRemainDecision,
AllocationDecision moveDecision,
@Nullable DiscoveryNode targetNode,
@Nullable List<NodeAllocationResult> nodeDecisions
) {
assert canRemainDecision != null;
assert canRemainDecision.type() != Type.YES : "create decision with MoveDecision#stay instead";
if (nodeDecisions == null && moveDecision == AllocationDecision.NO) {
// the final decision is NO (no node to move the shard to) and we are not in explain mode, return a cached version
return CACHED_CANNOT_MOVE_DECISION;
} else {
assert ((targetNode == null) == (moveDecision != AllocationDecision.YES));
return new MoveDecision(targetNode, nodeDecisions, moveDecision, canRemainDecision, null, 0);
}
}
/**
* Creates a decision for whether to move the shard to a different node to form a better cluster balance.
*/
public static MoveDecision rebalance(
Decision canRemainDecision,
Decision canRebalanceDecision,
AllocationDecision canMoveDecision,
@Nullable DiscoveryNode targetNode,
int currentNodeRanking,
List<NodeAllocationResult> nodeDecisions
) {
return new MoveDecision(targetNode, nodeDecisions, canMoveDecision, canRemainDecision, canRebalanceDecision, currentNodeRanking);
}
@Override
public boolean isDecisionTaken() {
return canRemainDecision != null || clusterRebalanceDecision != null;
}
/**
* Returns {@code true} if the shard cannot remain on its current node and can be moved,
* returns {@code false} otherwise. If {@link #isDecisionTaken()} returns {@code false},
* then invoking this method will throw an {@code IllegalStateException}.
*/
public boolean cannotRemainAndCanMove() {
checkDecisionState();
return cannotRemain() && canMoveDecision == AllocationDecision.YES;
}
/**
* Returns {@code true} if the shard cannot remain on its current node and _cannot_ be moved.
* returns {@code false} otherwise. If {@link #isDecisionTaken()} returns {@code false},
* then invoking this method will throw an {@code IllegalStateException}.
*/
public boolean cannotRemainAndCannotMove() {
checkDecisionState();
return cannotRemain() && canMoveDecision != AllocationDecision.YES;
}
/**
* Returns {@code true} if the shard can remain on its current node, returns {@code false} otherwise.
* If {@link #isDecisionTaken()} returns {@code false}, then invoking this method will throw an {@code IllegalStateException}.
*/
public boolean canRemain() {
checkDecisionState();
return canRemainDecision.type() == Type.YES;
}
/**
* Returns {@code true} if the shard cannot remain on its current node, returns {@code false} if the shard can remain.
* If {@link #isDecisionTaken()} returns {@code false}, then invoking this method will throw an {@code IllegalStateException}.
*/
public boolean cannotRemain() {
return canRemain() == false;
}
/**
* Returns the decision for the shard being allowed to remain on its current node. If {@link #isDecisionTaken()}
* returns {@code false}, then invoking this method will throw an {@code IllegalStateException}.
*/
public Decision getCanRemainDecision() {
checkDecisionState();
return canRemainDecision;
}
/**
* Returns {@code true} if the shard is allowed to be rebalanced to another node in the cluster,
* returns {@code false} otherwise. If {@link #getClusterRebalanceDecision()} returns {@code null}, then
* the result of this method is meaningless, as no rebalance decision was taken. If {@link #isDecisionTaken()}
* returns {@code false}, then invoking this method will throw an {@code IllegalStateException}.
*/
// @VisibleForTesting
public boolean canRebalanceCluster() {
checkDecisionState();
return clusterRebalanceDecision != null && clusterRebalanceDecision.type() == Type.YES;
}
/**
* Returns the decision for being allowed to rebalance the shard. Invoking this method will return
* {@code null} if {@link #canRemain()} ()} returns {@code false}, which means the node is not allowed to
* remain on its current node, so the cluster is forced to attempt to move the shard to a different node,
* as opposed to attempting to rebalance the shard if a better cluster balance is possible by moving it.
* If {@link #isDecisionTaken()} returns {@code false}, then invoking this method will throw an
* {@code IllegalStateException}.
*/
// @VisibleForTesting
@Nullable
public Decision getClusterRebalanceDecision() {
checkDecisionState();
return clusterRebalanceDecision;
}
/**
* Returns the {@link AllocationDecision} for moving this shard to another node. If {@link #isDecisionTaken()} returns
* {@code false}, then invoking this method will throw an {@code IllegalStateException}.
*/
@Nullable
public AllocationDecision getAllocationDecision() {
return canMoveDecision;
}
/**
* Gets the current ranking of the node to which the shard is currently assigned, relative to the
* other nodes in the cluster as reported in {@link NodeAllocationResult#getWeightRanking()}. The
* ranking will only return a meaningful positive integer if {@link #getClusterRebalanceDecision()} returns
* a non-null value; otherwise, 0 will be returned. If {@link #isDecisionTaken()} returns
* {@code false}, then invoking this method will throw an {@code IllegalStateException}.
*/
public int getCurrentNodeRanking() {
checkDecisionState();
return currentNodeRanking;
}
@Override
public String getExplanation() {
checkDecisionState();
if (clusterRebalanceDecision != null) {
// it was a decision to rebalance the shard, because the shard was allowed to remain on its current node
if (canMoveDecision == AllocationDecision.AWAITING_INFO) {
return Explanations.Rebalance.AWAITING_INFO;
}
return switch (clusterRebalanceDecision.type()) {
case NO -> atLeastOneNodeWithYesDecision()
? Explanations.Rebalance.CANNOT_REBALANCE_CAN_ALLOCATE
: Explanations.Rebalance.CANNOT_REBALANCE_CANNOT_ALLOCATE;
case THROTTLE -> Explanations.Rebalance.CLUSTER_THROTTLE;
case YES, NOT_PREFERRED -> {
if (getTargetNode() != null) {
yield canMoveDecision == AllocationDecision.THROTTLED
? Explanations.Rebalance.NODE_THROTTLE
: Explanations.Rebalance.YES;
} else {
yield Explanations.Rebalance.ALREADY_BALANCED;
}
}
};
} else {
// it was a decision to force move the shard
assert cannotRemain();
return switch (canMoveDecision) {
case YES -> Explanations.Move.YES;
case THROTTLED -> Explanations.Move.THROTTLED;
case NO -> Explanations.Move.NO;
case WORSE_BALANCE, AWAITING_INFO, ALLOCATION_DELAYED, NO_VALID_SHARD_COPY, NO_ATTEMPT -> {
assert false : canMoveDecision;
yield canMoveDecision.toString();
}
};
}
}
@Override
public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
checkDecisionState();
return Iterators.concat(Iterators.single((builder, p) -> {
if (targetNode != null) {
builder.startObject("target_node");
discoveryNodeToXContent(targetNode, true, builder);
builder.endObject();
}
builder.field("can_remain_on_current_node", canRemain() ? "yes" : "no");
if (cannotRemain() && canRemainDecision.getDecisions().isEmpty() == false) {
builder.startArray("can_remain_decisions");
canRemainDecision.toXContent(builder, params);
builder.endArray();
}
if (clusterRebalanceDecision != null) {
AllocationDecision rebalanceDecision = AllocationDecision.fromDecisionType(clusterRebalanceDecision.type());
builder.field("can_rebalance_cluster", rebalanceDecision);
if (rebalanceDecision != AllocationDecision.YES && clusterRebalanceDecision.getDecisions().isEmpty() == false) {
builder.startArray("can_rebalance_cluster_decisions");
clusterRebalanceDecision.toXContent(builder, params);
builder.endArray();
}
}
if (clusterRebalanceDecision != null) {
builder.field("can_rebalance_to_other_node", canMoveDecision);
builder.field("rebalance_explanation", getExplanation());
} else {
builder.field("can_move_to_other_node", cannotRemainAndCanMove() ? "yes" : "no");
builder.field("move_explanation", getExplanation());
}
return builder;
}), nodeDecisionsToXContentChunked(nodeDecisions));
}
@Override
public boolean equals(Object other) {
if (super.equals(other) == false) {
return false;
}
if (other instanceof MoveDecision == false) {
return false;
}
MoveDecision that = (MoveDecision) other;
return Objects.equals(canMoveDecision, that.canMoveDecision)
&& Objects.equals(canRemainDecision, that.canRemainDecision)
&& Objects.equals(clusterRebalanceDecision, that.clusterRebalanceDecision)
&& currentNodeRanking == that.currentNodeRanking;
}
@Override
public int hashCode() {
return 31 * super.hashCode() + Objects.hash(canMoveDecision, canRemainDecision, clusterRebalanceDecision, currentNodeRanking);
}
}
|
MoveDecision
|
java
|
netty__netty
|
buffer/src/main/java/io/netty/buffer/AdaptivePoolingAllocator.java
|
{
"start": 8336,
"end": 16360
}
|
class ____ be a multiple of 32";
int sizeIndex = sizeIndexOf(sizeClass);
Arrays.fill(SIZE_INDEXES, lastIndex + 1, sizeIndex + 1, (byte) i);
lastIndex = sizeIndex;
}
}
private final ChunkAllocator chunkAllocator;
private final ChunkRegistry chunkRegistry;
private final MagazineGroup[] sizeClassedMagazineGroups;
private final MagazineGroup largeBufferMagazineGroup;
private final FastThreadLocal<MagazineGroup[]> threadLocalGroup;
AdaptivePoolingAllocator(ChunkAllocator chunkAllocator, boolean useCacheForNonEventLoopThreads) {
this.chunkAllocator = ObjectUtil.checkNotNull(chunkAllocator, "chunkAllocator");
chunkRegistry = new ChunkRegistry();
sizeClassedMagazineGroups = createMagazineGroupSizeClasses(this, false);
largeBufferMagazineGroup = new MagazineGroup(
this, chunkAllocator, new HistogramChunkControllerFactory(true), false);
boolean disableThreadLocalGroups = IS_LOW_MEM && DISABLE_THREAD_LOCAL_MAGAZINES_ON_LOW_MEM;
threadLocalGroup = disableThreadLocalGroups ? null : new FastThreadLocal<MagazineGroup[]>() {
@Override
protected MagazineGroup[] initialValue() {
if (useCacheForNonEventLoopThreads || ThreadExecutorMap.currentExecutor() != null) {
return createMagazineGroupSizeClasses(AdaptivePoolingAllocator.this, true);
}
return null;
}
@Override
protected void onRemoval(final MagazineGroup[] groups) throws Exception {
if (groups != null) {
for (MagazineGroup group : groups) {
group.free();
}
}
}
};
}
private static MagazineGroup[] createMagazineGroupSizeClasses(
AdaptivePoolingAllocator allocator, boolean isThreadLocal) {
MagazineGroup[] groups = new MagazineGroup[SIZE_CLASSES.length];
for (int i = 0; i < SIZE_CLASSES.length; i++) {
int segmentSize = SIZE_CLASSES[i];
groups[i] = new MagazineGroup(allocator, allocator.chunkAllocator,
new SizeClassChunkControllerFactory(segmentSize), isThreadLocal);
}
return groups;
}
/**
* Create a thread-safe multi-producer, multi-consumer queue to hold chunks that spill over from the
* internal Magazines.
* <p>
* Each Magazine can only hold two chunks at any one time: the chunk it currently allocates from,
* and the next-in-line chunk which will be used for allocation once the current one has been used up.
* This queue will be used by magazines to share any excess chunks they allocate, so that they don't need to
* allocate new chunks when their current and next-in-line chunks have both been used up.
* <p>
* The simplest implementation of this method is to return a new {@link ConcurrentLinkedQueue}.
* However, the {@code CLQ} is unbounded, and this means there's no limit to how many chunks can be cached in this
* queue.
* <p>
* Each chunk in this queue can be up to {@link #MAX_CHUNK_SIZE} in size, so it is recommended to use a bounded
* queue to limit the maximum memory usage.
* <p>
* The default implementation will create a bounded queue with a capacity of {@link #CHUNK_REUSE_QUEUE}.
*
* @return A new multi-producer, multi-consumer queue.
*/
private static Queue<Chunk> createSharedChunkQueue() {
return PlatformDependent.newFixedMpmcQueue(CHUNK_REUSE_QUEUE);
}
ByteBuf allocate(int size, int maxCapacity) {
return allocate(size, maxCapacity, Thread.currentThread(), null);
}
private AdaptiveByteBuf allocate(int size, int maxCapacity, Thread currentThread, AdaptiveByteBuf buf) {
AdaptiveByteBuf allocated = null;
if (size <= MAX_POOLED_BUF_SIZE) {
final int index = sizeClassIndexOf(size);
MagazineGroup[] magazineGroups;
if (!FastThreadLocalThread.currentThreadWillCleanupFastThreadLocals() ||
IS_LOW_MEM ||
(magazineGroups = threadLocalGroup.get()) == null) {
magazineGroups = sizeClassedMagazineGroups;
}
if (index < magazineGroups.length) {
allocated = magazineGroups[index].allocate(size, maxCapacity, currentThread, buf);
} else if (!IS_LOW_MEM) {
allocated = largeBufferMagazineGroup.allocate(size, maxCapacity, currentThread, buf);
}
}
if (allocated == null) {
allocated = allocateFallback(size, maxCapacity, currentThread, buf);
}
return allocated;
}
private static int sizeIndexOf(final int size) {
// this is aligning the size to the next multiple of 32 and dividing by 32 to get the size index.
return size + 31 >> 5;
}
static int sizeClassIndexOf(int size) {
int sizeIndex = sizeIndexOf(size);
if (sizeIndex < SIZE_INDEXES.length) {
return SIZE_INDEXES[sizeIndex];
}
return SIZE_CLASSES_COUNT;
}
static int[] getSizeClasses() {
return SIZE_CLASSES.clone();
}
private AdaptiveByteBuf allocateFallback(int size, int maxCapacity, Thread currentThread,
AdaptiveByteBuf buf) {
// If we don't already have a buffer, obtain one from the most conveniently available magazine.
Magazine magazine;
if (buf != null) {
Chunk chunk = buf.chunk;
if (chunk == null || chunk == Magazine.MAGAZINE_FREED || (magazine = chunk.currentMagazine()) == null) {
magazine = getFallbackMagazine(currentThread);
}
} else {
magazine = getFallbackMagazine(currentThread);
buf = magazine.newBuffer();
}
// Create a one-off chunk for this allocation.
AbstractByteBuf innerChunk = chunkAllocator.allocate(size, maxCapacity);
Chunk chunk = new Chunk(innerChunk, magazine, false, chunkSize -> true);
chunkRegistry.add(chunk);
try {
chunk.readInitInto(buf, size, size, maxCapacity);
} finally {
// As the chunk is an one-off we need to always call release explicitly as readInitInto(...)
// will take care of retain once when successful. Once The AdaptiveByteBuf is released it will
// completely release the Chunk and so the contained innerChunk.
chunk.release();
}
return buf;
}
private Magazine getFallbackMagazine(Thread currentThread) {
Magazine[] mags = largeBufferMagazineGroup.magazines;
return mags[(int) currentThread.getId() & mags.length - 1];
}
/**
* Allocate into the given buffer. Used by {@link AdaptiveByteBuf#capacity(int)}.
*/
void reallocate(int size, int maxCapacity, AdaptiveByteBuf into) {
AdaptiveByteBuf result = allocate(size, maxCapacity, Thread.currentThread(), into);
assert result == into: "Re-allocation created separate buffer instance";
}
long usedMemory() {
return chunkRegistry.totalCapacity();
}
// Ensure that we release all previous pooled resources when this object is finalized. This is needed as otherwise
// we might end up with leaks. While these leaks are usually harmless in reality it would still at least be
// very confusing for users.
@SuppressWarnings({"FinalizeDeclaration", "deprecation"})
@Override
protected void finalize() throws Throwable {
try {
super.finalize();
} finally {
free();
}
}
private void free() {
largeBufferMagazineGroup.free();
}
static int sizeToBucket(int size) {
return HistogramChunkController.sizeToBucket(size);
}
private static final
|
must
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/FileWithSnapshotFeature.java
|
{
"start": 1689,
"end": 9157
}
|
class ____ implements INode.Feature {
private final FileDiffList diffs;
private boolean isCurrentFileDeleted = false;
public FileWithSnapshotFeature(FileDiffList diffs) {
this.diffs = diffs != null? diffs: new FileDiffList();
}
public boolean isCurrentFileDeleted() {
return isCurrentFileDeleted;
}
/**
* We need to distinguish two scenarios:
* 1) the file is still in the current file directory, it has been modified
* before while it is included in some snapshot
* 2) the file is not in the current file directory (deleted), but it is in
* some snapshot, thus we still keep this inode
* For both scenarios the file has snapshot feature. We set
* {@link #isCurrentFileDeleted} to true for 2).
*/
public void deleteCurrentFile() {
isCurrentFileDeleted = true;
}
public FileDiffList getDiffs() {
return diffs;
}
/** @return the max replication factor in diffs */
public short getMaxBlockRepInDiffs(FileDiff excluded) {
short max = 0;
for(FileDiff d : getDiffs()) {
if (d != excluded && d.snapshotINode != null) {
final short replication = d.snapshotINode.getFileReplication();
if (replication > max) {
max = replication;
}
}
}
return max;
}
boolean changedBetweenSnapshots(INodeFile file, Snapshot from, Snapshot to) {
int[] diffIndexPair = diffs.changedBetweenSnapshots(from, to);
if (diffIndexPair == null) {
return false;
}
int earlierDiffIndex = diffIndexPair[0];
int laterDiffIndex = diffIndexPair[1];
final DiffList<FileDiff> diffList = diffs.asList();
final long earlierLength = diffList.get(earlierDiffIndex).getFileSize();
final long laterLength = laterDiffIndex == diffList.size() ? file
.computeFileSize(true, false) : diffList.get(laterDiffIndex)
.getFileSize();
if (earlierLength != laterLength) { // file length has been changed
return true;
}
INodeFileAttributes earlierAttr = null; // check the metadata
for (int i = earlierDiffIndex; i < laterDiffIndex; i++) {
FileDiff diff = diffList.get(i);
if (diff.snapshotINode != null) {
earlierAttr = diff.snapshotINode;
break;
}
}
if (earlierAttr == null) { // no meta-change at all, return false
return false;
}
INodeFileAttributes laterAttr = diffs.getSnapshotINode(
Math.max(Snapshot.getSnapshotId(from), Snapshot.getSnapshotId(to)),
file);
return !earlierAttr.metadataEquals(laterAttr);
}
public String getDetailedString() {
return (isCurrentFileDeleted()? "(DELETED), ": ", ") + diffs;
}
public void cleanFile(INode.ReclaimContext reclaimContext,
final INodeFile file, final int snapshotId, int priorSnapshotId,
byte storagePolicyId) {
final int snapshotToBeDeleted
= reclaimContext.getSnapshotIdToBeDeleted(snapshotId, file);
if (snapshotId == Snapshot.CURRENT_STATE_ID) {
// delete the current file while the file has snapshot feature
if (!isCurrentFileDeleted()
&& snapshotToBeDeleted == Snapshot.CURRENT_STATE_ID) {
file.recordModification(priorSnapshotId);
deleteCurrentFile();
}
final BlockStoragePolicy policy = reclaimContext.storagePolicySuite()
.getPolicy(storagePolicyId);
QuotaCounts old = file.storagespaceConsumed(policy);
collectBlocksAndClear(reclaimContext, file);
QuotaCounts current = file.storagespaceConsumed(policy);
reclaimContext.quotaDelta().add(old.subtract(current));
} else { // delete the snapshot
priorSnapshotId = getDiffs().updatePrior(snapshotId, priorSnapshotId);
diffs.deleteSnapshotDiff(reclaimContext, snapshotId, priorSnapshotId,
file);
}
}
public void clearDiffs() {
this.diffs.clear();
}
public void updateQuotaAndCollectBlocks(INode.ReclaimContext reclaimContext,
INodeFile file, FileDiff removed) {
byte storagePolicyID = file.getStoragePolicyID();
BlockStoragePolicy bsp = null;
if (storagePolicyID != HdfsConstants.BLOCK_STORAGE_POLICY_ID_UNSPECIFIED) {
bsp = reclaimContext.storagePolicySuite().
getPolicy(file.getStoragePolicyID());
}
QuotaCounts oldCounts;
if (removed.snapshotINode != null) {
oldCounts = new QuotaCounts.Builder().build();
// collect all distinct blocks
Set<BlockInfo> allBlocks = new HashSet<BlockInfo>();
if (file.getBlocks() != null) {
allBlocks.addAll(Arrays.asList(file.getBlocks()));
}
if (removed.getBlocks() != null) {
allBlocks.addAll(Arrays.asList(removed.getBlocks()));
}
for (FileDiff diff : diffs) {
BlockInfo[] diffBlocks = diff.getBlocks();
if (diffBlocks != null) {
allBlocks.addAll(Arrays.asList(diffBlocks));
}
}
for (BlockInfo b: allBlocks) {
short replication = b.getReplication();
long blockSize = b.isComplete() ? b.getNumBytes() : file
.getPreferredBlockSize();
oldCounts.addStorageSpace(blockSize * replication);
if (bsp != null) {
List<StorageType> oldTypeChosen = bsp.chooseStorageTypes(replication);
for (StorageType t : oldTypeChosen) {
if (t.supportTypeQuota()) {
oldCounts.addTypeSpace(t, blockSize);
}
}
}
}
AclFeature aclFeature = removed.getSnapshotINode().getAclFeature();
if (aclFeature != null) {
AclStorage.removeAclFeature(aclFeature);
}
} else {
oldCounts = file.storagespaceConsumed(null);
}
getDiffs().combineAndCollectSnapshotBlocks(reclaimContext, file, removed);
if (file.getBlocks() != null) {
short replInDiff = getMaxBlockRepInDiffs(removed);
short repl = (short) Math.max(file.getPreferredBlockReplication(),
replInDiff);
for (BlockInfo b : file.getBlocks()) {
if (repl != b.getReplication()) {
reclaimContext.collectedBlocks().addUpdateReplicationFactor(b, repl);
}
}
}
QuotaCounts current = file.storagespaceConsumed(bsp);
reclaimContext.quotaDelta().add(oldCounts.subtract(current));
}
/**
* If some blocks at the end of the block list no longer belongs to
* any inode, collect them and update the block list.
*/
public void collectBlocksAndClear(
INode.ReclaimContext reclaimContext, final INodeFile file) {
// check if everything is deleted.
if (isCurrentFileDeleted() && getDiffs().asList().isEmpty()) {
file.clearFile(reclaimContext);
return;
}
// find max file size.
final long max;
FileDiff diff = getDiffs().getLast();
if (isCurrentFileDeleted()) {
max = diff == null? 0: diff.getFileSize();
} else {
max = file.computeFileSize();
}
// Collect blocks that should be deleted
FileDiff last = diffs.getLast();
BlockInfo[] snapshotBlocks = last == null ? null : last.getBlocks();
if(snapshotBlocks == null)
file.collectBlocksBeyondMax(max, reclaimContext.collectedBlocks(), null);
else
file.collectBlocksBeyondSnapshot(snapshotBlocks,
reclaimContext.collectedBlocks());
}
@Override
public String toString() {
return "isCurrentFileDeleted? " + isCurrentFileDeleted + ", " + diffs;
}
}
|
FileWithSnapshotFeature
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/deser/jdk/NumberDeserializers.java
|
{
"start": 7062,
"end": 8862
}
|
class ____
extends PrimitiveOrWrapperDeserializer<Boolean>
{
final static BooleanDeserializer primitiveInstance = new BooleanDeserializer(Boolean.TYPE, Boolean.FALSE);
final static BooleanDeserializer wrapperInstance = new BooleanDeserializer(Boolean.class, null);
public BooleanDeserializer(Class<Boolean> cls, Boolean nvl)
{
super(cls, LogicalType.Boolean, nvl, Boolean.FALSE);
}
@Override
public Boolean deserialize(JsonParser p, DeserializationContext ctxt) throws JacksonException
{
JsonToken t = p.currentToken();
if (t == JsonToken.VALUE_TRUE) {
return Boolean.TRUE;
}
if (t == JsonToken.VALUE_FALSE) {
return Boolean.FALSE;
}
if (_primitive) {
return _parseBooleanPrimitive(p, ctxt);
}
return _parseBoolean(p, ctxt, _valueClass);
}
// Since we can never have type info ("natural type"; String, Boolean, Integer, Double):
// (is it an error to even call this version?)
@Override
public Boolean deserializeWithType(JsonParser p, DeserializationContext ctxt,
TypeDeserializer typeDeserializer)
throws JacksonException
{
JsonToken t = p.currentToken();
if (t == JsonToken.VALUE_TRUE) {
return Boolean.TRUE;
}
if (t == JsonToken.VALUE_FALSE) {
return Boolean.FALSE;
}
if (_primitive) {
return _parseBooleanPrimitive(p, ctxt);
}
return _parseBoolean(p, ctxt, _valueClass);
}
}
@JacksonStdImpl
public static
|
BooleanDeserializer
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/schemavalidation/MySqlExistingEnumColumnValidationTest.java
|
{
"start": 2371,
"end": 2910
}
|
// JPA entity with a string-mapped enum column "sign_position".
// NOTE(review): appears to back a schema-validation test for an existing enum
// column — confirm against the enclosing test class.
class ____ {
    // Auto-increment surrogate key, not writable after insert.
    @Id
    @GeneratedValue(strategy = IDENTITY)
    @Column(name = "id", nullable = false, updatable = false)
    private Integer id;
    // Persisted as the enum constant's name (EnumType.STRING).
    @Enumerated(EnumType.STRING)
    @Column(name = "sign_position")
    private SignPosition signPosition;
    public Integer getId() {
        return id;
    }
    public void setId(Integer id) {
        this.id = id;
    }
    public SignPosition getSignPosition() {
        return signPosition;
    }
    public void setSignPosition(SignPosition signPosition) {
        this.signPosition = signPosition;
    }
}
public static
|
EntityE
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/derivedidentities/e4/c/DerivedIdentitySimpleParentSimpleDepSecondPassOrderingTest.java
|
{
"start": 5312,
"end": 5927
}
|
class ____ implements Serializable {
@Id
@OneToOne
private EntityWithSimpleId idsource;
private String data;
public EntityWithOneToOneDerivedId() {
}
public EntityWithOneToOneDerivedId(EntityWithSimpleId idsource) {
this.idsource = idsource;
}
public EntityWithSimpleId getIdsource() {
return idsource;
}
public void setIdsource(EntityWithSimpleId idsource) {
this.idsource = idsource;
}
public String getData() {
return data;
}
public void setData(String data) {
this.data = data;
}
}
@Entity(name = "ref_oto_derived")
public static
|
EntityWithOneToOneDerivedId
|
java
|
google__auto
|
value/src/main/java/com/google/auto/value/processor/AutoValueishProcessor.java
|
{
"start": 57116,
"end": 57484
}
|
class ____ Foo then, for the annotation to be visible, either Foo must be in the
// same package as C or Foo must be a subclass of C. If the annotation is visible from Foo
// then it is also visible from our generated subclass AutoValue_Foo.
// The protected case only applies to method annotations. An annotation on the AutoValue_Foo
//
|
is
|
java
|
google__error-prone
|
check_api/src/main/java/com/google/errorprone/matchers/ChildMultiMatcher.java
|
{
"start": 1381,
"end": 1483
}
|
class ____<T extends Tree, N extends Tree>
implements MultiMatcher<T, N> {
public
|
ChildMultiMatcher
|
java
|
micronaut-projects__micronaut-core
|
http-server-netty/src/test/java/io/micronaut/http/server/exceptions/response/DefaultHtmlErrorResponseBodyProviderTest.java
|
{
"start": 6598,
"end": 6869
}
|
class ____ extends RuntimeException {
}
@Requires(property = "spec.name", value = "DefaultHtmlBodyErrorResponseProviderTest")
@Produces
@Singleton
@Requires(classes = {UserNotFoundException.class, ExceptionHandler.class})
static
|
UserNotFoundException
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DiffListBySkipList.java
|
{
"start": 3572,
"end": 4430
}
|
class ____ {
static final SkipDiff[] EMPTY_ARRAY = {};
/**
* The references to the subsequent nodes.
*/
private SkipListNode skipTo;
/**
* combined diff over a skip Interval.
*/
private ChildrenDiff diff;
SkipDiff(ChildrenDiff diff) {
this.diff = diff;
}
public ChildrenDiff getDiff() {
return diff;
}
public SkipListNode getSkipTo() {
return skipTo;
}
public void setSkipTo(SkipListNode node) {
skipTo = node;
}
public void setDiff(ChildrenDiff diff) {
this.diff = diff;
}
@Override
public String toString() {
return skip2String(skipTo, diff);
}
}
/**
* SkipListNode is an implementation of a DirectoryDiff List node,
* which stores a Directory Diff and references to subsequent nodes.
*/
final static
|
SkipDiff
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/issue_3600/Issue3682.java
|
{
"start": 187,
"end": 397
}
|
class ____ extends TestCase {
public void test_for_issue() throws Exception {
Cid cid = JSON.parseObject(SOURCE, Cid.class);
System.out.println(cid);
}
@Data
static public
|
Issue3682
|
java
|
quarkusio__quarkus
|
extensions/smallrye-metrics/runtime/src/main/java/io/quarkus/smallrye/metrics/runtime/SmallRyeMetricsFactory.java
|
{
"start": 6729,
"end": 7179
}
|
class ____ implements TimeRecorder {
SimpleTimer timer;
SmallRyeTimeRecorder(SimpleTimer timer) {
this.timer = timer;
}
@Override
public void update(Duration duration) {
timer.update(duration);
}
@Override
public void update(long duration, TimeUnit unit) {
timer.update(Duration.ofNanos(unit.toNanos(duration)));
}
}
}
|
SmallRyeTimeRecorder
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/core/FlowableTransformer.java
|
{
"start": 906,
"end": 1338
}
|
interface ____<@NonNull Upstream, @NonNull Downstream> {
/**
* Applies a function to the upstream {@link Flowable} and returns a {@link Publisher} with
* optionally different element type.
* @param upstream the upstream {@code Flowable} instance
* @return the transformed {@code Publisher} instance
*/
@NonNull
Publisher<Downstream> apply(@NonNull Flowable<Upstream> upstream);
}
|
FlowableTransformer
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/bytearray/ByteArrayAssert_asString_Test.java
|
{
"start": 1327,
"end": 3888
}
|
class ____ {
@Test
void should_convert_bytes_array_to_a_proper_string_with_default_encoding() {
// GIVEN
String foo = "foo";
// WHEN/THEN
assertThat(foo.getBytes()).asString()
.isEqualTo(foo);
}
@Test
void should_fail_if_actual_is_null() {
// GIVEN
byte[] bytes = null;
// WHEN
var error = expectAssertionError(() -> assertThat(bytes).asString());
// THEN
assertThat(error).hasMessage(actualIsNull());
}
@Test
void should_fail_if_actual_does_not_match() {
// GIVEN
String foo = "foo";
// WHEN
var assertionError = expectAssertionError(() -> assertThat(foo.getBytes()).asString().isEqualTo("bar"));
// THEN
assertThat(assertionError).hasMessage(shouldBeEqualMessage("\"foo\"", "\"bar\""))
.isExactlyInstanceOf(AssertionFailedError.class);
}
@Test
void should_pass_with_soft_assertions() {
// GIVEN
SoftAssertions softly = new SoftAssertions();
String foo = "foo";
// WHEN/THEN
softly.assertThat(foo.getBytes()).asString().isEqualTo(foo);
softly.assertAll();
}
@Test
void should_fail_with_soft_assertions_capturing_all_errors() {
// GIVEN
SoftAssertions softly = new SoftAssertions();
String foo = "foo";
// WHEN
softly.assertThat(foo.getBytes())
.asString()
.isEqualTo("bar")
.isBlank();
var assertionError = expectAssertionError(softly::assertAll);
// THEN
assertThat(assertionError).hasMessageContainingAll("Multiple Failures (2 failures)",
"-- failure 1 --",
shouldBeEqualMessage("\"foo\"", "\"bar\""),
"-- failure 2 --",
"Expecting blank but was: \"foo\"")
.isExactlyInstanceOf(AssertJMultipleFailuresError.class);
}
@Test
void should_ignore_test_when_assumption_for_internally_created_hex_string_assertion_fails() {
// GIVEN
String foo = "foo";
// WHEN/THEN
expectAssumptionNotMetException(() -> assumeThat(foo.getBytes()).asString().isEqualTo("bar"));
}
@Test
void should_run_test_when_assumption_for_internally_created_string_passes() {
// GIVEN
String foo = "foo";
// WHEN/THEN
assertThatCode(() -> assumeThat(foo.getBytes()).asString().startsWith("fo")).doesNotThrowAnyException();
}
}
|
ByteArrayAssert_asString_Test
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/internal/operators/observable/ObservableRefCountTest.java
|
{
"start": 30079,
"end": 31274
}
|
class ____ extends ConnectableObservable<Object> {
int count;
@Override
public void connect(Consumer<? super Disposable> connection) {
try {
connection.accept(Disposable.empty());
} catch (Throwable ex) {
throw ExceptionHelper.wrapOrThrow(ex);
}
}
@Override
public void reset() {
// nothing to do in this test
}
@Override
protected void subscribeActual(Observer<? super Object> observer) {
if (++count == 1) {
observer.onSubscribe(Disposable.empty());
} else {
throw new TestException("subscribeActual");
}
}
}
@Test
@SuppressUndeliverable
public void badSourceSubscribe2() {
BadObservableSubscribe2 bo = new BadObservableSubscribe2();
Observable<Object> o = bo.refCount();
o.test();
try {
o.test();
fail("Should have thrown");
} catch (NullPointerException ex) {
assertTrue(ex.getCause() instanceof TestException);
}
}
static final
|
BadObservableSubscribe2
|
java
|
spring-projects__spring-boot
|
smoke-test/spring-boot-smoke-test-reactive-oauth2-client/src/main/java/smoketest/oauth2/client/SampleReactiveOAuth2ClientApplication.java
|
{
"start": 811,
"end": 976
}
|
class ____ {
public static void main(String[] args) {
SpringApplication.run(SampleReactiveOAuth2ClientApplication.class);
}
}
|
SampleReactiveOAuth2ClientApplication
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/issues/contextscan2/SpringRouteIsComponentAnnotated2Test.java
|
{
"start": 1099,
"end": 1691
}
|
class ____ extends SpringTestSupport {
@Override
protected AbstractXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext(
"org/apache/camel/spring/issues/contextscan2/SpringRouteIsComponentAnnotated2Test.xml");
}
@Test
public void testSpringRouteIsComponentAnnotated() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("Hello World");
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
}
|
SpringRouteIsComponentAnnotated2Test
|
java
|
quarkusio__quarkus
|
extensions/security-jpa-reactive/runtime/src/main/java/io/quarkus/security/jpa/reactive/runtime/JpaReactiveTrustedIdentityProvider.java
|
{
"start": 674,
"end": 2512
}
|
class ____ implements IdentityProvider<TrustedAuthenticationRequest> {
private static final Logger LOG = Logger.getLogger(JpaReactiveTrustedIdentityProvider.class);
@Inject
Mutiny.SessionFactory sessionFactory;
@Override
public Class<TrustedAuthenticationRequest> getRequestType() {
return TrustedAuthenticationRequest.class;
}
@Override
public Uni<SecurityIdentity> authenticate(TrustedAuthenticationRequest request,
AuthenticationRequestContext authenticationRequestContext) {
return sessionFactory.withSession(new Function<Mutiny.Session, Uni<SecurityIdentity>>() {
@Override
public Uni<SecurityIdentity> apply(Mutiny.Session session) {
session.setFlushMode(FlushMode.MANUAL);
session.setDefaultReadOnly(true);
return authenticate(session, request)
.onFailure(new Predicate<Throwable>() {
@Override
public boolean test(Throwable throwable) {
return throwable instanceof SecurityException || throwable instanceof NonUniqueResultException;
}
})
.transform(new Function<Throwable, Throwable>() {
@Override
public Throwable apply(Throwable throwable) {
LOG.debug("Authentication failed", throwable);
return new AuthenticationFailedException(throwable);
}
});
}
});
}
public abstract Uni<SecurityIdentity> authenticate(Mutiny.Session session, TrustedAuthenticationRequest request);
}
|
JpaReactiveTrustedIdentityProvider
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/web/util/UriComponents.java
|
{
"start": 10002,
"end": 10604
}
|
class ____ implements UriTemplateVariables {
private final Map<String, ? extends @Nullable Object> uriVariables;
public MapTemplateVariables(Map<String, ? extends @Nullable Object> uriVariables) {
this.uriVariables = uriVariables;
}
@Override
public @Nullable Object getValue(@Nullable String name) {
if (!this.uriVariables.containsKey(name)) {
throw new IllegalArgumentException("Map has no value for '" + name + "'");
}
return this.uriVariables.get(name);
}
}
/**
* URI template variables backed by a variable argument array.
*/
private static
|
MapTemplateVariables
|
java
|
apache__camel
|
components/camel-jetty/src/test/java/org/apache/camel/component/jetty/rest/UserService.java
|
{
"start": 858,
"end": 1220
}
|
class ____ {
public CountryPojo livesWhere(UserPojo user) {
CountryPojo answer = new CountryPojo();
if (user.getId() < 500) {
answer.setIso("EN");
answer.setCountry("England");
} else {
answer.setIso("SE");
answer.setCountry("Sweden");
}
return answer;
}
}
|
UserService
|
java
|
spring-projects__spring-boot
|
module/spring-boot-ldap/src/test/java/org/springframework/boot/ldap/autoconfigure/LdapAutoConfigurationTests.java
|
{
"start": 11844,
"end": 12059
}
|
class ____ {
@Bean
DirContextAuthenticationStrategy anotherCustomDirContextAuthenticationStrategy() {
return mock(DirContextAuthenticationStrategy.class);
}
}
}
|
AnotherCustomDirContextAuthenticationStrategy
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/jsontype/jdk/TypedContainerSerTest.java
|
{
"start": 1734,
"end": 2028
}
|
class ____<T extends Animal> {
@JsonSerialize
T animal;
public T getAnimal() {
return animal;
}
public void setAnimal(T animal) {
this.animal = animal;
}
}
@JsonTypeInfo(use=JsonTypeInfo.Id.CLASS, include=JsonTypeInfo.As.PROPERTY, property="@class")
static
|
Container2
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_880/spring/DefaultsToProcessorOptionsMapper.java
|
{
"start": 279,
"end": 364
}
|
interface ____ {
Poodle metamorph(Object essence);
}
|
DefaultsToProcessorOptionsMapper
|
java
|
apache__camel
|
components/camel-jackson/src/test/java/org/apache/camel/component/jackson/JacksonNotUseDefaultObjectMapperTest.java
|
{
"start": 1309,
"end": 2692
}
|
class ____ extends CamelTestSupport {
private JacksonDataFormat df = new JacksonDataFormat();
@BindToRegistry("myMapper")
private ObjectMapper objectMapper = new ObjectMapper();
@Override
public void doPreSetup() {
df.setUseDefaultObjectMapper(false);
}
@Test
public void testMarshalAndUnmarshalMap() throws Exception {
Map<String, Object> in = new HashMap<>();
in.put("name", "Camel");
MockEndpoint mock = getMockEndpoint("mock:reverse");
mock.expectedMessageCount(1);
mock.message(0).body().isInstanceOf(Map.class);
mock.message(0).body().isEqualTo(in);
Object marshalled = template.requestBody("direct:in", in);
String marshalledAsString = context.getTypeConverter().convertTo(String.class, marshalled);
assertEquals("{\"name\":\"Camel\"}", marshalledAsString);
template.sendBody("direct:back", marshalled);
mock.assertIsSatisfied();
assertNotSame(objectMapper, df.getObjectMapper());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:in").marshal(df);
from("direct:back").unmarshal(df).to("mock:reverse");
}
};
}
}
|
JacksonNotUseDefaultObjectMapperTest
|
java
|
elastic__elasticsearch
|
libs/x-content/src/test/java/org/elasticsearch/xcontent/ConstructingObjectParserTests.java
|
{
"start": 13119,
"end": 14116
}
|
class ____ {
public final String test;
TestStruct(String test) {
this.test = test;
}
}
ConstructingObjectParser<TestStruct, Void> objectParser = new ConstructingObjectParser<>(
"foo",
true,
a -> new TestStruct((String) a[0])
);
objectParser.declareString(constructorArg(), new ParseField("test"));
TestStruct s = objectParser.apply(parser, null);
assertEquals(s.test, "foo");
}
public void testConstructObjectUsingContext() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent, """
{ "animal": "dropbear", "mineral": -8 }""");
HasCtorArguments parsed = HasCtorArguments.PARSER_INT_CONTEXT.apply(parser, 42);
assertEquals(Integer.valueOf(42), parsed.vegetable);
assertEquals("dropbear", parsed.animal);
assertEquals(-8, parsed.mineral);
}
private static
|
TestStruct
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/types/ClassDataTypeConverter.java
|
{
"start": 1409,
"end": 1473
}
|
class ____ {
/**
* @param clazz The
|
ClassDataTypeConverter
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/server/processor/src/main/java/org/jboss/resteasy/reactive/server/processor/generation/multipart/MultipartTransformer.java
|
{
"start": 478,
"end": 1534
}
|
class ____ implements BiFunction<String, ClassVisitor, ClassVisitor> {
static final String POPULATE_METHOD_NAME = "populate";
private static final String INJECTION_TARGET_BINARY_NAME = ResteasyReactiveInjectionTarget.class.getName()
.replace('.', '/');
private static final String INJECTION_CONTEXT_BINARY_NAME = ResteasyReactiveInjectionContext.class.getName()
.replace('.', '/');
private static final String INJECTION_CONTEXT_DESCRIPTOR = "L" + INJECTION_CONTEXT_BINARY_NAME + ";";
private static final String INJECT_METHOD_NAME = "__quarkus_rest_inject";
private static final String INJECT_METHOD_DESCRIPTOR = "(" + INJECTION_CONTEXT_DESCRIPTOR + ")V";
private final String populatorName;
public MultipartTransformer(String populatorName) {
this.populatorName = populatorName;
}
@Override
public ClassVisitor apply(String s, ClassVisitor visitor) {
return new MultipartClassVisitor(Gizmo.ASM_API_VERSION, visitor, populatorName);
}
static
|
MultipartTransformer
|
java
|
apache__camel
|
components/camel-debezium/camel-debezium-mysql/src/generated/java/org/apache/camel/component/debezium/mysql/configuration/MySqlConnectorEmbeddedDebeziumConfiguration.java
|
{
"start": 462,
"end": 16577
}
|
class ____
extends
EmbeddedDebeziumConfiguration {
private static final String LABEL_NAME = "consumer,mysql";
@UriParam(label = LABEL_NAME, defaultValue = "minimal")
private String snapshotLockingMode = "minimal";
@UriParam(label = LABEL_NAME)
private String messageKeyColumns;
@UriParam(label = LABEL_NAME, defaultValue = "io.debezium.pipeline.txmetadata.DefaultTransactionMetadataFactory")
private String transactionMetadataFactory = "io.debezium.pipeline.txmetadata.DefaultTransactionMetadataFactory";
@UriParam(label = LABEL_NAME)
private String customMetricTags;
@UriParam(label = LABEL_NAME, defaultValue = "source")
private String signalEnabledChannels = "source";
@UriParam(label = LABEL_NAME, defaultValue = "true")
private boolean includeSchemaChanges = true;
@UriParam(label = LABEL_NAME, defaultValue = "com.mysql.cj.jdbc.Driver")
private String databaseJdbcDriver = "com.mysql.cj.jdbc.Driver";
@UriParam(label = LABEL_NAME)
private String signalDataCollection;
@UriParam(label = LABEL_NAME)
private String databaseInitialStatements;
@UriParam(label = LABEL_NAME)
private String converters;
@UriParam(label = LABEL_NAME)
private int snapshotFetchSize;
@UriParam(label = LABEL_NAME)
private String openlineageIntegrationJobTags;
@UriParam(label = LABEL_NAME, defaultValue = "10s", javaType = "java.time.Duration")
private long snapshotLockTimeoutMs = 10000;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean useNongracefulDisconnect = false;
@UriParam(label = LABEL_NAME, defaultValue = "disabled")
private String snapshotTablesOrderByRowCount = "disabled";
@UriParam(label = LABEL_NAME)
private String gtidSourceExcludes;
@UriParam(label = LABEL_NAME)
private String snapshotSelectStatementOverrides;
@UriParam(label = LABEL_NAME)
private String databaseSslKeystore;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean incrementalSnapshotAllowSchemaChanges = false;
@UriParam(label = LABEL_NAME, defaultValue = "jdbc:mysql")
private String databaseProtocol = "jdbc:mysql";
@UriParam(label = LABEL_NAME, defaultValue = "1000")
private int minRowCountToStreamResults = 1000;
@UriParam(label = LABEL_NAME)
private String tableExcludeList;
@UriParam(label = LABEL_NAME)
private String databaseExcludeList;
@UriParam(label = LABEL_NAME, defaultValue = "true")
private boolean gtidSourceFilterDmlEvents = true;
@UriParam(label = LABEL_NAME, defaultValue = "2048")
private int maxBatchSize = 2048;
@UriParam(label = LABEL_NAME, defaultValue = "io.debezium.schema.SchemaTopicNamingStrategy")
private String topicNamingStrategy = "io.debezium.schema.SchemaTopicNamingStrategy";
@UriParam(label = LABEL_NAME, defaultValue = "initial")
private String snapshotMode = "initial";
@UriParam(label = LABEL_NAME, defaultValue = "30s", javaType = "java.time.Duration")
private int connectTimeoutMs = 30000;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean snapshotModeConfigurationBasedSnapshotData = false;
@UriParam(label = LABEL_NAME, defaultValue = "1024")
private int incrementalSnapshotChunkSize = 1024;
@UriParam(label = LABEL_NAME)
private String openlineageIntegrationJobOwners;
@UriParam(label = LABEL_NAME, defaultValue = "./openlineage.yml")
private String openlineageIntegrationConfigFilePath = "./openlineage.yml";
@UriParam(label = LABEL_NAME, defaultValue = "10s", javaType = "java.time.Duration")
private long retriableRestartConnectorWaitMs = 10000;
@UriParam(label = LABEL_NAME, defaultValue = "0ms", javaType = "java.time.Duration")
private long snapshotDelayMs = 0;
@UriParam(label = LABEL_NAME, defaultValue = "4s", javaType = "java.time.Duration")
private long executorShutdownTimeoutMs = 4000;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean snapshotModeConfigurationBasedSnapshotOnDataError = false;
@UriParam(label = LABEL_NAME)
private String schemaHistoryInternalFileFilename;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean tombstonesOnDelete = false;
@UriParam(label = LABEL_NAME, defaultValue = "precise")
private String decimalHandlingMode = "precise";
@UriParam(label = LABEL_NAME)
private String snapshotQueryModeCustomName;
@UriParam(label = LABEL_NAME)
private String openlineageIntegrationDatasetKafkaBootstrapServers;
@UriParam(label = LABEL_NAME, defaultValue = "true")
private boolean tableIgnoreBuiltin = true;
@UriParam(label = LABEL_NAME)
private String snapshotIncludeCollectionList;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean snapshotModeConfigurationBasedStartStream = false;
@UriParam(label = LABEL_NAME, defaultValue = "long")
private String bigintUnsignedHandlingMode = "long";
@UriParam(label = LABEL_NAME)
private long databaseServerId;
@UriParam(label = LABEL_NAME, defaultValue = "5s", javaType = "java.time.Duration")
private long signalPollIntervalMs = 5000;
@UriParam(label = LABEL_NAME)
private String notificationEnabledChannels;
@UriParam(label = LABEL_NAME, defaultValue = "fail")
private String eventProcessingFailureHandlingMode = "fail";
@UriParam(label = LABEL_NAME, defaultValue = "1")
private int snapshotMaxThreads = 1;
@UriParam(label = LABEL_NAME)
private String notificationSinkTopicName;
@UriParam(label = LABEL_NAME)
private String snapshotModeCustomName;
@UriParam(label = LABEL_NAME, defaultValue = "preferred")
private String databaseSslMode = "preferred";
@UriParam(label = LABEL_NAME, defaultValue = "none")
private String schemaNameAdjustmentMode = "none";
@UriParam(label = LABEL_NAME, defaultValue = "1m", javaType = "java.time.Duration")
private long connectKeepAliveIntervalMs = 60000;
@UriParam(label = LABEL_NAME)
private String tableIncludeList;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean includeQuery = false;
@UriParam(label = LABEL_NAME)
private String databaseIncludeList;
@UriParam(label = LABEL_NAME, defaultValue = "0ms", javaType = "java.time.Duration")
private long streamingDelayMs = 0;
@UriParam(label = LABEL_NAME)
private String openlineageIntegrationJobNamespace;
@UriParam(label = LABEL_NAME, defaultValue = "10m", javaType = "java.time.Duration")
private int databaseQueryTimeoutMs = 600000;
@UriParam(label = LABEL_NAME, defaultValue = "0")
private int queryFetchSize = 0;
@UriParam(label = LABEL_NAME)
private String gtidSourceIncludes;
@UriParam(label = LABEL_NAME)
private String heartbeatActionQuery;
@UriParam(label = LABEL_NAME, defaultValue = "500ms", javaType = "java.time.Duration")
private long pollIntervalMs = 500;
@UriParam(label = LABEL_NAME, defaultValue = "0")
private int guardrailCollectionsMax = 0;
@UriParam(label = LABEL_NAME, defaultValue = "__debezium-heartbeat")
private String heartbeatTopicsPrefix = "__debezium-heartbeat";
@UriParam(label = LABEL_NAME, defaultValue = "0")
private int binlogBufferSize = 0;
@UriParam(label = LABEL_NAME)
private String databaseUser;
@UriParam(label = LABEL_NAME)
private String datatypePropagateSourceType;
@UriParam(label = LABEL_NAME, defaultValue = "INSERT_INSERT")
private String incrementalSnapshotWatermarkingStrategy = "INSERT_INSERT";
@UriParam(label = LABEL_NAME, defaultValue = "0ms", javaType = "java.time.Duration")
private int heartbeatIntervalMs = 0;
@UriParam(label = LABEL_NAME)
private String databaseSslTruststorePassword;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean snapshotModeConfigurationBasedSnapshotOnSchemaError = false;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean schemaHistoryInternalSkipUnparseableDdl = false;
@UriParam(label = LABEL_NAME)
private String columnIncludeList;
@UriParam(label = LABEL_NAME, defaultValue = "true")
private boolean enableTimeAdjuster = true;
@UriParam(label = LABEL_NAME)
private String columnPropagateSourceType;
@UriParam(label = LABEL_NAME, defaultValue = "fail")
private String inconsistentSchemaHandlingMode = "fail";
@UriParam(label = LABEL_NAME, defaultValue = "-1")
private int errorsMaxRetries = -1;
@UriParam(label = LABEL_NAME)
@Metadata(required = true)
private String databasePassword;
@UriParam(label = LABEL_NAME, defaultValue = "t")
private String skippedOperations = "t";
@UriParam(label = LABEL_NAME, defaultValue = "Debezium change data capture job")
private String openlineageIntegrationJobDescription = "Debezium change data capture job";
@UriParam(label = LABEL_NAME, defaultValue = "true")
private boolean connectKeepAlive = true;
@UriParam(label = LABEL_NAME, defaultValue = "true")
private boolean extendedHeadersEnabled = true;
@UriParam(label = LABEL_NAME, defaultValue = "8192")
private int maxQueueSize = 8192;
@UriParam(label = LABEL_NAME, defaultValue = "warn")
private String guardrailCollectionsLimitAction = "warn";
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean provideTransactionMetadata = false;
@UriParam(label = LABEL_NAME, defaultValue = "select_all")
private String snapshotQueryMode = "select_all";
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean schemaHistoryInternalStoreOnlyCapturedTablesDdl = false;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean schemaHistoryInternalStoreOnlyCapturedDatabasesDdl = false;
@UriParam(label = LABEL_NAME)
@Metadata(required = true)
private String topicPrefix;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean includeSchemaComments = false;
@UriParam(label = LABEL_NAME, defaultValue = "io.debezium.connector.mysql.MySqlSourceInfoStructMaker")
private String sourceinfoStructMaker = "io.debezium.connector.mysql.MySqlSourceInfoStructMaker";
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean openlineageIntegrationEnabled = false;
@UriParam(label = LABEL_NAME, defaultValue = "0")
private long maxQueueSizeInBytes = 0;
@UriParam(label = LABEL_NAME, defaultValue = "false")
private boolean snapshotModeConfigurationBasedSnapshotSchema = false;
@UriParam(label = LABEL_NAME, defaultValue = "adaptive_time_microseconds")
private String timePrecisionMode = "adaptive_time_microseconds";
@UriParam(label = LABEL_NAME, defaultValue = "fail")
private String eventDeserializationFailureHandlingMode = "fail";
@UriParam(label = LABEL_NAME)
private String postProcessors;
@UriParam(label = LABEL_NAME, defaultValue = "3306")
private int databasePort = 3306;
@UriParam(label = LABEL_NAME)
private String databaseSslTruststore;
@UriParam(label = LABEL_NAME)
private String databaseSslKeystorePassword;
@UriParam(label = LABEL_NAME, defaultValue = "io.debezium.storage.kafka.history.KafkaSchemaHistory")
private String schemaHistoryInternal = "io.debezium.storage.kafka.history.KafkaSchemaHistory";
@UriParam(label = LABEL_NAME)
private String columnExcludeList;
@UriParam(label = LABEL_NAME)
private String databaseHostname;
@UriParam(label = LABEL_NAME, defaultValue = "10000")
private long databaseServerIdOffset = 10000;
@UriParam(label = LABEL_NAME, defaultValue = "1m", javaType = "java.time.Duration")
private long connectionValidationTimeoutMs = 60000;
/**
* Controls how long the connector holds onto the global read lock while it
* is performing a snapshot. The default is 'minimal', which means the
* connector holds the global read lock (and thus prevents any updates) for
* just the initial portion of the snapshot while the database schemas and
* other metadata are being read. The remaining work in a snapshot involves
* selecting all rows from each table, and this can be done using the
* snapshot process' REPEATABLE READ transaction even when the lock is no
* longer held and other operations are updating the database. However, in
* some cases it may be desirable to block all writes for the entire
* duration of the snapshot; in such cases set this property to 'extended'.
* Using a value of 'none' will prevent the connector from acquiring any
* table locks during the snapshot process. This mode can only be used in
* combination with snapshot.mode values of 'schema_only' or
* 'schema_only_recovery' and is only safe to use if no schema changes are
* happening while the snapshot is taken.
*/
public void setSnapshotLockingMode(String snapshotLockingMode) {
this.snapshotLockingMode = snapshotLockingMode;
}
public String getSnapshotLockingMode() {
return snapshotLockingMode;
}
/**
* A semicolon-separated list of expressions that match fully-qualified
* tables and column(s) to be used as message key. Each expression must
* match the pattern '<fully-qualified table name>:<key columns>', where the
* table names could be defined as (DB_NAME.TABLE_NAME) or
* (SCHEMA_NAME.TABLE_NAME), depending on the specific connector, and the
* key columns are a comma-separated list of columns representing the custom
* key. For any table without an explicit key configuration the table's
* primary key column(s) will be used as message key. Example:
* dbserver1.inventory.orderlines:orderId,orderLineId;dbserver1.inventory.orders:id
*/
public void setMessageKeyColumns(String messageKeyColumns) {
this.messageKeyColumns = messageKeyColumns;
}
public String getMessageKeyColumns() {
return messageKeyColumns;
}
/**
* Class to make transaction context & transaction struct/schemas
*/
public void setTransactionMetadataFactory(String transactionMetadataFactory) {
this.transactionMetadataFactory = transactionMetadataFactory;
}
public String getTransactionMetadataFactory() {
return transactionMetadataFactory;
}
/**
* The custom metric tags will accept key-value pairs to customize the MBean
* object name which should be appended the end of regular name, each key
* would represent a tag for the MBean object name, and the corresponding
* value would be the value of that tag the key is. For example: k1=v1,k2=v2
*/
public void setCustomMetricTags(String customMetricTags) {
this.customMetricTags = customMetricTags;
}
public String getCustomMetricTags() {
return customMetricTags;
}
/**
* List of channels names that are enabled. Source channel is enabled by
* default
*/
public void setSignalEnabledChannels(String signalEnabledChannels) {
this.signalEnabledChannels = signalEnabledChannels;
}
public String getSignalEnabledChannels() {
return signalEnabledChannels;
}
/**
* Whether the connector should publish changes in the database schema to a
* Kafka topic with the same name as the database server ID. Each schema
* change will be recorded using a key that contains the database name and
* whose value include logical description of the new schema and optionally
* the DDL statement(s). The default is 'true'. This is independent of how
* the connector internally records database schema history.
*/
public void setIncludeSchemaChanges(boolean includeSchemaChanges) {
this.includeSchemaChanges = includeSchemaChanges;
}
public boolean isIncludeSchemaChanges() {
return includeSchemaChanges;
}
/**
* JDBC Driver
|
MySqlConnectorEmbeddedDebeziumConfiguration
|
java
|
quarkusio__quarkus
|
independent-projects/qute/debug/src/main/java/io/quarkus/qute/debug/agent/scopes/GlobalsScope.java
|
{
"start": 536,
"end": 2042
}
|
class ____ extends RemoteScope {
/** The resolution context from which global variables are extracted. */
private final transient ResolutionContext context;
/**
* Creates a new global scope.
*
* @param context the resolution context to extract global variables from
* @param frame the stack frame associated with this scope
* @param variablesRegistry the registry managing all debugger variables
*/
public GlobalsScope(ResolutionContext context, RemoteStackFrame frame, VariablesRegistry variablesRegistry) {
super("Globals", frame, variablesRegistry);
this.context = context;
}
/**
* Creates the variables for the global scope.
* <p>
* This method traverses the resolution context to the top-most parent,
* then fills variables from that context using {@link RemoteScope#fillVariables}.
* </p>
*
* @return a collection of {@link Variable} representing the global variables
*/
@Override
protected Collection<Variable> createVariables() {
Collection<Variable> variables = new ArrayList<>();
// Navigate to the top-most parent context
var globalContext = context;
while (globalContext.getParent() != null) {
globalContext = globalContext.getParent();
}
// Fill variables from the global context
fillVariables(getStackFrame(), globalContext, variables, getVariablesRegistry());
return variables;
}
}
|
GlobalsScope
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/aop/introduction/SuperRepo.java
|
{
"start": 649,
"end": 708
}
|
interface ____ {
Iterable<Integer> findAll();
}
|
SuperRepo
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/IgnoredPureGetterTest.java
|
{
"start": 2936,
"end": 3061
}
|
class ____ {
public abstract String name();
public abstract int legs();
public
|
Animal
|
java
|
apache__camel
|
components/camel-infinispan/camel-infinispan-common/src/main/java/org/apache/camel/component/infinispan/InfinispanQueryBuilder.java
|
{
"start": 973,
"end": 1163
}
|
interface ____ {
Query<?> build(BasicCache<?, ?> cache);
static InfinispanQueryBuilder create(String query) {
return cache -> cache.query(query);
}
}
|
InfinispanQueryBuilder
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/resource/basic/resource/MultiInterfaceResLocatorResource.java
|
{
"start": 152,
"end": 343
}
|
class ____ {
@Produces("text/plain")
@Path("test")
public Object resourceLocator() {
return new MultiInterfaceResLocatorSubresource();
}
}
|
MultiInterfaceResLocatorResource
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/bean/BeanMethodWithEmptyParameterAndNoMethodWithNoParameterIssueTest.java
|
{
"start": 3252,
"end": 3551
}
|
class ____ {
public static void doSomething(Exchange exchange) {
exchange.getIn().setHeader("foo", "bar");
}
public static void doSomething(Exchange exchange, String foo, String bar) {
exchange.getIn().setHeader(foo, bar);
}
}
}
|
MyOtherBean
|
java
|
elastic__elasticsearch
|
server/src/internalClusterTest/java/org/elasticsearch/cluster/NodeUsageStatsForThreadPoolsCollectorIT.java
|
{
"start": 1395,
"end": 7833
}
|
class ____ extends ESIntegTestCase {
@Override
protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal, otherSettings))
// Need to enable write load decider to enable node usage stats collection
.put(
WriteLoadConstraintSettings.WRITE_LOAD_DECIDER_ENABLED_SETTING.getKey(),
WriteLoadConstraintSettings.WriteLoadDeciderStatus.ENABLED
)
.build();
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return CollectionUtils.appendToCopy(super.nodePlugins(), MockTransportService.TestPlugin.class);
}
public void testMostRecentValueIsUsedWhenNodeRequestFails() {
final var dataNodeName = internalCluster().startDataOnlyNode();
final var dataNodeClusterService = internalCluster().getInstance(ClusterService.class, dataNodeName);
final var dataNodeTransportService = MockTransportService.getInstance(dataNodeName);
final var threadPoolName = randomFrom(ThreadPool.Names.GENERIC, ThreadPool.Names.WRITE, ThreadPool.Names.SEARCH);
// Intercept the node request and return some fake values
final int totalThreadPoolThreads = randomIntBetween(2, 40);
final float averageThreadPoolUtilization = randomFloatBetween(0.0f, 1.0f, true);
final long maxThreadPoolQueueLatencyMillis = randomLongBetween(0, 1000);
mockThreadPoolUsageStats(
dataNodeTransportService,
threadPoolName,
totalThreadPoolThreads,
averageThreadPoolUtilization,
maxThreadPoolQueueLatencyMillis
);
// This info should contain our fake values
refreshClusterInfoAndAssertThreadPoolHasStats(
dataNodeClusterService.localNode().getId(),
threadPoolName,
totalThreadPoolThreads,
averageThreadPoolUtilization,
maxThreadPoolQueueLatencyMillis
);
// Now simulate an error
dataNodeTransportService.clearInboundRules();
dataNodeTransportService.addRequestHandlingBehavior(
TransportNodeUsageStatsForThreadPoolsAction.NAME + "[n]",
(handler, request, channel, task) -> {
channel.sendResponse(new Exception("simulated error"));
}
);
// The next response should also contain our fake values
refreshClusterInfoAndAssertThreadPoolHasStats(
dataNodeClusterService.localNode().getId(),
threadPoolName,
totalThreadPoolThreads,
averageThreadPoolUtilization,
maxThreadPoolQueueLatencyMillis
);
// Now start returning values again
final int newTotalThreadPoolThreads = randomIntBetween(2, 40);
final float newAverageThreadPoolUtilization = randomFloatBetween(0.0f, 1.0f, true);
final long newMaxThreadPoolQueueLatencyMillis = randomLongBetween(0, 1000);
mockThreadPoolUsageStats(
dataNodeTransportService,
threadPoolName,
newTotalThreadPoolThreads,
newAverageThreadPoolUtilization,
newMaxThreadPoolQueueLatencyMillis
);
// The next response should contain the current values again
refreshClusterInfoAndAssertThreadPoolHasStats(
dataNodeClusterService.localNode().getId(),
threadPoolName,
newTotalThreadPoolThreads,
newAverageThreadPoolUtilization,
newMaxThreadPoolQueueLatencyMillis
);
}
private static void mockThreadPoolUsageStats(
MockTransportService dataNodeTransportService,
String threadPoolName,
int totalThreadPoolThreads,
float averageThreadPoolUtilization,
long maxThreadPoolQueueLatencyMillis
) {
dataNodeTransportService.clearInboundRules();
dataNodeTransportService.addRequestHandlingBehavior(
TransportNodeUsageStatsForThreadPoolsAction.NAME + "[n]",
(handler, request, channel, task) -> {
NodeUsageStatsForThreadPoolsAction.NodeResponse response = safeAwait(
l -> handler.messageReceived(
request,
new TestTransportChannel(l.map(res -> (NodeUsageStatsForThreadPoolsAction.NodeResponse) res)),
task
)
);
final var responseStats = response.getNodeUsageStatsForThreadPools();
channel.sendResponse(
new NodeUsageStatsForThreadPoolsAction.NodeResponse(
response.getNode(),
new NodeUsageStatsForThreadPools(
responseStats.nodeId(),
Maps.copyMapWithAddedOrReplacedEntry(
responseStats.threadPoolUsageStatsMap(),
threadPoolName,
new NodeUsageStatsForThreadPools.ThreadPoolUsageStats(
totalThreadPoolThreads,
averageThreadPoolUtilization,
maxThreadPoolQueueLatencyMillis
)
)
)
)
);
}
);
}
private void refreshClusterInfoAndAssertThreadPoolHasStats(
String nodeId,
String threadPoolName,
int totalThreadPoolThreads,
float averageThreadPoolUtilization,
long maxThreadPoolQueueLatencyMillis
) {
final var clusterInfo = Objects.requireNonNull(refreshClusterInfo());
final var usageStatsMap = clusterInfo.getNodeUsageStatsForThreadPools().get(nodeId).threadPoolUsageStatsMap();
assertThat(usageStatsMap, hasKey(threadPoolName));
final var threadPoolStats = usageStatsMap.get(threadPoolName);
assertThat(threadPoolStats.totalThreadPoolThreads(), equalTo(totalThreadPoolThreads));
assertThat(threadPoolStats.averageThreadPoolUtilization(), equalTo(averageThreadPoolUtilization));
assertThat(threadPoolStats.maxThreadPoolQueueLatencyMillis(), equalTo(maxThreadPoolQueueLatencyMillis));
}
}
|
NodeUsageStatsForThreadPoolsCollectorIT
|
java
|
FasterXML__jackson-core
|
src/test/java/tools/jackson/core/unittest/io/BufferRecyclerPoolTest.java
|
{
"start": 597,
"end": 3252
}
|
class ____ extends JacksonCoreTestBase
{
@Test
void noOp() throws Exception {
// no-op pool doesn't actually pool anything, so avoid checking it
checkBufferRecyclerPoolImpl(JsonRecyclerPools.nonRecyclingPool(), false, true);
}
@Test
void threadLocal() throws Exception {
checkBufferRecyclerPoolImpl(JsonRecyclerPools.threadLocalPool(), true, false);
}
@Test
void concurrentDequeue() throws Exception {
checkBufferRecyclerPoolImpl(JsonRecyclerPools.newConcurrentDequePool(), true, true);
}
@Test
void bounded() throws Exception {
checkBufferRecyclerPoolImpl(JsonRecyclerPools.newBoundedPool(1), true, true);
}
@Test
void pluggingPool() throws Exception {
checkBufferRecyclerPoolImpl(new TestPool(), true, true);
}
private void checkBufferRecyclerPoolImpl(RecyclerPool<BufferRecycler> pool,
boolean checkPooledResource,
boolean implementsClear)
throws Exception
{
JsonFactory jsonFactory = JsonFactory.builder()
.recyclerPool(pool)
.build();
BufferRecycler usedBufferRecycler = write("test", jsonFactory, 6);
if (checkPooledResource) {
// acquire the pooled BufferRecycler again and check if it is the same instance used before
BufferRecycler pooledBufferRecycler = pool.acquireAndLinkPooled();
assertSame(usedBufferRecycler, pooledBufferRecycler);
// might as well return it back
pooledBufferRecycler.releaseToPool();
}
// Also: check `clear()` method -- optional, but supported by all impls
// except for ThreadLocal-based one
if (implementsClear) {
assertTrue(pool.clear());
// cannot easily verify anything else except that we do NOT get the same recycled instance
BufferRecycler br2 = pool.acquireAndLinkPooled();
assertNotNull(br2);
assertNotSame(usedBufferRecycler, br2);
} else {
assertFalse(pool.clear());
}
}
protected final BufferRecycler write(String value, JsonFactory jsonFactory, int expectedSize) {
BufferRecycler bufferRecycler;
NopOutputStream out = new NopOutputStream();
try (JsonGenerator gen = jsonFactory.createGenerator(ObjectWriteContext.empty(), out)) {
bufferRecycler = ((GeneratorBase) gen).ioContext().bufferRecycler();
gen.writeString(value);
}
assertEquals(expectedSize, out.size);
return bufferRecycler;
}
private static
|
BufferRecyclerPoolTest
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/SnmpEndpointBuilderFactory.java
|
{
"start": 57765,
"end": 71105
}
|
interface ____
extends
SnmpEndpointConsumerBuilder,
SnmpEndpointProducerBuilder {
default AdvancedSnmpEndpointBuilder advanced() {
return (AdvancedSnmpEndpointBuilder) this;
}
/**
* Defines which values you are interested in. Please have a look at the
* Wikipedia to get a better understanding. You may provide a single OID
* or a coma separated list of OIDs. Example:
* oids=1.3.6.1.2.1.1.3.0,1.3.6.1.2.1.25.3.2.1.5.1,1.3.6.1.2.1.25.3.5.1.1.1,1.3.6.1.2.1.43.5.1.1.11.1.
*
* The option is a: <code>org.apache.camel.component.snmp.OIDList</code>
* type.
*
* Group: common
*
* @param oids the value to set
* @return the dsl builder
*/
default SnmpEndpointBuilder oids(org.apache.camel.component.snmp.OIDList oids) {
doSetProperty("oids", oids);
return this;
}
/**
* Defines which values you are interested in. Please have a look at the
* Wikipedia to get a better understanding. You may provide a single OID
* or a coma separated list of OIDs. Example:
* oids=1.3.6.1.2.1.1.3.0,1.3.6.1.2.1.25.3.2.1.5.1,1.3.6.1.2.1.25.3.5.1.1.1,1.3.6.1.2.1.43.5.1.1.11.1.
*
* The option will be converted to a
* <code>org.apache.camel.component.snmp.OIDList</code> type.
*
* Group: common
*
* @param oids the value to set
* @return the dsl builder
*/
default SnmpEndpointBuilder oids(String oids) {
doSetProperty("oids", oids);
return this;
}
/**
* Here you can select which protocol to use. You can use either udp or
* tcp.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: udp
* Group: common
*
* @param protocol the value to set
* @return the dsl builder
*/
default SnmpEndpointBuilder protocol(String protocol) {
doSetProperty("protocol", protocol);
return this;
}
/**
* Defines how often a retry is made before canceling the request.
*
* The option is a: <code>int</code> type.
*
* Default: 2
* Group: common
*
* @param retries the value to set
* @return the dsl builder
*/
default SnmpEndpointBuilder retries(int retries) {
doSetProperty("retries", retries);
return this;
}
/**
* Defines how often a retry is made before canceling the request.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 2
* Group: common
*
* @param retries the value to set
* @return the dsl builder
*/
default SnmpEndpointBuilder retries(String retries) {
doSetProperty("retries", retries);
return this;
}
/**
* Sets the community octet string for the snmp request.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: public
* Group: common
*
* @param snmpCommunity the value to set
* @return the dsl builder
*/
default SnmpEndpointBuilder snmpCommunity(String snmpCommunity) {
doSetProperty("snmpCommunity", snmpCommunity);
return this;
}
/**
* Sets the context engine ID field of the scoped PDU.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param snmpContextEngineId the value to set
* @return the dsl builder
*/
default SnmpEndpointBuilder snmpContextEngineId(String snmpContextEngineId) {
doSetProperty("snmpContextEngineId", snmpContextEngineId);
return this;
}
/**
* Sets the context name field of this scoped PDU.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param snmpContextName the value to set
* @return the dsl builder
*/
default SnmpEndpointBuilder snmpContextName(String snmpContextName) {
doSetProperty("snmpContextName", snmpContextName);
return this;
}
/**
* Sets the snmp version for the request. The value 0 means SNMPv1, 1
* means SNMPv2c, and the value 3 means SNMPv3.
*
* The option is a: <code>int</code> type.
*
* Default: 0
* Group: common
*
* @param snmpVersion the value to set
* @return the dsl builder
*/
default SnmpEndpointBuilder snmpVersion(int snmpVersion) {
doSetProperty("snmpVersion", snmpVersion);
return this;
}
/**
* Sets the snmp version for the request. The value 0 means SNMPv1, 1
* means SNMPv2c, and the value 3 means SNMPv3.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 0
* Group: common
*
* @param snmpVersion the value to set
* @return the dsl builder
*/
default SnmpEndpointBuilder snmpVersion(String snmpVersion) {
doSetProperty("snmpVersion", snmpVersion);
return this;
}
/**
* Sets the timeout value for the request in millis.
*
* The option is a: <code>int</code> type.
*
* Default: 1500
* Group: common
*
* @param timeout the value to set
* @return the dsl builder
*/
default SnmpEndpointBuilder timeout(int timeout) {
doSetProperty("timeout", timeout);
return this;
}
/**
* Sets the timeout value for the request in millis.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 1500
* Group: common
*
* @param timeout the value to set
* @return the dsl builder
*/
default SnmpEndpointBuilder timeout(String timeout) {
doSetProperty("timeout", timeout);
return this;
}
/**
* Which operation to perform such as poll, trap, etc.
*
* The option is a:
* <code>org.apache.camel.component.snmp.SnmpActionType</code> type.
*
* Group: common
*
* @param type the value to set
* @return the dsl builder
*/
default SnmpEndpointBuilder type(org.apache.camel.component.snmp.SnmpActionType type) {
doSetProperty("type", type);
return this;
}
/**
* Which operation to perform such as poll, trap, etc.
*
* The option will be converted to a
* <code>org.apache.camel.component.snmp.SnmpActionType</code> type.
*
* Group: common
*
* @param type the value to set
* @return the dsl builder
*/
default SnmpEndpointBuilder type(String type) {
doSetProperty("type", type);
return this;
}
/**
* The authentication passphrase. If not null, authenticationProtocol
* must also be not null. RFC3414 11.2 requires passphrases to have a
* minimum length of 8 bytes. If the length of authenticationPassphrase
* is less than 8 bytes an IllegalArgumentException is thrown.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param authenticationPassphrase the value to set
* @return the dsl builder
*/
default SnmpEndpointBuilder authenticationPassphrase(String authenticationPassphrase) {
doSetProperty("authenticationPassphrase", authenticationPassphrase);
return this;
}
/**
* Authentication protocol to use if security level is set to enable
* authentication The possible values are: MD5, SHA1.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param authenticationProtocol the value to set
* @return the dsl builder
*/
default SnmpEndpointBuilder authenticationProtocol(String authenticationProtocol) {
doSetProperty("authenticationProtocol", authenticationProtocol);
return this;
}
/**
* The privacy passphrase. If not null, privacyProtocol must also be not
* null. RFC3414 11.2 requires passphrases to have a minimum length of 8
* bytes. If the length of authenticationPassphrase is less than 8 bytes
* an IllegalArgumentException is thrown.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param privacyPassphrase the value to set
* @return the dsl builder
*/
default SnmpEndpointBuilder privacyPassphrase(String privacyPassphrase) {
doSetProperty("privacyPassphrase", privacyPassphrase);
return this;
}
/**
* The privacy protocol ID to be associated with this user. If set to
* null, this user only supports unencrypted messages.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param privacyProtocol the value to set
* @return the dsl builder
*/
default SnmpEndpointBuilder privacyProtocol(String privacyProtocol) {
doSetProperty("privacyProtocol", privacyProtocol);
return this;
}
/**
* Sets the security level for this target. The supplied security level
* must be supported by the security model dependent information
* associated with the security name set for this target. The value 1
* means: No authentication and no encryption. Anyone can create and
* read messages with this security level The value 2 means:
* Authentication and no encryption. Only the one with the right
* authentication key can create messages with this security level, but
* anyone can read the contents of the message. The value 3 means:
* Authentication and encryption. Only the one with the right
* authentication key can create messages with this security level, and
* only the one with the right encryption/decryption key can read the
* contents of the message.
*
* The option is a: <code>int</code> type.
*
* Default: 3
* Group: security
*
* @param securityLevel the value to set
* @return the dsl builder
*/
default SnmpEndpointBuilder securityLevel(int securityLevel) {
doSetProperty("securityLevel", securityLevel);
return this;
}
/**
* Sets the security level for this target. The supplied security level
* must be supported by the security model dependent information
* associated with the security name set for this target. The value 1
* means: No authentication and no encryption. Anyone can create and
* read messages with this security level The value 2 means:
* Authentication and no encryption. Only the one with the right
* authentication key can create messages with this security level, but
* anyone can read the contents of the message. The value 3 means:
* Authentication and encryption. Only the one with the right
* authentication key can create messages with this security level, and
* only the one with the right encryption/decryption key can read the
* contents of the message.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 3
* Group: security
*
* @param securityLevel the value to set
* @return the dsl builder
*/
default SnmpEndpointBuilder securityLevel(String securityLevel) {
doSetProperty("securityLevel", securityLevel);
return this;
}
/**
* Sets the security name to be used with this target.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param securityName the value to set
* @return the dsl builder
*/
default SnmpEndpointBuilder securityName(String securityName) {
doSetProperty("securityName", securityName);
return this;
}
}
/**
* Advanced builder for endpoint for the SNMP component.
*/
public
|
SnmpEndpointBuilder
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/bean/override/mockito/MockitoBeanManuallyRegisteredSingletonTests.java
|
{
"start": 1971,
"end": 2053
}
|
class ____ {
String getMessage() {
return "production";
}
}
}
|
MessageService
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/index/mapper/ShortSyntheticSourceNativeArrayIntegrationTests.java
|
{
"start": 586,
"end": 987
}
|
class ____ extends NativeArrayIntegrationTestCase {
@Override
protected String getFieldTypeName() {
return "short";
}
@Override
protected Short getRandomValue() {
return randomShort();
}
@Override
protected String getMalformedValue() {
return RandomStrings.randomAsciiOfLength(random(), 8);
}
}
|
ShortSyntheticSourceNativeArrayIntegrationTests
|
java
|
apache__flink
|
flink-test-utils-parent/flink-connector-test-utils/src/main/java/org/apache/flink/connector/testutils/source/TestingJobInfo.java
|
{
"start": 1003,
"end": 1453
}
|
class ____ implements JobInfo {
private final JobID jobID;
private final String jobName;
public TestingJobInfo(JobID jobID, String jobName) {
this.jobID = jobID;
this.jobName = jobName;
}
@Override
public JobID getJobId() {
return jobID;
}
@Override
public String getJobName() {
return jobName;
}
/** Builder for {@link TestingJobInfo}. */
public static
|
TestingJobInfo
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/RowType.java
|
{
"start": 2290,
"end": 2731
}
|
class ____ extends LogicalType {
private static final long serialVersionUID = 1L;
public static final String FORMAT = "ROW<%s>";
private static final Set<String> INPUT_OUTPUT_CONVERSION =
conversionSet(Row.class.getName(), RowData.class.getName());
private static final Class<?> DEFAULT_CONVERSION = Row.class;
/** Describes a field of a {@link RowType}. */
@PublicEvolving
public static final
|
RowType
|
java
|
apache__camel
|
components/camel-kafka/src/main/java/org/apache/camel/component/kafka/KafkaComponent.java
|
{
"start": 11536,
"end": 11719
}
|
class ____)
PropertyBindingSupport.bindProperties(getCamelContext(), map, configuration.getAdditionalProperties());
configuration.setAdditionalProperties(map);
}
}
|
etc
|
java
|
apache__maven
|
its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng5965ParallelBuildMultipliesWorkTest.java
|
{
"start": 1165,
"end": 2894
}
|
class ____ extends AbstractMavenIntegrationTestCase {
@Test
public void testItShouldOnlyRunEachTaskOnce() throws Exception {
File testDir = extractResources("/mng-5965-parallel-build-multiplies-work");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.setAutoclean(false);
verifier.setLogFileName("log-only.txt");
verifier.addCliArgument("-T1");
// include an aggregator task so that the two goals end up in different task segments
verifier.addCliArguments("clean", "install:help");
verifier.execute();
verifier.verifyErrorFreeLog();
List<String> logLines = verifier.loadLines("log-only.txt");
List<String> cleanGoalsExecuted = findCleanExecutions(logLines);
// clean only executed once per module
assertNoRepeatedLines(cleanGoalsExecuted);
// clean executed in the 3 modules
assertEquals(cleanGoalsExecuted.size(), 3);
}
private void assertNoRepeatedLines(List<String> logLines) throws VerificationException {
Set<String> uniqueLines = new LinkedHashSet<>();
for (String line : logLines) {
if (uniqueLines.contains(line)) {
throw new VerificationException("Goal executed twice: " + line);
}
uniqueLines.add(line);
}
}
private List<String> findCleanExecutions(List<String> fullLog) {
List<String> cleanExecutions = new ArrayList<>();
for (String line : fullLog) {
if (line.contains("(default-clean)")) {
cleanExecutions.add(line);
}
}
return cleanExecutions;
}
}
|
MavenITmng5965ParallelBuildMultipliesWorkTest
|
java
|
alibaba__nacos
|
address/src/main/java/com/alibaba/nacos/address/misc/Loggers.java
|
{
"start": 799,
"end": 929
}
|
class ____ {
public static final Logger ADDRESS_LOGGER = LoggerFactory.getLogger("com.alibaba.nacos.address.main");
}
|
Loggers
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/any/annotations/CharProperty.java
|
{
"start": 369,
"end": 1073
}
|
class ____ implements Property {
private Integer id;
private String name;
private Character value;
public CharProperty() {
super();
}
public CharProperty(String name, Character value) {
super();
this.name = name;
this.value = value;
}
public String asString() {
return Character.toString( value );
}
public String getName() {
return name;
}
@Id
@GeneratedValue
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
@Column(name = "`value`")
public Character getValue() {
return value;
}
public void setValue(Character value) {
this.value = value;
}
public void setName(String name) {
this.name = name;
}
}
|
CharProperty
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/sql/ast/SqlTreeCreationLogger.java
|
{
"start": 386,
"end": 538
}
|
interface ____ {
String LOGGER_NAME = SubSystemLogging.BASE + ".sql.ast.create";
Logger LOGGER = Logger.getLogger( LOGGER_NAME );
}
|
SqlTreeCreationLogger
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/JobModelSnapshotUpgrader.java
|
{
"start": 3314,
"end": 11992
}
|
class ____ {
private static final Duration FLUSH_PROCESS_CHECK_FREQUENCY = Duration.ofSeconds(1);
private static final Logger logger = LogManager.getLogger(JobModelSnapshotUpgrader.class);
private final SnapshotUpgradeTask task;
private final Job job;
private final String jobId;
private final String snapshotId;
private final AutodetectParams params;
private final Client client;
private final Consumer<Exception> onFinish;
private final Supplier<Boolean> continueRunning;
private final ThreadPool threadPool;
private final AutodetectProcessFactory autodetectProcessFactory;
private final JobResultsPersister jobResultsPersister;
private final NativeStorageProvider nativeStorageProvider;
// Not volatile as only used in synchronized methods
private AutodetectProcess process;
private JobSnapshotUpgraderResultProcessor processor;
JobModelSnapshotUpgrader(
SnapshotUpgradeTask task,
Job job,
AutodetectParams params,
ThreadPool threadPool,
AutodetectProcessFactory autodetectProcessFactory,
JobResultsPersister jobResultsPersister,
Client client,
NativeStorageProvider nativeStorageProvider,
Consumer<Exception> onFinish,
Supplier<Boolean> continueRunning
) {
this.task = Objects.requireNonNull(task);
this.job = Objects.requireNonNull(job);
this.params = Objects.requireNonNull(params);
this.threadPool = Objects.requireNonNull(threadPool);
this.autodetectProcessFactory = Objects.requireNonNull(autodetectProcessFactory);
this.jobResultsPersister = Objects.requireNonNull(jobResultsPersister);
this.nativeStorageProvider = Objects.requireNonNull(nativeStorageProvider);
this.client = Objects.requireNonNull(client);
this.onFinish = Objects.requireNonNull(onFinish);
this.continueRunning = Objects.requireNonNull(continueRunning);
this.jobId = task.getJobId();
this.snapshotId = task.getSnapshotId();
}
synchronized void start() {
if (task.setJobModelSnapshotUpgrader(this) == false) {
this.killProcess(task.getReasonCancelled());
return;
}
// A TP with no queue, so that we fail immediately if there are no threads available
ExecutorService autodetectExecutorService = threadPool.executor(MachineLearning.JOB_COMMS_THREAD_POOL_NAME);
process = autodetectProcessFactory.createAutodetectProcess(
jobId + "-" + snapshotId,
job,
params,
autodetectExecutorService,
(reason) -> {
setTaskToFailed(reason, ActionListener.wrap(t -> {}, task::markAsFailed));
try {
nativeStorageProvider.cleanupLocalTmpStorage(task.getDescription());
} catch (IOException e) {
logger.error(() -> format("[%s] [%s] failed to delete temporary files snapshot upgrade", jobId, snapshotId), e);
}
}
);
processor = new JobSnapshotUpgraderResultProcessor(jobId, snapshotId, jobResultsPersister, process);
ProcessWorkerExecutorService autodetectWorkerExecutor;
try (ThreadContext.StoredContext ignore = threadPool.getThreadContext().stashContext()) {
autodetectWorkerExecutor = new AutodetectWorkerExecutorService(threadPool.getThreadContext());
autodetectExecutorService.submit(autodetectWorkerExecutor::start);
autodetectExecutorService.submit(processor::process);
} catch (EsRejectedExecutionException e) {
// If submitting the operation to read the results from the process fails we need to close
// the process too, so that other submitted operations to threadpool are stopped.
try {
IOUtils.close(process);
process = null;
processor = null;
} catch (IOException ioe) {
logger.error("Can't close autodetect", ioe);
}
onFinish.accept(e);
return;
}
StateStreamer stateStreamer = new StateStreamer(client);
Executor executor = new Executor(stateStreamer, processor, autodetectWorkerExecutor, process);
if (continueRunning.get() == false) {
onFinish.accept(null);
return;
}
executor.execute();
}
private void removeDuplicateModelSnapshotDoc(Consumer<Exception> runAfter) {
String snapshotDocId = jobId + "_model_snapshot_" + snapshotId;
client.prepareSearch(AnomalyDetectorsIndex.jobResultsIndexPattern())
.setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds(snapshotDocId)))
.setSize(2)
.addSort(ModelSnapshot.MIN_VERSION.getPreferredName(), org.elasticsearch.search.sort.SortOrder.ASC)
.execute(ActionListener.wrap(searchResponse -> {
if (searchResponse.getHits().getTotalHits().value() > 1) {
deleteOlderSnapshotDoc(searchResponse, runAfter);
} else {
onFinish.accept(null);
}
}, e -> {
logger.warn(() -> format("[%s] [%s] error during search for model snapshot documents", jobId, snapshotId), e);
onFinish.accept(null);
}));
}
private void deleteOlderSnapshotDoc(SearchResponse searchResponse, Consumer<Exception> runAfter) {
SearchHit firstHit = searchResponse.getHits().getAt(0);
logger.debug(() -> format("[%s] deleting duplicate model snapshot doc [%s]", jobId, firstHit.getId()));
client.prepareDelete()
.setIndex(firstHit.getIndex())
.setId(firstHit.getId())
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.execute(ActionListener.runAfter(ActionListener.wrap(deleteResponse -> {
if ((deleteResponse.getResult() == DocWriteResponse.Result.DELETED) == false) {
logger.warn(
() -> format(
"[%s] [%s] failed to delete old snapshot [%s] result document, document not found",
jobId,
snapshotId,
ModelSizeStats.RESULT_TYPE_FIELD.getPreferredName()
)
);
}
}, e -> {
logger.warn(() -> {
String baseMessage = format(
"[%s] [%s] failed to delete old snapshot [%s] result document",
jobId,
snapshotId,
ModelSizeStats.RESULT_TYPE_FIELD.getPreferredName()
);
if (e instanceof org.elasticsearch.cluster.block.ClusterBlockException) {
return baseMessage
+ ". Remove the write block from the results index to delete the model snapshot. See "
+ ReferenceDocs.DELETE_INDEX_BLOCK
+ " for details.";
}
return baseMessage;
}, e);
}), () -> runAfter.accept(null)));
}
void setTaskToFailed(String reason, ActionListener<PersistentTask<?>> listener) {
SnapshotUpgradeTaskState taskState = new SnapshotUpgradeTaskState(SnapshotUpgradeState.FAILED, task.getAllocationId(), reason);
task.updatePersistentTaskState(taskState, ActionListener.wrap(listener::onResponse, f -> {
logger.warn(() -> format("[%s] [%s] failed to set task to failed", task.getJobId(), task.getSnapshotId()), f);
listener.onFailure(f);
}));
}
/**
 * Kills the upgrade process for this model snapshot, if one is currently running.
 * Synchronized so the process/processor pair is torn down atomically with respect
 * to other synchronized lifecycle operations on this object; an absent process is
 * logged as a warning and otherwise ignored.
 *
 * @param reason logged explanation for why the process is being killed
 */
public synchronized void killProcess(String reason) {
if (process != null) {
try {
logger.debug("[{}] killing upgrade process for model snapshot [{}]: reason [{}]", jobId, snapshotId, reason);
if (processor != null) {
// Mark the processor as killed first so it stops consuming output from the dying process.
processor.setProcessKilled();
}
process.kill(true);
// Clear both references so a repeated call takes the no-op branch below.
process = null;
processor = null;
} catch (IOException e) {
logger.error(() -> format("[%s] failed to kill upgrade process for model snapshot [%s]", jobId, snapshotId), e);
}
} else {
logger.warn("[{}] attempt to kill upgrade process for model snapshot [{}] when no such process exists", jobId, snapshotId);
}
}
private
|
JobModelSnapshotUpgrader
|
java
|
dropwizard__dropwizard
|
dropwizard-auth/src/main/java/io/dropwizard/auth/basic/BasicCredentialAuthFilter.java
|
{
"start": 2608,
"end": 2888
}
|
/**
 * Builder for {@link BasicCredentialAuthFilter} instances; all configuration
 * plumbing is inherited from {@link AuthFilterBuilder}.
 *
 * @param <P> the principal type produced by the filter
 */
class ____<P extends Principal> extends
AuthFilterBuilder<BasicCredentials, P, BasicCredentialAuthFilter<P>> {
@Override
protected BasicCredentialAuthFilter<P> newInstance() {
// Supplies the fresh, unconfigured filter instance that the base builder populates.
return new BasicCredentialAuthFilter<>();
}
}
}
|
Builder
|
java
|
spring-projects__spring-security
|
config/src/main/java/org/springframework/security/config/http/HttpConfigurationBuilder.java
|
{
"start": 5779,
"end": 5939
}
|
class ____ helps HttpSecurityBDP to create the configuration for the
* <http> element.
*
* @author Luke Taylor
* @author Rob Winch
* @since 3.0
*/
|
which
|
java
|
spring-projects__spring-framework
|
spring-beans/src/test/java/org/springframework/beans/factory/support/AutowireUtilsTests.java
|
{
"start": 4140,
"end": 4616
}
|
class ____ of Object.class, but this
// information is not available at run-time due to type erasure.
Map<Integer, Boolean> map = new HashMap<>();
map.put(0, false);
map.put(1, true);
Method extractMagicValue = ReflectionUtils.findMethod(MyTypeWithMethods.class, "extractMagicValue", Map.class);
assertThat(AutowireUtils.resolveReturnTypeForFactoryMethod(extractMagicValue, new Object[]{map}, getClass().getClassLoader())).isEqualTo(Object.class);
}
public
|
instead
|
java
|
apache__flink
|
flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/tasks/StreamTaskTest.java
|
{
"start": 104339,
"end": 105894
}
|
/**
 * A {@link StreamInputProcessor} that never produces data: each call to
 * {@link #processInput()} reports either end-of-input or "nothing available",
 * depending on whether the processor has been marked finished.
 */
class ____ implements StreamInputProcessor {

    // Volatile so a finishInput() call from another thread is visible to processInput().
    private volatile boolean finished;

    public EmptyInputProcessor() {
        this(true);
    }

    public EmptyInputProcessor(boolean startFinished) {
        finished = startFinished;
    }

    @Override
    public DataInputStatus processInput() throws Exception {
        if (finished) {
            return DataInputStatus.END_OF_INPUT;
        }
        return DataInputStatus.NOTHING_AVAILABLE;
    }

    @Override
    public CompletableFuture<Void> prepareSnapshot(
            ChannelStateWriter channelStateWriter, long checkpointId)
            throws CheckpointException {
        // There is never any in-flight data to persist.
        return FutureUtils.completedVoidFuture();
    }

    @Override
    public void close() throws IOException {}

    @Override
    public CompletableFuture<?> getAvailableFuture() {
        return AVAILABLE;
    }

    /** Switches the processor into the end-of-input state. */
    public void finishInput() {
        finished = true;
    }
}
private static MockStreamTask createMockStreamTask(
Environment env, OperatorChain<String, AbstractStreamOperator<String>> operatorChain)
throws Exception {
return new MockStreamTask(env, operatorChain, FatalExitExceptionHandler.INSTANCE);
}
/**
* Source that instantiates the operator state backend and the keyed state backend. The created
* state backends can be retrieved from the static fields to check if the CloseableRegistry
* closed them correctly.
*/
public static
|
EmptyInputProcessor
|
java
|
alibaba__fastjson
|
src/main/java/com/alibaba/fastjson/JSONPath.java
|
{
"start": 103176,
"end": 104422
}
|
/**
 * JSONPath filter that tests whether an integer-valued property is contained in
 * (or, when {@code not} is set, absent from) a fixed candidate set of {@link Long}
 * values, where the candidate set may contain {@code null} to match a null property.
 */
class ____ extends PropertyFilter {

    private final Long[] values;
    private final boolean not;

    public IntObjInSegement(String propertyName, boolean function, Long[] values, boolean not) {
        super(propertyName, function);
        this.values = values;
        this.not = not;
    }

    public boolean apply(JSONPath path, Object rootObject, Object currentObject, Object item) {
        Object propertyValue = get(path, rootObject, item);

        if (propertyValue == null) {
            // A null property matches only when the candidate set itself contains null.
            boolean containsNull = false;
            for (int i = 0; i < values.length; ++i) {
                if (values[i] == null) {
                    containsNull = true;
                    break;
                }
            }
            return containsNull ? !not : not;
        }

        if (propertyValue instanceof Number) {
            long longPropertyValue = TypeUtils.longExtractValue((Number) propertyValue);
            for (Long candidate : values) {
                // null candidates only pair with a null property, handled above.
                if (candidate != null && candidate.longValue() == longPropertyValue) {
                    return !not;
                }
            }
        }

        // Non-numeric or unmatched values are "not in" the set.
        return not;
    }
}
static
|
IntObjInSegement
|
java
|
quarkusio__quarkus
|
extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AgroalProcessor.java
|
{
"start": 3252,
"end": 12692
}
|
class ____ {
private static final Logger log = Logger.getLogger(AgroalProcessor.class);
private static final String OPEN_TELEMETRY_DRIVER = "io.opentelemetry.instrumentation.jdbc.OpenTelemetryDriver";
private static final DotName DATA_SOURCE = DotName.createSimple(javax.sql.DataSource.class.getName());
private static final DotName AGROAL_DATA_SOURCE = DotName.createSimple(AgroalDataSource.class.getName());
@BuildStep
void agroal(BuildProducer<FeatureBuildItem> feature) {
feature.produce(new FeatureBuildItem(Feature.AGROAL));
}
@BuildStep
void build(
DataSourcesBuildTimeConfig dataSourcesBuildTimeConfig,
DataSourcesJdbcBuildTimeConfig dataSourcesJdbcBuildTimeConfig,
List<DefaultDataSourceDbKindBuildItem> defaultDbKinds,
List<JdbcDriverBuildItem> jdbcDriverBuildItems,
BuildProducer<ReflectiveClassBuildItem> reflectiveClass,
BuildProducer<NativeImageResourceBuildItem> resource,
BuildProducer<ServiceProviderBuildItem> service,
Capabilities capabilities,
BuildProducer<ExtensionSslNativeSupportBuildItem> sslNativeSupport,
BuildProducer<AggregatedDataSourceBuildTimeConfigBuildItem> aggregatedConfig,
BuildProducer<AdditionalBeanBuildItem> additionalBeans,
CurateOutcomeBuildItem curateOutcomeBuildItem) throws Exception {
if (dataSourcesBuildTimeConfig.driver().isPresent() || dataSourcesBuildTimeConfig.url().isPresent()) {
throw new ConfigurationException(
"quarkus.datasource.url and quarkus.datasource.driver have been deprecated in Quarkus 1.3 and removed in 1.9. "
+ "Please use the new datasource configuration as explained in https://quarkus.io/guides/datasource.");
}
List<AggregatedDataSourceBuildTimeConfigBuildItem> aggregatedDataSourceBuildTimeConfigs = getAggregatedConfigBuildItems(
dataSourcesBuildTimeConfig,
dataSourcesJdbcBuildTimeConfig, curateOutcomeBuildItem,
jdbcDriverBuildItems, defaultDbKinds);
if (aggregatedDataSourceBuildTimeConfigs.isEmpty()) {
log.warn("The Agroal dependency is present but no JDBC datasources have been defined.");
return;
}
boolean otelJdbcInstrumentationActive = false;
for (AggregatedDataSourceBuildTimeConfigBuildItem aggregatedDataSourceBuildTimeConfig : aggregatedDataSourceBuildTimeConfigs) {
validateBuildTimeConfig(aggregatedDataSourceBuildTimeConfig);
if (aggregatedDataSourceBuildTimeConfig.getJdbcConfig().telemetry()) {
otelJdbcInstrumentationActive = true;
}
reflectiveClass
.produce(ReflectiveClassBuildItem.builder(aggregatedDataSourceBuildTimeConfig.getResolvedDriverClass())
.methods().build());
aggregatedConfig.produce(aggregatedDataSourceBuildTimeConfig);
}
if (otelJdbcInstrumentationActive && capabilities.isPresent(OPENTELEMETRY_TRACER)) {
// at least one datasource is using OpenTelemetry JDBC instrumentation,
// therefore we register the OpenTelemetry data source wrapper bean
additionalBeans.produce(new AdditionalBeanBuildItem.Builder()
.addBeanClass(AgroalOpenTelemetryWrapper.class)
.setDefaultScope(DotNames.SINGLETON).build());
}
// For now, we can't push the security providers to Agroal so we need to include
// the service file inside the image. Hopefully, we will get an entry point to
// resolve them at build time and push them to Agroal soon.
resource.produce(new NativeImageResourceBuildItem(
"META-INF/services/" + io.agroal.api.security.AgroalSecurityProvider.class.getName()));
// accessed through io.quarkus.agroal.runtime.DataSources.loadDriversInTCCL
service.produce(ServiceProviderBuildItem.allProvidersFromClassPath(Driver.class.getName()));
reflectiveClass.produce(ReflectiveClassBuildItem.builder(io.agroal.pool.ConnectionHandler[].class.getName(),
io.agroal.pool.ConnectionHandler.class.getName(),
io.agroal.api.security.AgroalDefaultSecurityProvider.class.getName(),
io.agroal.api.security.AgroalKerberosSecurityProvider.class.getName(),
java.sql.Statement[].class.getName(),
java.sql.Statement.class.getName(),
java.sql.ResultSet.class.getName(),
java.sql.ResultSet[].class.getName()).build());
// Enable SSL support by default
sslNativeSupport.produce(new ExtensionSslNativeSupportBuildItem(Feature.AGROAL.getName()));
}
private static void validateBuildTimeConfig(AggregatedDataSourceBuildTimeConfigBuildItem aggregatedConfig) {
DataSourceJdbcBuildTimeConfig jdbcBuildTimeConfig = aggregatedConfig.getJdbcConfig();
String fullDataSourceName = aggregatedConfig.isDefault() ? "default datasource"
: "datasource named '" + aggregatedConfig.getName() + "'";
String driverName = aggregatedConfig.getResolvedDriverClass();
Class<?> driver;
try {
driver = Class.forName(driverName, true, Thread.currentThread().getContextClassLoader());
} catch (ClassNotFoundException e) {
throw new ConfigurationException(
"Unable to load the datasource driver " + driverName + " for the " + fullDataSourceName, e);
}
if (jdbcBuildTimeConfig.transactions() == TransactionIntegration.XA) {
if (!XADataSource.class.isAssignableFrom(driver)) {
throw new ConfigurationException(
"Driver is not an XA dataSource, while XA has been enabled in the configuration of the "
+ fullDataSourceName + ": either disable XA or switch the driver to an XADataSource");
}
} else {
if (driver != null && !javax.sql.DataSource.class.isAssignableFrom(driver)
&& !Driver.class.isAssignableFrom(driver)) {
if (aggregatedConfig.isDefault()) {
throw new ConfigurationException(
"Driver " + driverName
+ " is an XA datasource, but XA transactions have not been enabled on the default datasource; please either set 'quarkus.datasource.jdbc.transactions=xa' or switch to a standard non-XA JDBC driver implementation");
} else {
throw new ConfigurationException(
"Driver " + driverName
+ " is an XA datasource, but XA transactions have not been enabled on the datasource named '"
+ fullDataSourceName + "'; please either set 'quarkus.datasource." + fullDataSourceName
+ ".jdbc.transactions=xa' or switch to a standard non-XA JDBC driver implementation");
}
}
}
}
private AgroalDataSourceSupport getDataSourceSupport(
List<AggregatedDataSourceBuildTimeConfigBuildItem> aggregatedBuildTimeConfigBuildItems,
SslNativeConfigBuildItem sslNativeConfig, Capabilities capabilities) {
Map<String, AgroalDataSourceSupport.Entry> dataSourceSupportEntries = new HashMap<>();
for (AggregatedDataSourceBuildTimeConfigBuildItem aggregatedDataSourceBuildTimeConfig : aggregatedBuildTimeConfigBuildItems) {
String dataSourceName = aggregatedDataSourceBuildTimeConfig.getName();
dataSourceSupportEntries.put(dataSourceName,
new AgroalDataSourceSupport.Entry(dataSourceName, aggregatedDataSourceBuildTimeConfig.getDbKind(),
aggregatedDataSourceBuildTimeConfig.getDataSourceConfig().dbVersion(),
aggregatedDataSourceBuildTimeConfig.getResolvedDriverClass(),
aggregatedDataSourceBuildTimeConfig.isDefault()));
}
return new AgroalDataSourceSupport(sslNativeConfig.isExplicitlyDisabled(),
capabilities.isPresent(Capability.METRICS), dataSourceSupportEntries);
}
@Record(ExecutionTime.STATIC_INIT)
@BuildStep
void generateDataSourceSupportBean(AgroalRecorder recorder,
List<AggregatedDataSourceBuildTimeConfigBuildItem> aggregatedBuildTimeConfigBuildItems,
SslNativeConfigBuildItem sslNativeConfig,
Capabilities capabilities,
BuildProducer<AdditionalBeanBuildItem> additionalBeans,
BuildProducer<SyntheticBeanBuildItem> syntheticBeanBuildItemBuildProducer,
BuildProducer<UnremovableBeanBuildItem> unremovableBeans) {
additionalBeans.produce(new AdditionalBeanBuildItem(JdbcDriver.class));
if (aggregatedBuildTimeConfigBuildItems.isEmpty()) {
// No datasource has been configured so bail out
return;
}
// make a DataSources bean
additionalBeans.produce(AdditionalBeanBuildItem.builder().addBeanClasses(DataSources.class).setUnremovable()
.setDefaultScope(DotNames.SINGLETON).build());
// add the @DataSource
|
AgroalProcessor
|
java
|
spring-projects__spring-boot
|
module/spring-boot-elasticsearch/src/main/java/org/springframework/boot/elasticsearch/testcontainers/ElasticsearchContainerConnectionDetailsFactory.java
|
{
"start": 2069,
"end": 2612
}
|
class ____
extends ContainerConnectionDetailsFactory<ElasticsearchContainer, ElasticsearchConnectionDetails> {
private static final int DEFAULT_PORT = 9200;
@Override
protected ElasticsearchConnectionDetails getContainerConnectionDetails(
ContainerConnectionSource<ElasticsearchContainer> source) {
return new ElasticsearchContainerConnectionDetails(source);
}
/**
* {@link ElasticsearchConnectionDetails} backed by a
* {@link ContainerConnectionSource}.
*/
private static final
|
ElasticsearchContainerConnectionDetailsFactory
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/EclatMapReducer.java
|
{
"start": 3258,
"end": 3520
}
|
class ____ extends AbstractItemSetMapReducer<
HashBasedTransactionStore,
ImmutableTransactionStore,
HashBasedTransactionStore,
EclatMapReducer.EclatResult> {
private static final int ITERATION_CHECK_INTERVAL = 100000;
static
|
EclatMapReducer
|
java
|
apache__camel
|
components/camel-cm-sms/src/generated/java/org/apache/camel/component/cm/CMComponentConfigurer.java
|
{
"start": 729,
"end": 2275
}
|
/**
 * Property configurer for {@code CMComponent}: maps the "autowiredEnabled" and
 * "lazyStartProducer" options to the component's setters/getters and reports their
 * types. NOTE(review): this class looks auto-generated (implements
 * GeneratedPropertyConfigurer); changes would normally belong in the generator,
 * not in hand edits.
 */
class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
CMComponent target = (CMComponent) obj;
// The lower-cased case labels serve the ignoreCase lookup; unknown names return false.
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": target.setAutowiredEnabled(property(camelContext, boolean.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
// Returns null for unknown option names.
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": return boolean.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
CMComponent target = (CMComponent) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": return target.isAutowiredEnabled();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
default: return null;
}
}
}
|
CMComponentConfigurer
|
java
|
spring-projects__spring-boot
|
smoke-test/spring-boot-smoke-test-secure-webflux/src/test/java/smoketest/secure/webflux/SampleSecureWebFluxApplicationTests.java
|
{
"start": 1350,
"end": 2699
}
|
/**
 * Integration tests asserting the default security posture of the sample WebFlux app:
 * user endpoints and most actuators require authentication, the health actuator is
 * open, and basic-auth credentials unlock both.
 */
class ____ {
@Autowired
private WebTestClient webClient;
@Test
void userDefinedMappingsSecureByDefault() {
// Without credentials, the root mapping must be rejected.
this.webClient.get()
.uri("/")
.accept(MediaType.APPLICATION_JSON)
.exchange()
.expectStatus()
.isEqualTo(HttpStatus.UNAUTHORIZED);
}
@Test
void healthInsecureByDefault() {
// Health is intentionally reachable without authentication.
this.webClient.get()
.uri("/actuator/health")
.accept(MediaType.APPLICATION_JSON)
.exchange()
.expectStatus()
.isOk();
}
@Test
void otherActuatorsSecureByDefault() {
this.webClient.get()
.uri("/actuator/env")
.accept(MediaType.APPLICATION_JSON)
.exchange()
.expectStatus()
.isUnauthorized();
}
@Test
void userDefinedMappingsAccessibleOnLogin() {
this.webClient.get()
.uri("/")
.accept(MediaType.APPLICATION_JSON)
.header("Authorization", getBasicAuth())
.exchange()
.expectBody(String.class)
.isEqualTo("Hello user");
}
@Test
void actuatorsAccessibleOnLogin() {
this.webClient.get()
.uri("/actuator/health")
.accept(MediaType.APPLICATION_JSON)
.header("Authorization", getBasicAuth())
.exchange()
.expectBody(String.class)
.isEqualTo("{\"groups\":[\"liveness\",\"readiness\"],\"status\":\"UP\"}");
}
// Builds the Basic auth header value for the test user.
// NOTE(review): getBytes() uses the platform default charset here; consider
// passing StandardCharsets.UTF_8 explicitly — confirm against project style.
private String getBasicAuth() {
return "Basic " + Base64.getEncoder().encodeToString("user:password".getBytes());
}
}
|
SampleSecureWebFluxApplicationTests
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/script/Script.java
|
{
"start": 5269,
"end": 29865
}
|
class ____ {
private ScriptType type;
private String lang;
private String idOrCode;
private Map<String, String> options;
private Map<String, Object> params;
private Builder() {
// This cannot default to an empty map because options are potentially added at multiple points.
this.options = new HashMap<>();
this.params = Collections.emptyMap();
}
/**
* Since inline scripts can accept code rather than just an id, they must also be able
* to handle template parsing, hence the need for custom parsing code. Templates can
* consist of either an {@link String} or a JSON object. If a JSON object is discovered
* then the content type option must also be saved as a compiler option.
*/
private void setInline(XContentParser parser) {
try {
if (type != null) {
throwOnlyOneOfType();
}
type = ScriptType.INLINE;
if (parser.currentToken() == Token.START_OBJECT) {
// this is really for search templates, that need to be converted to json format
XContentBuilder builder = XContentFactory.jsonBuilder();
idOrCode = Strings.toString(builder.copyCurrentStructure(parser));
options.put(CONTENT_TYPE_OPTION, XContentType.JSON.mediaType());
} else {
idOrCode = parser.text();
}
} catch (IOException exception) {
throw new UncheckedIOException(exception);
}
}
/**
* Set both the id and the type of the stored script.
*/
private void setStored(String idOrCode) {
if (type != null) {
throwOnlyOneOfType();
}
type = ScriptType.STORED;
this.idOrCode = idOrCode;
}
/**
* Helper method to throw an exception if more than one type of {@link Script} is specified.
*/
private static void throwOnlyOneOfType() {
throw new IllegalArgumentException(
"must only use one of ["
+ ScriptType.INLINE.getParseField().getPreferredName()
+ ", "
+ ScriptType.STORED.getParseField().getPreferredName()
+ "]"
+ " when specifying a script"
);
}
private void setLang(String lang) {
this.lang = lang;
}
/**
* Options may have already been added if an inline template was specified.
* Appends the user-defined compiler options with the internal compiler options.
*/
private void setOptions(Map<String, String> options) {
this.options.putAll(options);
}
private void setParams(Map<String, Object> params) {
this.params = params;
}
/**
* Validates the parameters and creates an {@link Script}.
* @param defaultLang The default lang is not a compile-time constant and must be provided
* at run-time this way in case a legacy default language is used from
* previously stored queries.
*/
private Script build(String defaultLang) {
if (type == null) {
throw new IllegalArgumentException("must specify either [source] for an inline script or [id] for a stored script");
}
if (type == ScriptType.INLINE) {
if (lang == null) {
lang = defaultLang;
}
if (idOrCode == null) {
throw new IllegalArgumentException("must specify <id> for an inline script");
}
if (options.size() > 1 || options.size() == 1 && options.get(CONTENT_TYPE_OPTION) == null) {
options.remove(CONTENT_TYPE_OPTION);
throw new IllegalArgumentException("illegal compiler options [" + options + "] specified");
}
} else if (type == ScriptType.STORED) {
if (lang != null) {
throw new IllegalArgumentException("illegally specified <lang> for a stored script");
}
if (idOrCode == null) {
throw new IllegalArgumentException("must specify <code> for a stored script");
}
if (options.isEmpty()) {
options = null;
} else {
throw new IllegalArgumentException(
"field [" + OPTIONS_PARSE_FIELD.getPreferredName() + "] " + "cannot be specified using a stored script"
);
}
}
return new Script(type, lang, idOrCode, options, params);
}
}
private static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("script", Builder::new);
static {
// Defines the fields necessary to parse a Script as XContent using an ObjectParser.
PARSER.declareField(Builder::setInline, parser -> parser, ScriptType.INLINE.getParseField(), ValueType.OBJECT_OR_STRING);
PARSER.declareString(Builder::setStored, ScriptType.STORED.getParseField());
PARSER.declareString(Builder::setLang, LANG_PARSE_FIELD);
PARSER.declareField(Builder::setOptions, XContentParser::mapStrings, OPTIONS_PARSE_FIELD, ValueType.OBJECT);
PARSER.declareField(Builder::setParams, XContentParser::map, PARAMS_PARSE_FIELD, ValueType.OBJECT);
}
/**
* Declare a script field on an {@link ObjectParser} with the standard name ({@code script}).
* @param <T> Whatever type the {@linkplain ObjectParser} is parsing.
* @param parser the parser itself
* @param consumer the consumer for the script
*/
public static <T> void declareScript(AbstractObjectParser<T, ?> parser, BiConsumer<T, Script> consumer) {
declareScript(parser, consumer, Script.SCRIPT_PARSE_FIELD);
}
/**
* Declare a script field on an {@link ObjectParser}.
* @param <T> Whatever type the {@linkplain ObjectParser} is parsing.
* @param parser the parser itself
* @param consumer the consumer for the script
* @param parseField the field name
*/
public static <T> void declareScript(AbstractObjectParser<T, ?> parser, BiConsumer<T, Script> consumer, ParseField parseField) {
parser.declareField(consumer, (p, c) -> Script.parse(p), parseField, ValueType.OBJECT_OR_STRING);
}
/**
* Convenience method to call {@link Script#parse(XContentParser, String)}
* using the default scripting language.
*/
public static Script parse(XContentParser parser) throws IOException {
return parse(parser, DEFAULT_SCRIPT_LANG);
}
/**
* Parse the script configured in the given settings.
*/
public static Script parse(Settings settings) {
try (XContentBuilder builder = JsonXContent.contentBuilder()) {
builder.startObject();
settings.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
try (
XContentParser parser = XContentHelper.createParserNotCompressed(
LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG,
BytesReference.bytes(builder),
XContentType.JSON
)
) {
return parse(parser);
}
} catch (IOException e) {
// it should not happen since we are not actually reading from a stream but an in-memory byte[]
throw new IllegalStateException(e);
}
}
/**
* This will parse XContent into a {@link Script}. The following formats can be parsed:
*
* The simple format defaults to an {@link ScriptType#INLINE} with no compiler options or user-defined params:
*
* Example:
* {@code
* "return Math.log(doc.popularity) * 100;"
* }
*
* The complex format where {@link ScriptType} and idOrCode are required while lang, options and params are not required.
*
* {@code
* {
* // Exactly one of "id" or "source" must be specified
* "id" : "<id>",
* // OR
* "source": "<source>",
* "lang" : "<lang>",
* "options" : {
* "option0" : "<option0>",
* "option1" : "<option1>",
* ...
* },
* "params" : {
* "param0" : "<param0>",
* "param1" : "<param1>",
* ...
* }
* }
* }
*
* Example:
* {@code
* {
* "source" : "return Math.log(doc.popularity) * params.multiplier",
* "lang" : "painless",
* "params" : {
* "multiplier" : 100.0
* }
* }
* }
*
* This also handles templates in a special way. If a complexly formatted query is specified as another complex
* JSON object the query is assumed to be a template, and the format will be preserved.
*
* {@code
* {
* "source" : { "query" : ... },
* "lang" : "<lang>",
* "options" : {
* "option0" : "<option0>",
* "option1" : "<option1>",
* ...
* },
* "params" : {
* "param0" : "<param0>",
* "param1" : "<param1>",
* ...
* }
* }
* }
*
* @param parser The {@link XContentParser} to be used.
* @param defaultLang The default language to use if no language is specified. The default language isn't necessarily
* the one defined by {@link Script#DEFAULT_SCRIPT_LANG} due to backwards compatibility requirements
* related to stored queries using previously default languages.
*
* @return The parsed {@link Script}.
*/
public static Script parse(XContentParser parser, String defaultLang) throws IOException {
    Objects.requireNonNull(defaultLang);

    // Position on the first token if the parser has not been advanced yet.
    Token token = parser.currentToken();
    if (token == null) {
        token = parser.nextToken();
    }

    // Anything other than a bare string uses the complex object format.
    if (token != Token.VALUE_STRING) {
        return PARSER.apply(parser, null).build(defaultLang);
    }

    // Simple format: the whole value is inline source in the default language.
    return new Script(ScriptType.INLINE, defaultLang, parser.text(), Collections.emptyMap());
}
/**
* Parse a {@link Script} from an {@link Object}, that can either be a {@link String} or a {@link Map}.
* @see #parse(XContentParser, String)
* @param config The object to parse the script from.
* @return The parsed {@link Script}.
*/
@SuppressWarnings("unchecked")
public static Script parse(Object config) {
Objects.requireNonNull(config, "Script must not be null");
if (config instanceof String) {
return new Script((String) config);
} else if (config instanceof Map) {
Map<String, Object> configMap = (Map<String, Object>) config;
String script = null;
ScriptType type = null;
String lang = null;
Map<String, Object> params = Collections.emptyMap();
Map<String, String> options = Collections.emptyMap();
for (Map.Entry<String, Object> entry : configMap.entrySet()) {
String parameterName = entry.getKey();
Object parameterValue = entry.getValue();
if (Script.LANG_PARSE_FIELD.match(parameterName, LoggingDeprecationHandler.INSTANCE)) {
if (parameterValue instanceof String || parameterValue == null) {
lang = (String) parameterValue;
} else {
throw new ElasticsearchParseException("Value must be of type String: [" + parameterName + "]");
}
} else if (Script.PARAMS_PARSE_FIELD.match(parameterName, LoggingDeprecationHandler.INSTANCE)) {
if (parameterValue instanceof Map || parameterValue == null) {
params = (Map<String, Object>) parameterValue;
} else {
throw new ElasticsearchParseException("Value must be of type Map: [" + parameterName + "]");
}
} else if (Script.OPTIONS_PARSE_FIELD.match(parameterName, LoggingDeprecationHandler.INSTANCE)) {
if (parameterValue instanceof Map || parameterValue == null) {
options = (Map<String, String>) parameterValue;
} else {
throw new ElasticsearchParseException("Value must be of type Map: [" + parameterName + "]");
}
} else if (ScriptType.INLINE.getParseField().match(parameterName, LoggingDeprecationHandler.INSTANCE)) {
if (parameterValue instanceof String || parameterValue == null) {
script = (String) parameterValue;
type = ScriptType.INLINE;
} else {
throw new ElasticsearchParseException("Value must be of type String: [" + parameterName + "]");
}
} else if (ScriptType.STORED.getParseField().match(parameterName, LoggingDeprecationHandler.INSTANCE)) {
if (parameterValue instanceof String || parameterValue == null) {
script = (String) parameterValue;
type = ScriptType.STORED;
} else {
throw new ElasticsearchParseException("Value must be of type String: [" + parameterName + "]");
}
} else {
throw new ElasticsearchParseException("Unsupported field [" + parameterName + "]");
}
}
if (script == null) {
throw new ElasticsearchParseException(
"Expected one of [{}] or [{}] fields, but found none",
ScriptType.INLINE.getParseField().getPreferredName(),
ScriptType.STORED.getParseField().getPreferredName()
);
}
assert type != null : "if script is not null, type should definitely not be null";
if (type == ScriptType.STORED) {
if (lang != null) {
throw new IllegalArgumentException(
"[" + Script.LANG_PARSE_FIELD.getPreferredName() + "] cannot be specified for stored scripts"
);
}
return new Script(type, null, script, null, params);
} else {
return new Script(type, lang == null ? DEFAULT_SCRIPT_LANG : lang, script, options, params);
}
} else {
throw new IllegalArgumentException("Script value should be a String or a Map");
}
}
private final ScriptType type;
private final String lang;
private final String idOrCode;
private final Map<String, String> options;
private final Map<String, Object> params;
/**
* Constructor for simple script using the default language and default type.
* @param idOrCode The id or code to use dependent on the default script type.
*/
public Script(String idOrCode) {
this(DEFAULT_SCRIPT_TYPE, DEFAULT_SCRIPT_LANG, idOrCode, Collections.emptyMap(), Collections.emptyMap());
}
/**
* Constructor for a script that does not need to use compiler options.
* @param type The {@link ScriptType}.
* @param lang The language for this {@link Script} if the {@link ScriptType} is {@link ScriptType#INLINE}.
* For {@link ScriptType#STORED} scripts this should be null, but can
* be specified to access scripts stored as part of the stored scripts deprecated API.
* @param idOrCode The id for this {@link Script} if the {@link ScriptType} is {@link ScriptType#STORED}.
* The code for this {@link Script} if the {@link ScriptType} is {@link ScriptType#INLINE}.
* @param params The user-defined params to be bound for script execution.
*/
public Script(ScriptType type, String lang, String idOrCode, Map<String, Object> params) {
this(type, lang, idOrCode, type == ScriptType.INLINE ? Collections.emptyMap() : null, params);
}
/**
* Constructor for a script that requires the use of compiler options.
* @param type The {@link ScriptType}.
* @param lang The language for this {@link Script} if the {@link ScriptType} is {@link ScriptType#INLINE}.
* For {@link ScriptType#STORED} scripts this should be null, but can
* be specified to access scripts stored as part of the stored scripts deprecated API.
* @param idOrCode The id for this {@link Script} if the {@link ScriptType} is {@link ScriptType#STORED}.
* The code for this {@link Script} if the {@link ScriptType} is {@link ScriptType#INLINE}.
* @param options The map of compiler options for this {@link Script} if the {@link ScriptType}
* is {@link ScriptType#INLINE}, {@code null} otherwise.
* @param params The user-defined params to be bound for script execution.
*/
public Script(ScriptType type, String lang, String idOrCode, Map<String, String> options, Map<String, Object> params) {
this.type = Objects.requireNonNull(type);
this.idOrCode = Objects.requireNonNull(idOrCode);
// Wrapped unmodifiable so callers cannot mutate params after construction.
this.params = Collections.unmodifiableMap(Objects.requireNonNull(params));
if (type == ScriptType.INLINE) {
// Inline scripts require both a language and a (possibly empty) compiler-options map.
this.lang = Objects.requireNonNull(lang);
this.options = Collections.unmodifiableMap(Objects.requireNonNull(options));
} else if (type == ScriptType.STORED) {
// Stored scripts carry neither a language nor compiler options.
if (lang != null) {
throw new IllegalArgumentException("lang cannot be specified for stored scripts");
}
this.lang = null;
if (options != null) {
throw new IllegalStateException("options cannot be specified for stored scripts");
}
this.options = null;
} else {
throw new IllegalStateException("unknown script type [" + type.getName() + "]");
}
}
/**
* Creates a {@link Script} read from an input stream.
*/
public Script(StreamInput in) throws IOException {
this.type = ScriptType.readFrom(in);
this.lang = in.readOptionalString();
this.idOrCode = in.readString();
@SuppressWarnings("unchecked")
Map<String, String> options = (Map<String, String>) (Map) in.readGenericMap();
this.options = options;
this.params = in.readGenericMap();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
type.writeTo(out);
out.writeOptionalString(lang);
out.writeString(idOrCode);
@SuppressWarnings("unchecked")
Map<String, Object> options = (Map<String, Object>) (Map) this.options;
out.writeMapWithConsistentOrder(options);
out.writeMapWithConsistentOrder(params);
}
/**
* This will build scripts into the following XContent structure:
*
* {@code
* {
* "<(id, source)>" : "<idOrCode>",
* "lang" : "<lang>",
* "options" : {
* "option0" : "<option0>",
* "option1" : "<option1>",
* ...
* },
* "params" : {
* "param0" : "<param0>",
* "param1" : "<param1>",
* ...
* }
* }
* }
*
* Example:
* {@code
* {
* "source" : "return Math.log(doc.popularity) * params.multiplier;",
* "lang" : "painless",
* "params" : {
* "multiplier" : 100.0
* }
* }
* }
*
* Note that lang, options, and params will only be included if there have been any specified.
*
* This also handles templates in a special way. If the {@link Script#CONTENT_TYPE_OPTION} option
* is provided and the {@link ScriptType#INLINE} is specified then the template will be preserved as a raw field.
*
* {@code
* {
* "source" : { "query" : ... },
* "lang" : "<lang>",
* "options" : {
* "option0" : "<option0>",
* "option1" : "<option1>",
* ...
* },
* "params" : {
* "param0" : "<param0>",
* "param1" : "<param1>",
* ...
* }
* }
* }
*/
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params builderParams) throws IOException {
builder.startObject();
String contentType = options == null ? null : options.get(CONTENT_TYPE_OPTION);
if (type == ScriptType.INLINE) {
if (contentType != null && builder.contentType().mediaType().equals(contentType)) {
try (InputStream stream = new BytesArray(idOrCode).streamInput()) {
builder.rawField(SOURCE_PARSE_FIELD.getPreferredName(), stream);
}
} else {
builder.field(SOURCE_PARSE_FIELD.getPreferredName(), idOrCode);
}
} else {
builder.field("id", idOrCode);
}
if (lang != null) {
builder.field(LANG_PARSE_FIELD.getPreferredName(), lang);
}
if (options != null && options.isEmpty() == false) {
builder.field(OPTIONS_PARSE_FIELD.getPreferredName(), options);
}
if (params.isEmpty() == false) {
builder.field(PARAMS_PARSE_FIELD.getPreferredName(), params);
}
builder.endObject();
return builder;
}
/**
* @return The {@link ScriptType} for this {@link Script}.
*/
public ScriptType getType() {
return type;
}
/**
* @return The language for this {@link Script} if the {@link ScriptType} is {@link ScriptType#INLINE}.
* For {@link ScriptType#STORED} scripts this should be null, but can
* be specified to access scripts stored as part of the stored scripts deprecated API.
*/
public String getLang() {
return lang;
}
/**
* @return The id for this {@link Script} if the {@link ScriptType} is {@link ScriptType#STORED}.
* The code for this {@link Script} if the {@link ScriptType} is {@link ScriptType#INLINE}.
*/
public String getIdOrCode() {
return idOrCode;
}
/**
* @return The map of compiler options for this {@link Script} if the {@link ScriptType}
* is {@link ScriptType#INLINE}, {@code null} otherwise.
*/
public Map<String, String> getOptions() {
return options;
}
/**
* @return The map of user-defined params for this {@link Script}.
*/
public Map<String, Object> getParams() {
return params;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Script script = (Script) o;
return type == script.type
&& Objects.equals(lang, script.lang)
&& Objects.equals(idOrCode, script.idOrCode)
&& Objects.equals(options, script.options)
&& Objects.equals(params, script.params);
}
@Override
public int hashCode() {
int result = type.hashCode();
result = 31 * result + (lang != null ? lang.hashCode() : 0);
result = 31 * result + idOrCode.hashCode();
result = 31 * result + (options != null ? options.hashCode() : 0);
result = 31 * result + params.hashCode();
return result;
}
@Override
public String toString() {
return "Script{"
+ "type="
+ type
+ ", lang='"
+ lang
+ '\''
+ ", idOrCode='"
+ idOrCode
+ '\''
+ ", options="
+ options
+ ", params="
+ params
+ '}';
}
}
|
Builder
|
java
|
spring-projects__spring-boot
|
module/spring-boot-webflux/src/main/java/org/springframework/boot/webflux/autoconfigure/ResourceHandlerRegistrationCustomizer.java
|
{
"start": 928,
"end": 1166
}
|
interface ____ {
/**
* Customize the given {@link ResourceHandlerRegistration}.
* @param registration the registration to customize
*/
void customize(ResourceHandlerRegistration registration);
}
|
ResourceHandlerRegistrationCustomizer
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/filter/wall/WallSelectWhereTest4.java
|
{
"start": 793,
"end": 1160
}
|
class ____ extends TestCase {
private String sql = "select * from t WHERE FID = 256 OR CHR(67)||CHR(65)||CHR(84) = 'CAT'";
// public void testMySql() throws Exception {
// assertFalse(WallUtils.isValidateMySql(sql));
// }
public void testORACLE() throws Exception {
assertFalse(WallUtils.isValidateOracle(sql));
}
}
|
WallSelectWhereTest4
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/detached/initialization/DetachedNestedInitializationBatchFetchTest.java
|
{
"start": 5915,
"end": 6055
}
|
class ____ {
@Id
private Long id;
@ManyToOne
private EntityB b;
}
@BatchSize( size = 10 )
@Entity(name = "EntityB")
static
|
EntityA
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/configproperties/MyHibernateConfig2.java
|
{
"start": 827,
"end": 1167
}
|
class ____ {
private Map<String, String> properties;
public Map<String, String> getProperties() {
return properties;
}
public void setProperties(
@MapFormat(transformation = MapFormat.MapTransformation.FLAT) Map<String, String> properties) {
this.properties = properties;
}
}
|
MyHibernateConfig2
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/AlreadyCheckedTest.java
|
{
"start": 12704,
"end": 13222
}
|
class ____ {
private final String a = "foo";
public void test(String a) {
if (this.a.equals(a)) {
// BUG: Diagnostic contains:
if (this.a.equals(a)) {}
}
}
}
""")
.doTest();
}
@Test
public void knownQuantityPassedToMethod() {
helper
.addSourceLines(
"Test.java",
"""
import com.google.auto.value.AutoValue;
|
Test
|
java
|
spring-projects__spring-boot
|
module/spring-boot-webflux/src/test/java/org/springframework/boot/webflux/autoconfigure/error/DefaultErrorWebExceptionHandlerIntegrationTests.java
|
{
"start": 20292,
"end": 21363
}
|
class ____ {
@Bean
@Order(-1)
ErrorWebExceptionHandler errorWebExceptionHandler(ErrorAttributes errorAttributes, WebProperties webProperties,
ObjectProvider<ViewResolver> viewResolvers, ServerCodecConfigurer serverCodecConfigurer,
ApplicationContext applicationContext) {
DefaultErrorWebExceptionHandler exceptionHandler = new DefaultErrorWebExceptionHandler(errorAttributes,
webProperties.getResources(), webProperties.getError(), applicationContext) {
@Override
protected ErrorAttributeOptions getErrorAttributeOptions(ServerRequest request, MediaType mediaType) {
return super.getErrorAttributeOptions(request, mediaType).excluding(Include.STATUS, Include.ERROR);
}
};
exceptionHandler.setViewResolvers(viewResolvers.orderedStream().toList());
exceptionHandler.setMessageWriters(serverCodecConfigurer.getWriters());
exceptionHandler.setMessageReaders(serverCodecConfigurer.getReaders());
return exceptionHandler;
}
}
@Configuration(proxyBeanMethods = false)
static
|
CustomErrorWebExceptionHandlerWithoutStatus
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/embeddable/EmbeddableWithIdenticallyNamedAssociationTest.java
|
{
"start": 4143,
"end": 4901
}
|
class ____ {
@Id
private Integer id;
@OneToOne
@JoinColumn(name = "entityA_id")
private EntityA entityA;
@Embedded
private EmbeddableB embeddableB;
@Override
public String toString() {
return "EntityB{" +
"id=" + id +
", entityA=" + entityA.getId() +
", embeddableB=" + embeddableB +
'}';
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public EntityA getEntityA() {
return entityA;
}
public void setEntityA(EntityA a) {
this.entityA = a;
}
public EmbeddableB getEmbeddableB() {
return embeddableB;
}
public void setEmbeddableB(EmbeddableB embeddableB) {
this.embeddableB = embeddableB;
}
}
@Embeddable
public static
|
EntityB
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/aot/generate/ValueCodeGeneratorDelegates.java
|
{
"start": 7986,
"end": 8368
}
|
class ____ implements Delegate {
@Override
public @Nullable CodeBlock generateCode(ValueCodeGenerator codeGenerator, Object value) {
if (value instanceof Enum<?> enumValue) {
return CodeBlock.of("$T.$L", enumValue.getDeclaringClass(),
enumValue.name());
}
return null;
}
}
/**
* {@link Delegate} for {@link Class} types.
*/
private static
|
EnumDelegate
|
java
|
apache__spark
|
sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java
|
{
"start": 31760,
"end": 32220
}
|
class ____ {
String value;
KryoSerializable(String value) {
this.value = value;
}
@Override
public boolean equals(Object other) {
if (this == other) return true;
if (other == null || getClass() != other.getClass()) return false;
return this.value.equals(((KryoSerializable) other).value);
}
@Override
public int hashCode() {
return this.value.hashCode();
}
}
public static
|
KryoSerializable
|
java
|
elastic__elasticsearch
|
libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java
|
{
"start": 828,
"end": 1559
}
|
class ____ implements VectorScorerFactory {
static final VectorScorerFactoryImpl INSTANCE = null;
@Override
public Optional<RandomVectorScorerSupplier> getInt7SQVectorScorerSupplier(
VectorSimilarityType similarityType,
IndexInput input,
QuantizedByteVectorValues values,
float scoreCorrectionConstant
) {
throw new UnsupportedOperationException("should not reach here");
}
@Override
public Optional<RandomVectorScorer> getInt7SQVectorScorer(
VectorSimilarityFunction sim,
QuantizedByteVectorValues values,
float[] queryVector
) {
throw new UnsupportedOperationException("should not reach here");
}
}
|
VectorScorerFactoryImpl
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/discriminator/embeddable/JoinedSubclassWithEmbeddableTest.java
|
{
"start": 3749,
"end": 4351
}
|
class ____ implements Serializable {
@ManyToOne
@JoinColumn(name = "employee_emb_alert_contact")
private Person alertContact;
@OneToMany
@JoinColumn(name = "employee_emb_alert_contact")
private Set<Employee> alerteeContacts = new HashSet<>();
@ManyToMany
@OrderColumn(name = "list_idx")
@JoinTable(name = "employee_emb_person_list")
private List<Person> personList = new ArrayList<>();
@ManyToMany
@CollectionTable(name = "employee_emb_person_map")
@MapKeyColumn(name = "person_key", length = 20)
private Map<String, Person> personMap = new HashMap<>();
}
}
|
EmployeeContact
|
java
|
resilience4j__resilience4j
|
resilience4j-retry/src/test/java/io/github/resilience4j/retry/internal/RunnableRetryTest.java
|
{
"start": 1434,
"end": 6928
}
|
class ____ {
private HelloWorldService helloWorldService;
private long sleptTime = 0L;
@Before
public void setUp() {
helloWorldService = mock(HelloWorldService.class);
RetryImpl.sleepFunction = sleep -> sleptTime += sleep;
}
@Test
public void shouldNotRetry() {
Retry retryContext = Retry.ofDefaults("id");
Runnable runnable = Retry.decorateRunnable(retryContext, helloWorldService::sayHelloWorld);
runnable.run();
then(helloWorldService).should().sayHelloWorld();
assertThat(sleptTime).isZero();
}
@Test
public void testDecorateRunnable() {
willThrow(new HelloWorldException()).given(helloWorldService).sayHelloWorld();
Retry retry = Retry.ofDefaults("id");
Runnable runnable = Retry.decorateRunnable(retry, helloWorldService::sayHelloWorld);
Try<Void> result = Try.run(runnable::run);
then(helloWorldService).should(times(3)).sayHelloWorld();
assertThat(result.isFailure()).isTrue();
assertThat(result.failed().get()).isInstanceOf(HelloWorldException.class);
assertThat(sleptTime).isEqualTo(RetryConfig.DEFAULT_WAIT_DURATION * 2);
}
@Test
public void testExecuteRunnable() {
Retry retry = Retry.ofDefaults("id");
retry.executeRunnable(helloWorldService::sayHelloWorld);
then(helloWorldService).should().sayHelloWorld();
assertThat(sleptTime).isZero();
}
@Test
public void shouldReturnAfterThreeAttempts() {
willThrow(new HelloWorldException()).given(helloWorldService).sayHelloWorld();
Retry retry = Retry.ofDefaults("id");
CheckedRunnable retryableRunnable = Retry
.decorateCheckedRunnable(retry, helloWorldService::sayHelloWorld);
Try<Void> result = Try.run(() -> retryableRunnable.run());
then(helloWorldService).should(times(3)).sayHelloWorld();
assertThat(result.isFailure()).isTrue();
assertThat(result.failed().get()).isInstanceOf(HelloWorldException.class);
assertThat(sleptTime).isEqualTo(RetryConfig.DEFAULT_WAIT_DURATION * 2);
}
@Test
public void shouldReturnAfterOneAttempt() {
willThrow(new HelloWorldException()).given(helloWorldService).sayHelloWorld();
RetryConfig config = RetryConfig.custom().maxAttempts(1).build();
Retry retry = Retry.of("id", config);
CheckedRunnable retryableRunnable = Retry
.decorateCheckedRunnable(retry, helloWorldService::sayHelloWorld);
Try<Void> result = Try.run(() -> retryableRunnable.run());
then(helloWorldService).should().sayHelloWorld();
assertThat(result.isFailure()).isTrue();
assertThat(result.failed().get()).isInstanceOf(HelloWorldException.class);
assertThat(sleptTime).isZero();
}
@Test
public void shouldReturnAfterOneAttemptAndIgnoreException() {
willThrow(new HelloWorldException()).given(helloWorldService).sayHelloWorld();
RetryConfig config = RetryConfig.custom()
.retryOnException(throwable -> Match(throwable).of(
Case($(Predicates.instanceOf(HelloWorldException.class)), false),
Case($(), true)))
.build();
Retry retry = Retry.of("id", config);
CheckedRunnable retryableRunnable = Retry
.decorateCheckedRunnable(retry, helloWorldService::sayHelloWorld);
Try<Void> result = Try.run(() -> retryableRunnable.run());
// because the exception should be rethrown immediately
then(helloWorldService).should().sayHelloWorld();
assertThat(result.isFailure()).isTrue();
assertThat(result.failed().get()).isInstanceOf(HelloWorldException.class);
assertThat(sleptTime).isZero();
}
@Test
public void shouldTakeIntoAccountBackoffFunction() {
willThrow(new HelloWorldException()).given(helloWorldService).sayHelloWorld();
RetryConfig config = RetryConfig
.custom()
.intervalFunction(IntervalFunction.of(Duration.ofMillis(500), x -> x * x))
.build();
Retry retry = Retry.of("id", config);
CheckedRunnable retryableRunnable = Retry
.decorateCheckedRunnable(retry, helloWorldService::sayHelloWorld);
Try.run(() -> retryableRunnable.run());
then(helloWorldService).should(times(3)).sayHelloWorld();
assertThat(sleptTime).isEqualTo(
RetryConfig.DEFAULT_WAIT_DURATION +
RetryConfig.DEFAULT_WAIT_DURATION * RetryConfig.DEFAULT_WAIT_DURATION);
}
@Test
public void shouldTakeIntoAccountRetryOnResult() {
AtomicInteger value = new AtomicInteger(0);
final int targetValue = 2;
RetryConfig config = RetryConfig
.custom()
.retryOnResult(result -> value.get() != targetValue)
.build();
Retry retry = Retry.of("shouldTakeIntoAccountRetryOnResult", config);
CheckedRunnable retryableRunnable = Retry
.decorateCheckedRunnable(retry, () -> {
helloWorldService.sayHelloWorld();
value.incrementAndGet();
});
Try.run(() -> retryableRunnable.run());
then(helloWorldService).should(times(targetValue)).sayHelloWorld();
System.out.println(sleptTime);
assertThat(sleptTime).isEqualTo(RetryConfig.DEFAULT_WAIT_DURATION);
}
}
|
RunnableRetryTest
|
java
|
google__guava
|
android/guava-testlib/src/com/google/common/collect/testing/testers/CollectionSizeTester.java
|
{
"start": 1286,
"end": 1453
}
|
class ____<E> extends AbstractCollectionTester<E> {
public void testSize() {
assertEquals("size():", getNumElements(), collection.size());
}
}
|
CollectionSizeTester
|
java
|
google__truth
|
core/src/main/java/com/google/common/truth/LongSubject.java
|
{
"start": 916,
"end": 1518
}
|
class ____ extends ComparableSubject<Long> {
private final @Nullable Long actual;
/**
* The constructor is for use by subclasses only. If you want to create an instance of this class
* itself, call {@link Subject#check(String, Object...) check(...)}{@code .that(actual)}.
*/
protected LongSubject(FailureMetadata metadata, @Nullable Long actual) {
super(metadata, actual);
this.actual = actual;
}
/**
* A partially specified check about an approximate relationship to a {@code long} value using a
* tolerance.
*
* @since 1.2
*/
public static final
|
LongSubject
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/config/DefaultLoaderBeanOverridingExplicitConfigClassesInheritedTests.java
|
{
"start": 1239,
"end": 1636
}
|
class ____ extends
DefaultLoaderExplicitConfigClassesBaseTests {
@Test
@Override
void verifyEmployeeSetFromBaseContextConfig() {
assertThat(this.employee).as("The employee should have been autowired.").isNotNull();
assertThat(this.employee.getName()).as("The employee bean should have been overridden.").isEqualTo("Yoda");
}
}
|
DefaultLoaderBeanOverridingExplicitConfigClassesInheritedTests
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/AppReportFetcher.java
|
{
"start": 2012,
"end": 5979
}
|
enum ____ {RM, AHS}
private final Configuration conf;
private ApplicationHistoryProtocol historyManager;
private String ahsAppPageUrlBase;
private final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
private boolean isAHSEnabled;
/**
* Create a new Connection to the RM/Application History Server to fetch Application reports.
*
* @param conf the conf to use to know where the RM is.
*/
public AppReportFetcher(Configuration conf) {
this.conf = conf;
if (conf.getBoolean(YarnConfiguration.APPLICATION_HISTORY_ENABLED,
YarnConfiguration.DEFAULT_APPLICATION_HISTORY_ENABLED)) {
this.isAHSEnabled = true;
String scheme = WebAppUtils.getHttpSchemePrefix(conf);
String historyUrl = WebAppUtils.getAHSWebAppURLWithoutScheme(conf);
this.ahsAppPageUrlBase = StringHelper.pjoin(scheme + historyUrl, "applicationhistory", "app");
}
try {
if (this.isAHSEnabled) {
this.historyManager = getAHSProxy(conf);
} else {
this.historyManager = null;
}
} catch (IOException e) {
throw new YarnRuntimeException(e);
}
}
protected ApplicationHistoryProtocol getAHSProxy(Configuration configuration)
throws IOException {
InetSocketAddress addr = configuration.getSocketAddr(YarnConfiguration.TIMELINE_SERVICE_ADDRESS,
YarnConfiguration.DEFAULT_TIMELINE_SERVICE_ADDRESS,
YarnConfiguration.DEFAULT_TIMELINE_SERVICE_PORT);
return AHSProxy.createAHSProxy(configuration, ApplicationHistoryProtocol.class, addr);
}
/**
* Get an application report for the specified application id from the RM and
* fall back to the Application History Server if not found in RM.
* @param appId id of the application to get.
* @return the ApplicationReport for the appId.
* @throws YarnException on any error.
* @throws IOException
*/
public abstract FetchedAppReport getApplicationReport(ApplicationId appId)
throws YarnException, IOException;
/**
* Get an application report for the specified application id from the RM and
* fall back to the Application History Server if not found in RM.
*
* @param applicationsManager what to use to get the RM reports.
* @param appId id of the application to get.
* @return the ApplicationReport for the appId.
* @throws YarnException on any error.
* @throws IOException connection exception.
*/
protected FetchedAppReport getApplicationReport(ApplicationClientProtocol applicationsManager,
ApplicationId appId) throws YarnException, IOException {
GetApplicationReportRequest request =
this.recordFactory.newRecordInstance(GetApplicationReportRequest.class);
request.setApplicationId(appId);
ApplicationReport appReport;
FetchedAppReport fetchedAppReport;
try {
appReport = applicationsManager.getApplicationReport(request).getApplicationReport();
fetchedAppReport = new FetchedAppReport(appReport, AppReportSource.RM);
} catch (ApplicationNotFoundException e) {
if (!isAHSEnabled) {
// Just throw it as usual if historyService is not enabled.
throw e;
}
//Fetch the application report from AHS
appReport = historyManager.getApplicationReport(request).getApplicationReport();
fetchedAppReport = new FetchedAppReport(appReport, AppReportSource.AHS);
}
return fetchedAppReport;
}
public abstract String getRmAppPageUrlBase(ApplicationId appId) throws IOException, YarnException;
public String getAhsAppPageUrlBase() {
return this.ahsAppPageUrlBase;
}
protected Configuration getConf() {
return this.conf;
}
public void stop() {
if (this.historyManager != null) {
RPC.stopProxy(this.historyManager);
}
}
@VisibleForTesting
public void setHistoryManager(ApplicationHistoryProtocol historyManager) {
this.historyManager = historyManager;
}
/*
* This
|
AppReportSource
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/serializer/JSONFieldTest3.java
|
{
"start": 444,
"end": 968
}
|
class ____ {
private int id;
@JSONField(serialize = false)
private boolean _flag;
@JSONField(serialize = false)
private int _id2;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public boolean isFlag() {
return _flag;
}
public void setFlag(boolean flag) {
this._flag = flag;
}
public int getId2() {
return _id2;
}
public void setId2(int id2) {
this._id2 = id2;
}
}
}
|
VO
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/builder/StandardToStringStyle.java
|
{
"start": 1017,
"end": 1146
}
|
class ____ intended to be used as a singleton.
* There is no need to instantiate a new style each time.
* Simply instantiate the
|
is
|
java
|
apache__rocketmq
|
remoting/src/main/java/org/apache/rocketmq/remoting/RemotingClient.java
|
{
"start": 1448,
"end": 3709
}
|
interface ____ extends RemotingService {
void updateNameServerAddressList(final List<String> addrs);
List<String> getNameServerAddressList();
List<String> getAvailableNameSrvList();
RemotingCommand invokeSync(final String addr, final RemotingCommand request,
final long timeoutMillis) throws InterruptedException, RemotingConnectException,
RemotingSendRequestException, RemotingTimeoutException;
void invokeAsync(final String addr, final RemotingCommand request, final long timeoutMillis,
final InvokeCallback invokeCallback) throws InterruptedException, RemotingConnectException,
RemotingTooMuchRequestException, RemotingTimeoutException, RemotingSendRequestException;
void invokeOneway(final String addr, final RemotingCommand request, final long timeoutMillis)
throws InterruptedException, RemotingConnectException, RemotingTooMuchRequestException,
RemotingTimeoutException, RemotingSendRequestException;
default CompletableFuture<RemotingCommand> invoke(final String addr, final RemotingCommand request,
final long timeoutMillis) {
CompletableFuture<RemotingCommand> future = new CompletableFuture<>();
try {
invokeAsync(addr, request, timeoutMillis, new InvokeCallback() {
@Override
public void operationComplete(ResponseFuture responseFuture) {
}
@Override
public void operationSucceed(RemotingCommand response) {
future.complete(response);
}
@Override
public void operationFail(Throwable throwable) {
future.completeExceptionally(throwable);
}
});
} catch (Throwable t) {
future.completeExceptionally(t);
}
return future;
}
void registerProcessor(final int requestCode, final NettyRequestProcessor processor,
final ExecutorService executor);
void setCallbackExecutor(final ExecutorService callbackExecutor);
boolean isChannelWritable(final String addr);
boolean isAddressReachable(final String addr);
void closeChannels(final List<String> addrList);
}
|
RemotingClient
|
java
|
quarkusio__quarkus
|
extensions/redis-client/runtime/src/main/java/io/quarkus/redis/datasource/topk/ReactiveTopKCommands.java
|
{
"start": 677,
"end": 6987
}
|
interface ____<K, V> extends ReactiveRedisCommands {
/**
* Execute the command <a href="https://redis.io/commands/topk.add">TOPK.ADD</a>.
* Summary: Adds an item to the data structure. Multiple items can be added at once.
* If an item enters the Top-K list, the item which is expelled is returned. This allows dynamic heavy-hitter
* detection of items being entered or expelled from Top-K list.
* Group: top-k
* <p>
*
* @param key the name of list where item is added, must not be {@code null}
* @param item the item to add, must not be {@code null}
* @return a uni producing the item that get expelled if any, emit {@code null} otherwise
**/
Uni<V> topkAdd(K key, V item);
/**
* Execute the command <a href="https://redis.io/commands/topk.add">TOPK.ADD</a>.
* Summary: Adds an item to the data structure. Multiple items can be added at once.
* If an item enters the Top-K list, the item which is expelled is returned. This allows dynamic heavy-hitter
* detection of items being entered or expelled from Top-K list.
* Group: top-k
* <p>
*
* @param key the name of list where item is added, must not be {@code null}
* @param items the items to add, must not be {@code null}, must not be empty, must not contain {@code null}
* @return a uni producing a list containing for each corresponding added item the expelled item if any,
* {@code null} otherwise.
**/
Uni<List<V>> topkAdd(K key, V... items);
/**
* Execute the command <a href="https://redis.io/commands/topk.incrby">TOPK.INCRBY</a>.
* Summary: Increase the score of an item in the data structure by increment. Multiple items' score can be increased
* at once. If an item enters the Top-K list, the item which is expelled is returned.
* Group: top-k
* <p>
*
* @param key the name of list where item is added, must not be {@code null}
* @param item the item to add, must not be {@code null}
* @param increment increment to current item score. Increment must be greater or equal to 1. Increment is
* limited to 100,000 to avoid server freeze.
* @return a uni producing the item that get expelled if any, emit {@code null} otherwise
**/
Uni<V> topkIncrBy(K key, V item, int increment);
/**
* Execute the command <a href="https://redis.io/commands/topk.incrby">TOPK.INCRBY</a>.
* Summary: Increase the score of an item in the data structure by increment. Multiple items' score can be increased
* at once. If an item enters the Top-K list, the item which is expelled is returned.
* Group: top-k
* <p>
*
* @param key the name of list where item is added, must not be {@code null}
* @param couples The map containing the item / increment, must not be {@code null}, must not be empty
* @return a uni producing a map containing for each added item the expelled item if any, {@code null} otherwise
**/
Uni<Map<V, V>> topkIncrBy(K key, Map<V, Integer> couples);
/**
* Execute the command <a href="https://redis.io/commands/topk.list/">TOPK.LIST</a>.
* Summary: Return full list of items in Top K list.
* Group: top-k
* <p>
*
* @param key the name of list, must not be {@code null}
* @return a uni producing the list of items
**/
Uni<List<V>> topkList(K key);
/**
* Execute the command <a href="https://redis.io/commands/topk.list/">TOPK.LIST</a>.
* Summary: Return full list of items in Top K list.
* Group: top-k
* <p>
*
* @param key the name of list, must not be {@code null}
* @return a uni producing the Map of items with the associated count
**/
Uni<Map<V, Integer>> topkListWithCount(K key);
/**
* Execute the command <a href="https://redis.io/commands/topk.query/">TOPK.QUERY</a>.
* Summary: Checks whether an item is one of Top-K items. Multiple items can be checked at once.
* Group: top-k
* <p>
*
* @param key the name of list, must not be {@code null}
* @param item the item to check, must not be {@code null}
* @return a uni producing {@code true} if the item is in the list, {@code false} otherwise
**/
Uni<Boolean> topkQuery(K key, V item);
/**
* Execute the command <a href="https://redis.io/commands/topk.query/">TOPK.QUERY</a>.
* Summary: Checks whether an item is one of Top-K items. Multiple items can be checked at once.
* Group: top-k
* <p>
*
* @param key the name of list, must not be {@code null}
* @param items the items to check, must not be {@code null}, must not contain {@code null}, must not be empty
* @return a uni producing a list containing {@code true} if the corresponding item is in the list, {@code false}
* otherwise
**/
Uni<List<Boolean>> topkQuery(K key, V... items);
/**
* Execute the command <a href="https://redis.io/commands/topk.reserve/">TOPK.RESERVE</a>.
* Summary: Initializes a TopK with specified parameters.
* Group: top-k
* <p>
*
* @param key the name of list, must not be {@code null}
* @param topk the number of top occurring items to keep.
* @return a uni producing {@code null} once the operation completes
**/
Uni<Void> topkReserve(K key, int topk);
/**
* Execute the command <a href="https://redis.io/commands/topk.reserve/">TOPK.RESERVE</a>.
* Summary: Initializes a TopK with specified parameters.
* Group: top-k
* <p>
*
* @param key the name of list, must not be {@code null}
* @param topk the number of top occurring items to keep.
* @param width the number of counters kept in each array. (Default 8)
* @param depth the number of arrays. (Default 7)
* @param decay the probability of reducing a counter in an occupied bucket. It is raised to power of it's counter
* (decay ^ bucket[i].counter). Therefore, as the counter gets higher, the chance of a reduction is
* being reduced. (Default 0.9)
* @return a uni producing {@code null} once the operation completes
**/
Uni<Void> topkReserve(K key, int topk, int width, int depth, double decay);
}
|
ReactiveTopKCommands
|
java
|
mybatis__mybatis-3
|
src/test/java/org/apache/ibatis/submitted/typebasedtypehandlerresolution/User.java
|
{
"start": 778,
"end": 2050
}
|
class ____ {
private Integer id;
private FuzzyBean<String> strvalue;
private FuzzyBean<Integer> intvalue;
private LocalDate datevalue;
private LocalDate datevalue2;
private List<String> strings;
private List<Integer> integers;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public FuzzyBean<String> getStrvalue() {
return strvalue;
}
public void setStrvalue(FuzzyBean<String> strvalue) {
this.strvalue = strvalue;
}
public FuzzyBean<Integer> getIntvalue() {
return intvalue;
}
public void setIntvalue(FuzzyBean<Integer> intvalue) {
this.intvalue = intvalue;
}
public LocalDate getDatevalue() {
return datevalue;
}
public void setDatevalue(LocalDate datevalue) {
this.datevalue = datevalue;
}
public LocalDate getDatevalue2() {
return datevalue2;
}
public void setDatevalue2(LocalDate datevalue2) {
this.datevalue2 = datevalue2;
}
public List<String> getStrings() {
return strings;
}
public void setStrings(List<String> strings) {
this.strings = strings;
}
public List<Integer> getIntegers() {
return integers;
}
public void setIntegers(List<Integer> integers) {
this.integers = integers;
}
}
|
User
|
java
|
netty__netty
|
codec-http/src/main/java/io/netty/handler/codec/spdy/SpdySynReplyFrame.java
|
{
"start": 724,
"end": 956
}
|
interface ____ extends SpdyHeadersFrame {
@Override
SpdySynReplyFrame setStreamId(int streamID);
@Override
SpdySynReplyFrame setLast(boolean last);
@Override
SpdySynReplyFrame setInvalid();
}
|
SpdySynReplyFrame
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/parser/JSONReaderScannerTest_bytes.java
|
{
"start": 210,
"end": 670
}
|
class ____ extends TestCase {
public void test_e() throws Exception {
VO vo = new VO();
vo.setValue("ABC".getBytes("UTF-8"));
String text = JSON.toJSONString(vo);
JSONReader reader = new JSONReader(new StringReader(text));
VO vo2 = reader.readObject(VO.class);
Assert.assertEquals("ABC", new String(vo2.getValue()));
reader.close();
}
public static
|
JSONReaderScannerTest_bytes
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/RandomDatum.java
|
{
"start": 2508,
"end": 3010
}
|
class ____ {
Random random;
private RandomDatum key;
private RandomDatum value;
public Generator() { random = new Random(); }
public Generator(int seed) { random = new Random(seed); }
public RandomDatum getKey() { return key; }
public RandomDatum getValue() { return value; }
public void next() {
key = new RandomDatum(random);
value = new RandomDatum(random);
}
}
/** A WritableComparator optimized for RandomDatum. */
public static
|
Generator
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/utils/TestingResourceManagerGateway.java
|
{
"start": 3785,
"end": 24133
}
|
class ____ implements ResourceManagerGateway {
private final ResourceManagerId resourceManagerId;
private final ResourceID ownResourceId;
private final String address;
private final String hostname;
private volatile QuadFunction<
JobMasterId, ResourceID, String, JobID, CompletableFuture<RegistrationResponse>>
registerJobManagerFunction;
private volatile Consumer<Tuple3<JobID, JobStatus, Throwable>> disconnectJobManagerConsumer;
private volatile Function<TaskExecutorRegistration, CompletableFuture<RegistrationResponse>>
registerTaskExecutorFunction;
private volatile Function<Tuple2<ResourceID, FileType>, CompletableFuture<TransientBlobKey>>
requestTaskManagerFileUploadByTypeFunction;
private volatile Function<
Tuple3<ResourceID, String, FileType>, CompletableFuture<TransientBlobKey>>
requestTaskManagerFileUploadByNameAndTypeFunction;
private volatile Consumer<Tuple2<ResourceID, Throwable>> disconnectTaskExecutorConsumer;
private volatile Function<
Tuple3<ResourceID, InstanceID, SlotReport>, CompletableFuture<Acknowledge>>
sendSlotReportFunction;
private volatile BiFunction<ResourceID, TaskExecutorHeartbeatPayload, CompletableFuture<Void>>
taskExecutorHeartbeatFunction;
private volatile Consumer<Tuple3<InstanceID, SlotID, AllocationID>> notifySlotAvailableConsumer;
private volatile Function<ResourceID, CompletableFuture<Collection<LogInfo>>>
requestTaskManagerLogListFunction;
private volatile Function<ResourceID, CompletableFuture<TaskManagerInfoWithSlots>>
requestTaskManagerDetailsInfoFunction;
private volatile Function<ResourceID, CompletableFuture<TaskExecutorThreadInfoGateway>>
requestTaskExecutorThreadInfoGateway;
private volatile Function<ResourceID, CompletableFuture<ThreadDumpInfo>>
requestThreadDumpFunction;
private volatile Function<ResourceID, CompletableFuture<ProfilingInfo>>
requestProfilingFunction;
private volatile Function<ResourceID, CompletableFuture<Collection<ProfilingInfo>>>
requestProfilingListFunction;
private volatile BiFunction<JobMasterId, ResourceRequirements, CompletableFuture<Acknowledge>>
declareRequiredResourcesFunction =
(ignoredA, ignoredB) ->
FutureUtils.completedExceptionally(new UnsupportedOperationException());
private volatile Function<ResourceID, CompletableFuture<Void>> jobMasterHeartbeatFunction;
private volatile Function<Collection<BlockedNode>, CompletableFuture<Acknowledge>>
notifyNewBlockedNodesFunction =
ignored -> CompletableFuture.completedFuture(Acknowledge.get());
public TestingResourceManagerGateway() {
this(
ResourceManagerId.generate(),
ResourceID.generate(),
"localhost/" + UUID.randomUUID(),
"localhost");
}
public TestingResourceManagerGateway(
ResourceManagerId resourceManagerId,
ResourceID resourceId,
String address,
String hostname) {
this.resourceManagerId = Preconditions.checkNotNull(resourceManagerId);
this.ownResourceId = Preconditions.checkNotNull(resourceId);
this.address = Preconditions.checkNotNull(address);
this.hostname = Preconditions.checkNotNull(hostname);
}
public ResourceID getOwnResourceId() {
return ownResourceId;
}
public void setRegisterJobManagerFunction(
QuadFunction<
JobMasterId,
ResourceID,
String,
JobID,
CompletableFuture<RegistrationResponse>>
registerJobManagerFunction) {
this.registerJobManagerFunction = registerJobManagerFunction;
}
public void setDisconnectJobManagerConsumer(
Consumer<Tuple3<JobID, JobStatus, Throwable>> disconnectJobManagerConsumer) {
this.disconnectJobManagerConsumer = disconnectJobManagerConsumer;
}
public void setRegisterTaskExecutorFunction(
Function<TaskExecutorRegistration, CompletableFuture<RegistrationResponse>>
registerTaskExecutorFunction) {
this.registerTaskExecutorFunction = registerTaskExecutorFunction;
}
public void setRequestTaskManagerFileUploadByTypeFunction(
Function<Tuple2<ResourceID, FileType>, CompletableFuture<TransientBlobKey>>
requestTaskManagerFileUploadByTypeFunction) {
this.requestTaskManagerFileUploadByTypeFunction =
requestTaskManagerFileUploadByTypeFunction;
}
public void setRequestTaskManagerFileUploadByNameAndTypeFunction(
Function<Tuple3<ResourceID, String, FileType>, CompletableFuture<TransientBlobKey>>
requestTaskManagerFileUploadByNameAndTypeFunction) {
this.requestTaskManagerFileUploadByNameAndTypeFunction =
requestTaskManagerFileUploadByNameAndTypeFunction;
}
public void setRequestTaskManagerLogListFunction(
Function<ResourceID, CompletableFuture<Collection<LogInfo>>>
requestTaskManagerLogListFunction) {
this.requestTaskManagerLogListFunction = requestTaskManagerLogListFunction;
}
public void setRequestTaskManagerDetailsInfoFunction(
Function<ResourceID, CompletableFuture<TaskManagerInfoWithSlots>>
requestTaskManagerDetailsInfoFunction) {
this.requestTaskManagerDetailsInfoFunction = requestTaskManagerDetailsInfoFunction;
}
public void setRequestTaskExecutorGatewayFunction(
Function<ResourceID, CompletableFuture<TaskExecutorThreadInfoGateway>>
requestTaskExecutorThreadInfoGateway) {
this.requestTaskExecutorThreadInfoGateway = requestTaskExecutorThreadInfoGateway;
}
public void setDisconnectTaskExecutorConsumer(
Consumer<Tuple2<ResourceID, Throwable>> disconnectTaskExecutorConsumer) {
this.disconnectTaskExecutorConsumer = disconnectTaskExecutorConsumer;
}
public void setSendSlotReportFunction(
Function<Tuple3<ResourceID, InstanceID, SlotReport>, CompletableFuture<Acknowledge>>
sendSlotReportFunction) {
this.sendSlotReportFunction = sendSlotReportFunction;
}
public void setTaskExecutorHeartbeatFunction(
BiFunction<ResourceID, TaskExecutorHeartbeatPayload, CompletableFuture<Void>>
taskExecutorHeartbeatFunction) {
this.taskExecutorHeartbeatFunction = taskExecutorHeartbeatFunction;
}
public void setJobMasterHeartbeatFunction(
Function<ResourceID, CompletableFuture<Void>> jobMasterHeartbeatFunction) {
this.jobMasterHeartbeatFunction = jobMasterHeartbeatFunction;
}
public void setNotifySlotAvailableConsumer(
Consumer<Tuple3<InstanceID, SlotID, AllocationID>> notifySlotAvailableConsumer) {
this.notifySlotAvailableConsumer = notifySlotAvailableConsumer;
}
public void setRequestThreadDumpFunction(
Function<ResourceID, CompletableFuture<ThreadDumpInfo>> requestThreadDumpFunction) {
this.requestThreadDumpFunction = requestThreadDumpFunction;
}
public void setRequestProfilingFunction(
Function<ResourceID, CompletableFuture<ProfilingInfo>> requestProfilingFunction) {
this.requestProfilingFunction = requestProfilingFunction;
}
public void setRequestProfilingListFunction(
Function<ResourceID, CompletableFuture<Collection<ProfilingInfo>>>
requestProfilingListFunction) {
this.requestProfilingListFunction = requestProfilingListFunction;
}
public void setDeclareRequiredResourcesFunction(
BiFunction<JobMasterId, ResourceRequirements, CompletableFuture<Acknowledge>>
declareRequiredResourcesFunction) {
this.declareRequiredResourcesFunction = declareRequiredResourcesFunction;
}
public void setNotifyNewBlockedNodesFunction(
Function<Collection<BlockedNode>, CompletableFuture<Acknowledge>>
notifyNewBlockedNodesFunction) {
this.notifyNewBlockedNodesFunction = notifyNewBlockedNodesFunction;
}
@Override
public CompletableFuture<RegistrationResponse> registerJobMaster(
JobMasterId jobMasterId,
ResourceID jobMasterResourceId,
String jobMasterAddress,
JobID jobId,
Duration timeout) {
final QuadFunction<
JobMasterId,
ResourceID,
String,
JobID,
CompletableFuture<RegistrationResponse>>
currentConsumer = registerJobManagerFunction;
if (currentConsumer != null) {
return currentConsumer.apply(jobMasterId, jobMasterResourceId, jobMasterAddress, jobId);
}
return CompletableFuture.completedFuture(getJobMasterRegistrationSuccess());
}
public JobMasterRegistrationSuccess getJobMasterRegistrationSuccess() {
return new JobMasterRegistrationSuccess(resourceManagerId, ownResourceId);
}
@Override
public CompletableFuture<Acknowledge> declareRequiredResources(
JobMasterId jobMasterId, ResourceRequirements resourceRequirements, Duration timeout) {
return declareRequiredResourcesFunction.apply(jobMasterId, resourceRequirements);
}
@Override
public CompletableFuture<Acknowledge> sendSlotReport(
ResourceID taskManagerResourceId,
InstanceID taskManagerRegistrationId,
SlotReport slotReport,
Duration timeout) {
final Function<Tuple3<ResourceID, InstanceID, SlotReport>, CompletableFuture<Acknowledge>>
currentSendSlotReportFunction = sendSlotReportFunction;
if (currentSendSlotReportFunction != null) {
return currentSendSlotReportFunction.apply(
Tuple3.of(taskManagerResourceId, taskManagerRegistrationId, slotReport));
} else {
return CompletableFuture.completedFuture(Acknowledge.get());
}
}
@Override
public CompletableFuture<RegistrationResponse> registerTaskExecutor(
TaskExecutorRegistration taskExecutorRegistration, Duration timeout) {
final Function<TaskExecutorRegistration, CompletableFuture<RegistrationResponse>>
currentFunction = registerTaskExecutorFunction;
if (currentFunction != null) {
return currentFunction.apply(taskExecutorRegistration);
} else {
return CompletableFuture.completedFuture(
new TaskExecutorRegistrationSuccess(
new InstanceID(),
ownResourceId,
new ClusterInformation("localhost", 1234),
null));
}
}
@Override
public void notifySlotAvailable(
InstanceID instanceId, SlotID slotID, AllocationID oldAllocationId) {
final Consumer<Tuple3<InstanceID, SlotID, AllocationID>>
currentNotifySlotAvailableConsumer = notifySlotAvailableConsumer;
if (currentNotifySlotAvailableConsumer != null) {
currentNotifySlotAvailableConsumer.accept(
Tuple3.of(instanceId, slotID, oldAllocationId));
}
}
@Override
public CompletableFuture<Acknowledge> deregisterApplication(
ApplicationStatus finalStatus, String diagnostics) {
return CompletableFuture.completedFuture(Acknowledge.get());
}
@Override
public CompletableFuture<Integer> getNumberOfRegisteredTaskManagers() {
return CompletableFuture.completedFuture(0);
}
@Override
public CompletableFuture<Void> heartbeatFromTaskManager(
ResourceID heartbeatOrigin, TaskExecutorHeartbeatPayload heartbeatPayload) {
final BiFunction<ResourceID, TaskExecutorHeartbeatPayload, CompletableFuture<Void>>
currentTaskExecutorHeartbeatConsumer = taskExecutorHeartbeatFunction;
if (currentTaskExecutorHeartbeatConsumer != null) {
return currentTaskExecutorHeartbeatConsumer.apply(heartbeatOrigin, heartbeatPayload);
} else {
return FutureUtils.completedVoidFuture();
}
}
@Override
public CompletableFuture<Void> heartbeatFromJobManager(ResourceID heartbeatOrigin) {
final Function<ResourceID, CompletableFuture<Void>> currentJobMasterHeartbeatFunction =
jobMasterHeartbeatFunction;
if (currentJobMasterHeartbeatFunction != null) {
return currentJobMasterHeartbeatFunction.apply(heartbeatOrigin);
} else {
return FutureUtils.completedVoidFuture();
}
}
@Override
public void disconnectTaskManager(ResourceID resourceID, Exception cause) {
final Consumer<Tuple2<ResourceID, Throwable>> currentConsumer =
disconnectTaskExecutorConsumer;
if (currentConsumer != null) {
currentConsumer.accept(Tuple2.of(resourceID, cause));
}
}
@Override
public void disconnectJobManager(JobID jobId, JobStatus jobStatus, Exception cause) {
final Consumer<Tuple3<JobID, JobStatus, Throwable>> currentConsumer =
disconnectJobManagerConsumer;
if (currentConsumer != null) {
currentConsumer.accept(Tuple3.of(jobId, jobStatus, cause));
}
}
@Override
public CompletableFuture<Collection<TaskManagerInfo>> requestTaskManagerInfo(Duration timeout) {
return CompletableFuture.completedFuture(Collections.emptyList());
}
@Override
public CompletableFuture<TaskManagerInfoWithSlots> requestTaskManagerDetailsInfo(
ResourceID resourceId, Duration timeout) {
final Function<ResourceID, CompletableFuture<TaskManagerInfoWithSlots>> function =
requestTaskManagerDetailsInfoFunction;
if (function != null) {
return function.apply(resourceId);
} else {
return FutureUtils.completedExceptionally(
new IllegalStateException("No requestTaskManagerInfoFunction was set."));
}
}
@Override
public CompletableFuture<ResourceOverview> requestResourceOverview(Duration timeout) {
return CompletableFuture.completedFuture(
new ResourceOverview(1, 1, 1, 0, 0, ResourceProfile.ZERO, ResourceProfile.ZERO));
}
@Override
public CompletableFuture<Collection<Tuple2<ResourceID, String>>>
requestTaskManagerMetricQueryServiceAddresses(Duration timeout) {
return CompletableFuture.completedFuture(Collections.emptyList());
}
@Override
public CompletableFuture<TransientBlobKey> requestTaskManagerFileUploadByType(
ResourceID taskManagerId, FileType fileType, Duration timeout) {
final Function<Tuple2<ResourceID, FileType>, CompletableFuture<TransientBlobKey>> function =
requestTaskManagerFileUploadByTypeFunction;
if (function != null) {
return function.apply(Tuple2.of(taskManagerId, fileType));
} else {
return CompletableFuture.completedFuture(new TransientBlobKey());
}
}
@Override
public CompletableFuture<TransientBlobKey> requestTaskManagerFileUploadByNameAndType(
ResourceID taskManagerId, String fileName, FileType fileType, Duration timeout) {
final Function<Tuple3<ResourceID, String, FileType>, CompletableFuture<TransientBlobKey>>
function = requestTaskManagerFileUploadByNameAndTypeFunction;
if (function != null) {
return function.apply(Tuple3.of(taskManagerId, fileName, fileType));
} else {
return CompletableFuture.completedFuture(new TransientBlobKey());
}
}
@Override
public CompletableFuture<Collection<LogInfo>> requestTaskManagerLogList(
ResourceID taskManagerId, Duration timeout) {
final Function<ResourceID, CompletableFuture<Collection<LogInfo>>> function =
this.requestTaskManagerLogListFunction;
if (function != null) {
return function.apply(taskManagerId);
} else {
return FutureUtils.completedExceptionally(
new UnknownTaskExecutorException(taskManagerId));
}
}
@Override
public CompletableFuture<ThreadDumpInfo> requestThreadDump(
ResourceID taskManagerId, Duration timeout) {
final Function<ResourceID, CompletableFuture<ThreadDumpInfo>> function =
this.requestThreadDumpFunction;
if (function != null) {
return function.apply(taskManagerId);
} else {
return FutureUtils.completedExceptionally(
new UnknownTaskExecutorException(taskManagerId));
}
}
@Override
public CompletableFuture<TaskExecutorThreadInfoGateway> requestTaskExecutorThreadInfoGateway(
ResourceID taskManagerId, Duration timeout) {
final Function<ResourceID, CompletableFuture<TaskExecutorThreadInfoGateway>> function =
this.requestTaskExecutorThreadInfoGateway;
if (function != null) {
return function.apply(taskManagerId);
} else {
return FutureUtils.completedExceptionally(
new UnknownTaskExecutorException(taskManagerId));
}
}
@Override
public CompletableFuture<Collection<ProfilingInfo>> requestTaskManagerProfilingList(
ResourceID taskManagerId, Duration timeout) {
final Function<ResourceID, CompletableFuture<Collection<ProfilingInfo>>> function =
this.requestProfilingListFunction;
if (function != null) {
return function.apply(taskManagerId);
} else {
return FutureUtils.completedExceptionally(
new UnknownTaskExecutorException(taskManagerId));
}
}
@Override
public CompletableFuture<ProfilingInfo> requestProfiling(
ResourceID taskManagerId,
int duration,
ProfilingInfo.ProfilingMode mode,
Duration timeout) {
final Function<ResourceID, CompletableFuture<ProfilingInfo>> function =
this.requestProfilingFunction;
if (function != null) {
return function.apply(taskManagerId);
} else {
return FutureUtils.completedExceptionally(
new UnknownTaskExecutorException(taskManagerId));
}
}
@Override
public ResourceManagerId getFencingToken() {
return resourceManagerId;
}
@Override
public String getAddress() {
return address;
}
@Override
public String getHostname() {
return hostname;
}
@Override
public CompletableFuture<Map<IntermediateDataSetID, DataSetMetaInfo>> listDataSets() {
return CompletableFuture.completedFuture(Collections.emptyMap());
}
@Override
public CompletableFuture<Void> releaseClusterPartitions(
IntermediateDataSetID dataSetToRelease) {
return CompletableFuture.completedFuture(null);
}
@Override
public CompletableFuture<Void> reportClusterPartitions(
ResourceID taskExecutorId, ClusterPartitionReport clusterPartitionReport) {
return CompletableFuture.completedFuture(null);
}
@Override
public CompletableFuture<List<ShuffleDescriptor>> getClusterPartitionsShuffleDescriptors(
IntermediateDataSetID intermediateDataSetID) {
return CompletableFuture.completedFuture(null);
}
@Override
public CompletableFuture<Acknowledge> notifyNewBlockedNodes(Collection<BlockedNode> newNodes) {
return notifyNewBlockedNodesFunction.apply(newNodes);
}
}
|
TestingResourceManagerGateway
|
java
|
spring-projects__spring-framework
|
spring-expression/src/test/java/org/springframework/expression/spel/ComparatorTests.java
|
{
"start": 1387,
"end": 5514
}
|
class ____ {
@Test
void testPrimitives() throws EvaluationException {
TypeComparator comparator = new StandardTypeComparator();
// primitive int
assertThat(comparator.compare(1, 2)).isLessThan(0);
assertThat(comparator.compare(1, 1)).isEqualTo(0);
assertThat(comparator.compare(2, 1)).isGreaterThan(0);
assertThat(comparator.compare(1.0d, 2)).isLessThan(0);
assertThat(comparator.compare(1.0d, 1)).isEqualTo(0);
assertThat(comparator.compare(2.0d, 1)).isGreaterThan(0);
assertThat(comparator.compare(1.0f, 2)).isLessThan(0);
assertThat(comparator.compare(1.0f, 1)).isEqualTo(0);
assertThat(comparator.compare(2.0f, 1)).isGreaterThan(0);
assertThat(comparator.compare(1L, 2)).isLessThan(0);
assertThat(comparator.compare(1L, 1)).isEqualTo(0);
assertThat(comparator.compare(2L, 1)).isGreaterThan(0);
assertThat(comparator.compare(1, 2L)).isLessThan(0);
assertThat(comparator.compare(1, 1L)).isEqualTo(0);
assertThat(comparator.compare(2, 1L)).isGreaterThan(0);
assertThat(comparator.compare(1L, 2L)).isLessThan(0);
assertThat(comparator.compare(1L, 1L)).isEqualTo(0);
assertThat(comparator.compare(2L, 1L)).isGreaterThan(0);
}
@Test
void testNonPrimitiveNumbers() throws EvaluationException {
TypeComparator comparator = new StandardTypeComparator();
BigDecimal bdOne = new BigDecimal("1");
BigDecimal bdTwo = new BigDecimal("2");
assertThat(comparator.compare(bdOne, bdTwo)).isLessThan(0);
assertThat(comparator.compare(bdOne, new BigDecimal("1"))).isEqualTo(0);
assertThat(comparator.compare(bdTwo, bdOne)).isGreaterThan(0);
assertThat(comparator.compare(1, bdTwo)).isLessThan(0);
assertThat(comparator.compare(1, bdOne)).isEqualTo(0);
assertThat(comparator.compare(2, bdOne)).isGreaterThan(0);
assertThat(comparator.compare(1.0d, bdTwo)).isLessThan(0);
assertThat(comparator.compare(1.0d, bdOne)).isEqualTo(0);
assertThat(comparator.compare(2.0d, bdOne)).isGreaterThan(0);
assertThat(comparator.compare(1.0f, bdTwo)).isLessThan(0);
assertThat(comparator.compare(1.0f, bdOne)).isEqualTo(0);
assertThat(comparator.compare(2.0f, bdOne)).isGreaterThan(0);
assertThat(comparator.compare(1L, bdTwo)).isLessThan(0);
assertThat(comparator.compare(1L, bdOne)).isEqualTo(0);
assertThat(comparator.compare(2L, bdOne)).isGreaterThan(0);
}
@Test
void testNulls() throws EvaluationException {
TypeComparator comparator = new StandardTypeComparator();
assertThat(comparator.compare(null,"abc")).isLessThan(0);
assertThat(comparator.compare(null,null)).isEqualTo(0);
assertThat(comparator.compare("abc",null)).isGreaterThan(0);
}
@Test
void testObjects() throws EvaluationException {
TypeComparator comparator = new StandardTypeComparator();
assertThat(comparator.compare("a","a")).isEqualTo(0);
assertThat(comparator.compare("a","b")).isLessThan(0);
assertThat(comparator.compare("b","a")).isGreaterThan(0);
}
@Test
void testCanCompare() throws EvaluationException {
TypeComparator comparator = new StandardTypeComparator();
assertThat(comparator.canCompare(null,1)).isTrue();
assertThat(comparator.canCompare(1,null)).isTrue();
assertThat(comparator.canCompare(2,1)).isTrue();
assertThat(comparator.canCompare("abc","def")).isTrue();
assertThat(comparator.canCompare("abc",3)).isFalse();
assertThat(comparator.canCompare(String.class,3)).isFalse();
}
@Test
void customComparatorWorksWithEquality() {
final StandardEvaluationContext ctx = new StandardEvaluationContext();
ctx.setTypeComparator(customComparator);
ExpressionParser parser = new SpelExpressionParser();
Expression expr = parser.parseExpression("'1' == 1");
assertThat(expr.getValue(ctx, Boolean.class)).isTrue();
}
// A silly comparator declaring everything to be equal
private TypeComparator customComparator = new TypeComparator() {
@Override
public boolean canCompare(@Nullable Object firstObject, @Nullable Object secondObject) {
return true;
}
@Override
public int compare(@Nullable Object firstObject, @Nullable Object secondObject) throws EvaluationException {
return 0;
}
};
}
|
ComparatorTests
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.