Dataset columns:
  language    stringclasses (1 value)
  repo        stringclasses (60 values)
  path        stringlengths (22 to 294)
  class_span  dict
  source      stringlengths (13 to 1.16M)
  target      stringlengths (1 to 113)
java
elastic__elasticsearch
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/results/PyTorchPassThroughResultsTests.java
{ "start": 640, "end": 2453 }
class ____ extends InferenceResultsTestCase<PyTorchPassThroughResults> { public static PyTorchPassThroughResults createRandomResults() { int rows = randomIntBetween(1, 10); int columns = randomIntBetween(1, 10); double[][] arr = new double[rows][columns]; for (int i = 0; i < rows; i++) { for (int j = 0; j < columns; j++) { arr[i][j] = randomDouble(); } } return new PyTorchPassThroughResults(DEFAULT_RESULTS_FIELD, arr, randomBoolean()); } @Override protected Writeable.Reader<PyTorchPassThroughResults> instanceReader() { return PyTorchPassThroughResults::new; } @Override protected PyTorchPassThroughResults createTestInstance() { return createRandomResults(); } @Override protected PyTorchPassThroughResults mutateInstance(PyTorchPassThroughResults instance) { return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 } public void testAsMap() { PyTorchPassThroughResults testInstance = createTestInstance(); Map<String, Object> asMap = testInstance.asMap(); int size = testInstance.isTruncated ? 2 : 1; assertThat(asMap.keySet(), hasSize(size)); assertArrayEquals(testInstance.getInference(), (double[][]) asMap.get(DEFAULT_RESULTS_FIELD)); if (testInstance.isTruncated) { assertThat(asMap.get("is_truncated"), is(true)); } } @Override void assertFieldValues(PyTorchPassThroughResults createdInstance, IngestDocument document, String parentField, String resultsField) { assertArrayEquals(createdInstance.getInference(), document.getFieldValue(parentField + resultsField, double[][].class)); } }
PyTorchPassThroughResultsTests
java
elastic__elasticsearch
x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/RequestInfo.java
{ "start": 441, "end": 2708 }
class ____ { private static final String CANVAS = "canvas"; public static final String ODBC_32 = "odbc32"; private static final String ODBC_64 = "odbc64"; public static final Set<String> CLIENT_IDS; public static final Set<String> ODBC_CLIENT_IDS; static { Set<String> clientIds = new HashSet<>(4); clientIds.add(CANVAS); clientIds.add(ODBC_32); clientIds.add(ODBC_64); Set<String> odbcClientIds = new HashSet<>(2); odbcClientIds.add(ODBC_32); odbcClientIds.add(ODBC_64); CLIENT_IDS = Collections.unmodifiableSet(clientIds); ODBC_CLIENT_IDS = Collections.unmodifiableSet(odbcClientIds); } private Mode mode; private String clientId; private SqlVersion version; public RequestInfo(Mode mode) { this(mode, null, null); } public RequestInfo(Mode mode, String clientId) { this(mode, clientId, null); } public RequestInfo(Mode mode, String clientId, String version) { mode(mode); clientId(clientId); version(version); } public RequestInfo(Mode mode, SqlVersion version) { mode(mode); this.version = version; } public Mode mode() { return mode; } public void mode(Mode mode) { this.mode = mode; } public String clientId() { return clientId; } public void clientId(String clientId) { if (clientId != null) { clientId = clientId.toLowerCase(Locale.ROOT); if (false == CLIENT_IDS.contains(clientId)) { clientId = null; } } this.clientId = clientId; } public void version(String clientVersion) { this.version = SqlVersion.fromString(clientVersion); } public SqlVersion version() { return version; } @Override public int hashCode() { return Objects.hash(mode, clientId); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; RequestInfo that = (RequestInfo) o; return Objects.equals(mode, that.mode) && Objects.equals(clientId, that.clientId); } }
RequestInfo
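A minimal sketch of the client-id normalization above, assuming the sql-proto artifact (with its Mode enum) is on the classpath and that SqlVersion.fromString tolerates the null version passed by the short constructors:

import org.elasticsearch.xpack.sql.proto.Mode;
import org.elasticsearch.xpack.sql.proto.RequestInfo;

public class RequestInfoDemo {
    public static void main(String[] args) {
        RequestInfo info = new RequestInfo(Mode.PLAIN);

        // Known client ids are lower-cased before the whitelist check.
        info.clientId("ODBC32");
        System.out.println(info.clientId()); // odbc32

        // Ids outside CLIENT_IDS are silently dropped to null rather than rejected.
        info.clientId("my-custom-client");
        System.out.println(info.clientId()); // null
    }
}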
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptivebatch/BlockingInputInfo.java
{ "start": 1246, "end": 1362 }
class ____ provides read-only information of input for {@link * VertexParallelismAndInputInfosDecider}. */ public
that
java
elastic__elasticsearch
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionSupplier.java
{ "start": 666, "end": 1778 }
class ____ implements AggregatorFunctionSupplier { public MedianAbsoluteDeviationLongAggregatorFunctionSupplier() { } @Override public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() { return MedianAbsoluteDeviationLongAggregatorFunction.intermediateStateDesc(); } @Override public List<IntermediateStateDesc> groupingIntermediateStateDesc() { return MedianAbsoluteDeviationLongGroupingAggregatorFunction.intermediateStateDesc(); } @Override public MedianAbsoluteDeviationLongAggregatorFunction aggregator(DriverContext driverContext, List<Integer> channels) { return MedianAbsoluteDeviationLongAggregatorFunction.create(driverContext, channels); } @Override public MedianAbsoluteDeviationLongGroupingAggregatorFunction groupingAggregator( DriverContext driverContext, List<Integer> channels) { return MedianAbsoluteDeviationLongGroupingAggregatorFunction.create(channels, driverContext); } @Override public String describe() { return "median_absolute_deviation of longs"; } }
MedianAbsoluteDeviationLongAggregatorFunctionSupplier
java
apache__camel
core/camel-support/src/main/java/org/apache/camel/support/TypeConverterSupport.java
{ "start": 1033, "end": 1330 }
class ____ {@link TypeConverter} implementations. * <p/> * Implementors need only implement the {@link TypeConverter#convertTo(Class, org.apache.camel.Exchange, Object)} * method, and can rely on the default implementations of the other methods from this support class. */ public abstract
for
java
elastic__elasticsearch
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/BulkProcessorFactory.java
{ "start": 2401, "end": 3430 }
class ____ implements BulkProcessor2.Listener { @Override public void beforeBulk(long executionId, BulkRequest request) {} @Override public void afterBulk(long executionId, BulkRequest request, BulkResponse response) { if (response.hasFailures()) { List<String> failures = Arrays.stream(response.getItems()) .filter(BulkItemResponse::isFailed) .map(r -> r.getId() + " " + r.getFailureMessage()) .collect(Collectors.toList()); logger.error("Bulk write of behavioral analytics events encountered some failures: [{}]", failures); } } @Override public void afterBulk(long executionId, BulkRequest request, Exception failure) { logger.error( "Bulk write of " + request.numberOfActions() + " behavioral analytics events logs failed: " + failure.getMessage(), failure ); } } }
BulkProcessorListener
java
reactor__reactor-core
reactor-core/src/main/java/reactor/util/context/Context0.java
{ "start": 841, "end": 1824 }
class ____ implements CoreContext { static final Context0 INSTANCE = new Context0(); @Override public Context put(Object key, Object value) { Objects.requireNonNull(key, "key"); Objects.requireNonNull(value, "value"); return new Context1(key, value); } @Override public Context delete(Object key) { return this; } @Override public <T> T get(Object key) { throw new NoSuchElementException("Context is empty"); } @Override public boolean hasKey(Object key) { return false; } @Override public int size() { return 0; } @Override public boolean isEmpty() { return true; } @Override public String toString() { return "Context0{}"; } @Override public Stream<Map.Entry<Object, Object>> stream() { return Stream.empty(); } @Override public void forEach(BiConsumer<Object, Object> action) { } @Override public Context putAllInto(Context base) { return base; } @Override public void unsafePutAllInto(ContextN other) { } }
Context0
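Context0 above is the terminal, empty case of Reactor's immutable Context chain; each put returns the next fixed-arity implementation instead of mutating. A short sketch with reactor-core on the classpath:

import reactor.util.context.Context;

public class ContextDemo {
    public static void main(String[] args) {
        Context empty = Context.empty();                    // backed by Context0.INSTANCE
        System.out.println(empty.isEmpty());                // true

        Context one = empty.put("user", "alice");           // returns a new Context1
        System.out.println(one.size());                     // 1
        System.out.println(empty.size());                   // still 0: nothing was mutated

        System.out.println(empty.delete("user") == empty);  // true: delete on empty is a no-op
    }
}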
java
apache__kafka
connect/runtime/src/main/java/org/apache/kafka/connect/runtime/isolation/PluginUtils.java
{ "start": 13621, "end": 16002 }
class ____. Returning only the" + " archives"); } return List.copyOf(archives); } public static Set<PluginSource> pluginSources(Set<Path> pluginLocations, ClassLoader classLoader, PluginClassLoaderFactory factory) { Set<PluginSource> pluginSources = new LinkedHashSet<>(); for (Path pluginLocation : pluginLocations) { try { pluginSources.add(isolatedPluginSource(pluginLocation, classLoader, factory)); } catch (InvalidPathException | MalformedURLException e) { log.error("Invalid path in plugin path: {}. Ignoring.", pluginLocation, e); } catch (IOException e) { log.error("Could not get listing for plugin path: {}. Ignoring.", pluginLocation, e); } } pluginSources.add(classpathPluginSource(classLoader.getParent())); return pluginSources; } public static PluginSource isolatedPluginSource(Path pluginLocation, ClassLoader parent, PluginClassLoaderFactory factory) throws IOException { List<URL> pluginUrls = new ArrayList<>(); List<Path> paths = pluginUrls(pluginLocation); // Infer the type of the source PluginSource.Type type; if (paths.size() == 1 && paths.get(0) == pluginLocation) { if (PluginUtils.isArchive(pluginLocation)) { type = PluginSource.Type.SINGLE_JAR; } else { type = PluginSource.Type.CLASS_HIERARCHY; } } else { type = PluginSource.Type.MULTI_JAR; } for (Path path : paths) { pluginUrls.add(path.toUri().toURL()); } URL[] urls = pluginUrls.toArray(new URL[0]); PluginClassLoader loader = factory.newPluginClassLoader( pluginLocation.toUri().toURL(), urls, parent ); return new PluginSource(pluginLocation, type, loader, urls); } public static PluginSource classpathPluginSource(ClassLoader classLoader) { List<URL> parentUrls = new ArrayList<>(); parentUrls.addAll(forJavaClassPath()); parentUrls.addAll(forClassLoader(classLoader)); return new PluginSource(null, PluginSource.Type.CLASSPATH, classLoader, parentUrls.toArray(new URL[0])); } /** * Return the simple
files
java
elastic__elasticsearch
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregationBuilder.java
{ "start": 2095, "end": 15694 }
class ____ extends AbstractAggregationBuilder<CategorizeTextAggregationBuilder> { static final TermsAggregator.ConstantBucketCountThresholds DEFAULT_BUCKET_COUNT_THRESHOLDS = new TermsAggregator.ConstantBucketCountThresholds(1, 0, 10, -1); public static final String NAME = "categorize_text"; // In 8.3 the algorithm used by this aggregation was completely changed. // Prior to 8.3 the Drain algorithm was used. From 8.3 the same algorithm // we use in our C++ categorization code was used. As a result of this // the aggregation will not perform well in mixed version clusters where // some nodes are pre-8.3 and others are newer, so we throw an error in // this situation. The aggregation was experimental at the time this change // was made, so this is acceptable. public static final TransportVersion ALGORITHM_CHANGED_VERSION = TransportVersions.V_8_3_0; static final ParseField FIELD_NAME = new ParseField("field"); static final ParseField SIMILARITY_THRESHOLD = new ParseField("similarity_threshold"); // The next two are unused, but accepted and ignored to avoid breaking client code static final ParseField MAX_UNIQUE_TOKENS = new ParseField("max_unique_tokens").withAllDeprecated(); static final ParseField MAX_MATCHED_TOKENS = new ParseField("max_matched_tokens").withAllDeprecated(); static final ParseField CATEGORIZATION_FILTERS = new ParseField("categorization_filters"); static final ParseField CATEGORIZATION_ANALYZER = new ParseField("categorization_analyzer"); public static final ObjectParser<CategorizeTextAggregationBuilder, String> PARSER = ObjectParser.fromBuilder( CategorizeTextAggregationBuilder.NAME, CategorizeTextAggregationBuilder::new ); static { PARSER.declareString(CategorizeTextAggregationBuilder::setFieldName, FIELD_NAME); PARSER.declareInt(CategorizeTextAggregationBuilder::setSimilarityThreshold, SIMILARITY_THRESHOLD); // The next two are unused, but accepted and ignored to avoid breaking client code PARSER.declareInt((p, c) -> {}, MAX_UNIQUE_TOKENS); PARSER.declareInt((p, c) -> {}, MAX_MATCHED_TOKENS); PARSER.declareField( CategorizeTextAggregationBuilder::setCategorizationAnalyzerConfig, (p, c) -> CategorizationAnalyzerConfig.buildFromXContentFragment(p, false), CATEGORIZATION_ANALYZER, ObjectParser.ValueType.OBJECT_OR_STRING ); PARSER.declareStringArray(CategorizeTextAggregationBuilder::setCategorizationFilters, CATEGORIZATION_FILTERS); PARSER.declareInt(CategorizeTextAggregationBuilder::shardSize, TermsAggregationBuilder.SHARD_SIZE_FIELD_NAME); PARSER.declareLong(CategorizeTextAggregationBuilder::minDocCount, TermsAggregationBuilder.MIN_DOC_COUNT_FIELD_NAME); PARSER.declareLong(CategorizeTextAggregationBuilder::shardMinDocCount, TermsAggregationBuilder.SHARD_MIN_DOC_COUNT_FIELD_NAME); PARSER.declareInt(CategorizeTextAggregationBuilder::size, REQUIRED_SIZE_FIELD_NAME); } private TermsAggregator.BucketCountThresholds bucketCountThresholds = new TermsAggregator.BucketCountThresholds( DEFAULT_BUCKET_COUNT_THRESHOLDS ); private CategorizationAnalyzerConfig categorizationAnalyzerConfig; private String fieldName; // Default of 70% matches the C++ code private int similarityThreshold = 70; private CategorizeTextAggregationBuilder(String name) { super(name); } public CategorizeTextAggregationBuilder(String name, String fieldName) { super(name); this.fieldName = ExceptionsHelper.requireNonNull(fieldName, FIELD_NAME); } @Override public boolean supportsSampling() { return true; } public String getFieldName() { return fieldName; } public CategorizeTextAggregationBuilder 
setFieldName(String fieldName) { this.fieldName = ExceptionsHelper.requireNonNull(fieldName, FIELD_NAME); return this; } public CategorizeTextAggregationBuilder(StreamInput in) throws IOException { super(in); // Disallow this aggregation in mixed version clusters that cross the algorithm change boundary. if (in.getTransportVersion().before(ALGORITHM_CHANGED_VERSION)) { throw new ElasticsearchStatusException( "[" + NAME + "] aggregation cannot be used in a cluster where some nodes have version [" + ALGORITHM_CHANGED_VERSION.toReleaseVersion() + "] or higher and others have a version before this", RestStatus.BAD_REQUEST ); } this.bucketCountThresholds = new TermsAggregator.BucketCountThresholds(in); this.fieldName = in.readString(); this.similarityThreshold = in.readVInt(); this.categorizationAnalyzerConfig = in.readOptionalWriteable(CategorizationAnalyzerConfig::new); } public double getSimilarityThreshold() { return similarityThreshold; } public CategorizeTextAggregationBuilder setSimilarityThreshold(int similarityThreshold) { this.similarityThreshold = similarityThreshold; if (similarityThreshold < 1 || similarityThreshold > 100) { throw ExceptionsHelper.badRequestException( "[{}] must be in the range [1, 100]. Found [{}] in [{}]", SIMILARITY_THRESHOLD.getPreferredName(), similarityThreshold, name ); } return this; } public CategorizeTextAggregationBuilder setCategorizationAnalyzerConfig(CategorizationAnalyzerConfig categorizationAnalyzerConfig) { if (this.categorizationAnalyzerConfig != null) { throw ExceptionsHelper.badRequestException( "[{}] cannot be used with [{}] - instead specify them as pattern_replace char_filters in the analyzer", CATEGORIZATION_FILTERS.getPreferredName(), CATEGORIZATION_ANALYZER.getPreferredName() ); } this.categorizationAnalyzerConfig = categorizationAnalyzerConfig; return this; } public CategorizeTextAggregationBuilder setCategorizationFilters(List<String> categorizationFilters) { if (categorizationFilters == null || categorizationFilters.isEmpty()) { return this; } if (categorizationAnalyzerConfig != null) { throw ExceptionsHelper.badRequestException( "[{}] cannot be used with [{}] - instead specify them as pattern_replace char_filters in the analyzer", CATEGORIZATION_FILTERS.getPreferredName(), CATEGORIZATION_ANALYZER.getPreferredName() ); } if (categorizationFilters.stream().distinct().count() != categorizationFilters.size()) { throw ExceptionsHelper.badRequestException(Messages.JOB_CONFIG_CATEGORIZATION_FILTERS_CONTAINS_DUPLICATES); } if (categorizationFilters.stream().anyMatch(String::isEmpty)) { throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_CATEGORIZATION_FILTERS_CONTAINS_EMPTY)); } for (String filter : categorizationFilters) { if (isValidRegex(filter) == false) { throw ExceptionsHelper.badRequestException( Messages.getMessage(Messages.JOB_CONFIG_CATEGORIZATION_FILTERS_CONTAINS_INVALID_REGEX, filter) ); } } this.categorizationAnalyzerConfig = CategorizationAnalyzerConfig.buildStandardCategorizationAnalyzer(categorizationFilters); return this; } /** * @param size indicating how many buckets should be returned */ public CategorizeTextAggregationBuilder size(int size) { if (size <= 0) { throw ExceptionsHelper.badRequestException( "[{}] must be greater than 0. 
Found [{}] in [{}]", REQUIRED_SIZE_FIELD_NAME.getPreferredName(), size, name ); } bucketCountThresholds.setRequiredSize(size); return this; } /** * @param shardSize - indicating the number of buckets each shard * will return to the coordinating node (the node that coordinates the * search execution). The higher the shard size is, the more accurate the * results are. */ public CategorizeTextAggregationBuilder shardSize(int shardSize) { if (shardSize <= 0) { throw ExceptionsHelper.badRequestException( "[{}] must be greater than 0. Found [{}] in [{}]", SHARD_SIZE_FIELD_NAME.getPreferredName(), shardSize, name ); } bucketCountThresholds.setShardSize(shardSize); return this; } /** * @param minDocCount the minimum document count a text category should have in order to appear in * the response. */ public CategorizeTextAggregationBuilder minDocCount(long minDocCount) { if (minDocCount < 0) { throw ExceptionsHelper.badRequestException( "[{}] must be greater than or equal to 0. Found [{}] in [{}]", MIN_DOC_COUNT_FIELD_NAME.getPreferredName(), minDocCount, name ); } bucketCountThresholds.setMinDocCount(minDocCount); return this; } /** * @param shardMinDocCount the minimum document count a text category should have on the shard in order to * appear in the response. */ public CategorizeTextAggregationBuilder shardMinDocCount(long shardMinDocCount) { if (shardMinDocCount < 0) { throw ExceptionsHelper.badRequestException( "[{}] must be greater than or equal to 0. Found [{}] in [{}]", SHARD_MIN_DOC_COUNT_FIELD_NAME.getPreferredName(), shardMinDocCount, name ); } bucketCountThresholds.setShardMinDocCount(shardMinDocCount); return this; } protected CategorizeTextAggregationBuilder( CategorizeTextAggregationBuilder clone, AggregatorFactories.Builder factoriesBuilder, Map<String, Object> metadata ) { super(clone, factoriesBuilder, metadata); this.bucketCountThresholds = new TermsAggregator.BucketCountThresholds(clone.bucketCountThresholds); this.fieldName = clone.fieldName; this.similarityThreshold = clone.similarityThreshold; this.categorizationAnalyzerConfig = clone.categorizationAnalyzerConfig; } @Override protected void doWriteTo(StreamOutput out) throws IOException { // Disallow this aggregation in mixed version clusters that cross the algorithm change boundary. 
if (out.getTransportVersion().before(ALGORITHM_CHANGED_VERSION)) { throw new ElasticsearchStatusException( "[" + NAME + "] aggregation cannot be used in a cluster where some nodes have version [" + ALGORITHM_CHANGED_VERSION.toReleaseVersion() + "] or higher and others have a version before this", RestStatus.BAD_REQUEST ); } bucketCountThresholds.writeTo(out); out.writeString(fieldName); out.writeVInt(similarityThreshold); out.writeOptionalWriteable(categorizationAnalyzerConfig); } @Override protected AggregatorFactory doBuild( AggregationContext context, AggregatorFactory parent, AggregatorFactories.Builder subfactoriesBuilder ) throws IOException { return new CategorizeTextAggregatorFactory( name, fieldName, similarityThreshold, bucketCountThresholds, categorizationAnalyzerConfig, context, parent, subfactoriesBuilder, metadata ); } @Override protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); bucketCountThresholds.toXContent(builder, params); builder.field(FIELD_NAME.getPreferredName(), fieldName); builder.field(SIMILARITY_THRESHOLD.getPreferredName(), similarityThreshold); if (categorizationAnalyzerConfig != null) { categorizationAnalyzerConfig.toXContent(builder, params); } builder.endObject(); return builder; } @Override protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBuilder, Map<String, Object> metadata) { return new CategorizeTextAggregationBuilder(this, factoriesBuilder, metadata); } @Override public BucketCardinality bucketCardinality() { return BucketCardinality.MANY; } @Override public String getType() { return NAME; } @Override public TransportVersion getMinimalSupportedVersion() { // This isn't strictly true, as the categorize_text aggregation has existed since 7.16. // However, the implementation completely changed in 8.3, so it's best that if the // coordinating node is on 8.3 or above then it should refuse to use this aggregation // until the older nodes are upgraded. return ALGORITHM_CHANGED_VERSION; } }
CategorizeTextAggregationBuilder
java
spring-projects__spring-boot
core/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/AutoConfigurationPackages.java
{ "start": 5314, "end": 5827 }
class ____ implements ImportBeanDefinitionRegistrar, DeterminableImports { @Override public void registerBeanDefinitions(AnnotationMetadata metadata, BeanDefinitionRegistry registry) { register(registry, new PackageImports(metadata).getPackageNames().toArray(new String[0])); } @Override public Set<Object> determineImports(AnnotationMetadata metadata) { return Collections.singleton(new PackageImports(metadata)); } } /** * Wrapper for a package import. */ private static final
Registrar
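This Registrar is what @AutoConfigurationPackage imports to record the annotated class's package for later scanning. A hedged sketch of observing that effect (spring-boot-autoconfigure on the classpath; the config class is illustrative):

import org.springframework.boot.autoconfigure.AutoConfigurationPackage;
import org.springframework.boot.autoconfigure.AutoConfigurationPackages;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Configuration;

public class RegistrarDemo {
    @Configuration
    @AutoConfigurationPackage // imports the Registrar shown above
    static class Config {}

    public static void main(String[] args) {
        try (AnnotationConfigApplicationContext ctx =
                new AnnotationConfigApplicationContext(Config.class)) {
            // Prints the package of Config, as registered by the Registrar.
            System.out.println(AutoConfigurationPackages.get(ctx.getBeanFactory()));
        }
    }
}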
java
quarkusio__quarkus
extensions/panache/hibernate-orm-panache/deployment/src/test/java/io/quarkus/hibernate/orm/panache/deployment/test/multiple_pu/repository/Issue11842Entity.java
{ "start": 315, "end": 757 }
class ____ implements Serializable { @Id @GeneratedValue(strategy = GenerationType.IDENTITY) private Integer id; @Column(length = 64) private String name; public Integer getId() { return id; } public void setId(Integer id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } }
Issue11842Entity
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/internal/BaselineSessionEventsListenerBuilder.java
{ "start": 463, "end": 686 }
class ____ exposed via the layer-breaking operation * {@link org.hibernate.boot.spi.SessionFactoryOptions#getBaselineSessionEventsListenerBuilder()}. * Clients should avoid direct use of this class. * * @deprecated This
was
java
elastic__elasticsearch
modules/lang-painless/src/doc/java/org/elasticsearch/painless/JavadocExtractor.java
{ "start": 1765, "end": 3514 }
class ____ { private final JavaClassResolver resolver; private final Map<String, ParsedJavaClass> cache = new HashMap<>(); private static final String GPLv2 = "This code is free software; you can redistribute it and/or" + " modify it under the terms of the GNU General Public License version 2 only, as published" + " by the Free Software Foundation."; private static final String ESv2 = "Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one" + " or more contributor license agreements. Licensed under the \"Elastic License" + " 2.0\", the \"GNU Affero General Public License v3.0 only\", and the \"Server Side" + " Public License v 1\"; you may not use this file except in compliance with, at" + " your election, the \"Elastic License 2.0\", the \"GNU Affero General Public" + " License v3.0 only\", or the \"Server Side Public License, v 1\"."; private static final String[] LICENSES = new String[] { GPLv2, ESv2 }; public JavadocExtractor(JavaClassResolver resolver) { this.resolver = resolver; } public ParsedJavaClass parseClass(String className) throws IOException { ParsedJavaClass parsed = cache.get(className); if (parsed != null) { return parsed; } InputStream classStream = resolver.openClassFile(className); parsed = new ParsedJavaClass(); if (classStream != null) { ClassFileVisitor visitor = new ClassFileVisitor(); CompilationUnit cu = StaticJavaParser.parse(classStream); visitor.visit(cu, parsed); cache.put(className, parsed); } return parsed; } public static
JavadocExtractor
java
apache__camel
components/camel-huawei/camel-huaweicloud-functiongraph/src/main/java/org/apache/camel/FunctionGraphComponent.java
{ "start": 980, "end": 1311 }
class ____ extends DefaultComponent { protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception { Endpoint endpoint = new FunctionGraphEndpoint(uri, remaining, this); setProperties(endpoint, parameters); return endpoint; } }
FunctionGraphComponent
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/MultipleInheritanceTest.java
{ "start": 2096, "end": 2333 }
class ____ { @EmbeddedId protected CarPK id; @OneToOne @JoinColumn(name = "CAR_ID_1", referencedColumnName = "CAR_ID_1", insertable = false, updatable = false) CarPart parts; } @Entity(name = "SuperCar") public static
BasicCar
java
elastic__elasticsearch
modules/aggregations/src/main/java/org/elasticsearch/aggregations/pipeline/MovFnPipelineAggregationBuilder.java
{ "start": 1788, "end": 8512 }
class ____ extends AbstractPipelineAggregationBuilder<MovFnPipelineAggregationBuilder> { public static final String NAME = "moving_fn"; private static final ParseField WINDOW = new ParseField("window"); private static final ParseField SHIFT = new ParseField("shift"); private final Script script; private final String bucketsPathString; private String format = null; private GapPolicy gapPolicy = GapPolicy.SKIP; private int window; private int shift; public static final ConstructingObjectParser<MovFnPipelineAggregationBuilder, String> PARSER = new ConstructingObjectParser<>( NAME, false, (args, name) -> new MovFnPipelineAggregationBuilder(name, (String) args[0], (Script) args[1], (int) args[2]) ); static { PARSER.declareString(constructorArg(), BUCKETS_PATH_FIELD); PARSER.declareField( constructorArg(), (p, c) -> Script.parse(p), Script.SCRIPT_PARSE_FIELD, ObjectParser.ValueType.OBJECT_OR_STRING ); PARSER.declareInt(constructorArg(), WINDOW); PARSER.declareInt(MovFnPipelineAggregationBuilder::setShift, SHIFT); PARSER.declareString(MovFnPipelineAggregationBuilder::format, FORMAT); PARSER.declareField(MovFnPipelineAggregationBuilder::gapPolicy, p -> { if (p.currentToken() == XContentParser.Token.VALUE_STRING) { return GapPolicy.parse(p.text().toLowerCase(Locale.ROOT), p.getTokenLocation()); } throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); }, GAP_POLICY, ObjectParser.ValueType.STRING); } public MovFnPipelineAggregationBuilder(String name, String bucketsPath, Script script, int window) { super(name, NAME, new String[] { bucketsPath }); this.bucketsPathString = bucketsPath; this.script = script; if (window <= 0) { throw new IllegalArgumentException("[" + WINDOW.getPreferredName() + "] must be a positive, non-zero integer."); } this.window = window; } public MovFnPipelineAggregationBuilder(StreamInput in) throws IOException { super(in, NAME); bucketsPathString = in.readString(); script = new Script(in); format = in.readOptionalString(); gapPolicy = GapPolicy.readFrom(in); window = in.readInt(); shift = in.readInt(); } @Override protected void doWriteTo(StreamOutput out) throws IOException { out.writeString(bucketsPathString); script.writeTo(out); out.writeOptionalString(format); gapPolicy.writeTo(out); out.writeInt(window); out.writeInt(shift); } /** * Sets the format to use on the output of this aggregation. */ public MovFnPipelineAggregationBuilder format(String format) { if (Strings.isNullOrEmpty(format)) { throw new IllegalArgumentException("[" + FORMAT.getPreferredName() + "] must not be null or an empty string."); } this.format = format; return this; } /** * Gets the format to use on the output of this aggregation. */ public String format() { return format; } protected DocValueFormat formatter() { if (format != null) { return new DocValueFormat.Decimal(format); } return DocValueFormat.RAW; } /** * Sets the gap policy to use for this aggregation. */ public MovFnPipelineAggregationBuilder gapPolicy(GapPolicy gapPolicy) { if (gapPolicy == null) { throw new IllegalArgumentException("[" + GAP_POLICY.getPreferredName() + "] must not be null."); } this.gapPolicy = gapPolicy; return this; } /** * Gets the gap policy to use for this aggregation. 
*/ public GapPolicy gapPolicy() { return gapPolicy; } /** * Returns the window size for this aggregation */ public int getWindow() { return window; } /** * Sets the window size for this aggregation */ public void setWindow(int window) { if (window <= 0) { throw new IllegalArgumentException("[" + WINDOW.getPreferredName() + "] must be a positive, non-zero integer."); } this.window = window; } public void setShift(int shift) { this.shift = shift; } @Override protected void validate(ValidationContext context) { if (window <= 0) { context.addValidationError("[" + WINDOW.getPreferredName() + "] must be a positive, non-zero integer."); } context.validateParentAggSequentiallyOrderedWithoutSkips(NAME, name); } @Override protected PipelineAggregator createInternal(Map<String, Object> metadata) { return new MovFnPipelineAggregator(name, bucketsPathString, script, window, shift, formatter(), gapPolicy, metadata); } @Override protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { builder.field(BUCKETS_PATH.getPreferredName(), bucketsPathString); builder.field(Script.SCRIPT_PARSE_FIELD.getPreferredName(), script); if (format != null) { builder.field(FORMAT.getPreferredName(), format); } builder.field(GAP_POLICY.getPreferredName(), gapPolicy.getName()); builder.field(WINDOW.getPreferredName(), window); builder.field(SHIFT.getPreferredName(), shift); return builder; } @Override protected boolean overrideBucketsPath() { return true; } @Override public int hashCode() { return Objects.hash(super.hashCode(), bucketsPathString, script, format, gapPolicy, window, shift); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null || getClass() != obj.getClass()) return false; if (super.equals(obj) == false) return false; MovFnPipelineAggregationBuilder other = (MovFnPipelineAggregationBuilder) obj; return Objects.equals(bucketsPathString, other.bucketsPathString) && Objects.equals(script, other.script) && Objects.equals(format, other.format) && Objects.equals(gapPolicy, other.gapPolicy) && Objects.equals(window, other.window) && Objects.equals(shift, other.shift); } @Override public String getWriteableName() { return NAME; } @Override public TransportVersion getMinimalSupportedVersion() { return TransportVersion.zero(); } }
MovFnPipelineAggregationBuilder
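A sketch of driving the builder contract above; the buckets path "the_sum" and the script body are made-up placeholders, and the imports assume the modules/aggregations package layout shown in this row's path:

import org.elasticsearch.aggregations.pipeline.MovFnPipelineAggregationBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;

public class MovFnDemo {
    public static void main(String[] args) {
        MovFnPipelineAggregationBuilder movFn = new MovFnPipelineAggregationBuilder(
                "smoothed", "the_sum",
                new Script("MovingFunctions.unweightedAvg(values)"), 10);
        movFn.setShift(3);               // slide the window forward
        movFn.gapPolicy(GapPolicy.SKIP); // also the default
        movFn.format("0.00");

        try {
            movFn.setWindow(0); // rejected, matching the constructor check
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // [window] must be a positive, non-zero integer.
        }
    }
}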
java
apache__kafka
connect/api/src/test/java/org/apache/kafka/connect/data/StructTest.java
{ "start": 4953, "end": 13334 }
class ____ the object @Test public void testInvalidFieldType() { assertThrows(DataException.class, () -> new Struct(FLAT_STRUCT_SCHEMA).put("int8", "should fail because this is a string, not int8")); } @Test public void testInvalidArrayFieldElements() { assertThrows(DataException.class, () -> new Struct(NESTED_SCHEMA).put("array", List.of("should fail since elements should be int8s"))); } @Test public void testInvalidMapKeyElements() { assertThrows(DataException.class, () -> new Struct(NESTED_SCHEMA).put("map", Map.of("should fail because keys should be int8s", (byte) 12))); } @Test public void testInvalidStructFieldSchema() { assertThrows(DataException.class, () -> new Struct(NESTED_SCHEMA).put("nested", new Struct(MAP_SCHEMA))); } @Test public void testInvalidStructFieldValue() { assertThrows(DataException.class, () -> new Struct(NESTED_SCHEMA).put("nested", new Struct(NESTED_CHILD_SCHEMA))); } @Test public void testMissingFieldValidation() { // Required int8 field Schema schema = SchemaBuilder.struct().field("field", REQUIRED_FIELD_SCHEMA).build(); Struct struct = new Struct(schema); assertThrows(DataException.class, struct::validate); } @Test public void testMissingOptionalFieldValidation() { Schema schema = SchemaBuilder.struct().field("field", OPTIONAL_FIELD_SCHEMA).build(); Struct struct = new Struct(schema); struct.validate(); } @Test public void testMissingFieldWithDefaultValidation() { Schema schema = SchemaBuilder.struct().field("field", DEFAULT_FIELD_SCHEMA).build(); Struct struct = new Struct(schema); struct.validate(); } @Test public void testMissingFieldWithDefaultValue() { Schema schema = SchemaBuilder.struct().field("field", DEFAULT_FIELD_SCHEMA).build(); Struct struct = new Struct(schema); assertEquals((byte) 0, struct.get("field")); } @Test public void testMissingFieldWithoutDefaultValue() { Schema schema = SchemaBuilder.struct().field("field", REQUIRED_FIELD_SCHEMA).build(); Struct struct = new Struct(schema); assertNull(struct.get("field")); } @Test public void testEquals() { Struct struct1 = new Struct(FLAT_STRUCT_SCHEMA) .put("int8", (byte) 12) .put("int16", (short) 12) .put("int32", 12) .put("int64", (long) 12) .put("float32", 12.f) .put("float64", 12.) .put("boolean", true) .put("string", "foobar") .put("bytes", ByteBuffer.wrap("foobar".getBytes())); Struct struct2 = new Struct(FLAT_STRUCT_SCHEMA) .put("int8", (byte) 12) .put("int16", (short) 12) .put("int32", 12) .put("int64", (long) 12) .put("float32", 12.f) .put("float64", 12.) .put("boolean", true) .put("string", "foobar") .put("bytes", ByteBuffer.wrap("foobar".getBytes())); Struct struct3 = new Struct(FLAT_STRUCT_SCHEMA) .put("int8", (byte) 12) .put("int16", (short) 12) .put("int32", 12) .put("int64", (long) 12) .put("float32", 12.f) .put("float64", 12.) 
.put("boolean", true) .put("string", "mismatching string") .put("bytes", ByteBuffer.wrap("foobar".getBytes())); assertEquals(struct1, struct2); assertNotEquals(struct1, struct3); List<Byte> array = List.of((byte) 1, (byte) 2); Map<Integer, String> map = Map.of(1, "string"); struct1 = new Struct(NESTED_SCHEMA) .put("array", array) .put("map", map) .put("nested", new Struct(NESTED_CHILD_SCHEMA).put("int8", (byte) 12)); List<Byte> array2 = List.of((byte) 1, (byte) 2); Map<Integer, String> map2 = Map.of(1, "string"); struct2 = new Struct(NESTED_SCHEMA) .put("array", array2) .put("map", map2) .put("nested", new Struct(NESTED_CHILD_SCHEMA).put("int8", (byte) 12)); List<Byte> array3 = List.of((byte) 1, (byte) 2, (byte) 3); Map<Integer, String> map3 = Map.of(2, "string"); struct3 = new Struct(NESTED_SCHEMA) .put("array", array3) .put("map", map3) .put("nested", new Struct(NESTED_CHILD_SCHEMA).put("int8", (byte) 13)); assertEquals(struct1, struct2); assertNotEquals(struct1, struct3); } @Test public void testEqualsAndHashCodeWithByteArrayValue() { Struct struct1 = new Struct(FLAT_STRUCT_SCHEMA) .put("int8", (byte) 12) .put("int16", (short) 12) .put("int32", 12) .put("int64", (long) 12) .put("float32", 12.f) .put("float64", 12.) .put("boolean", true) .put("string", "foobar") .put("bytes", "foobar".getBytes()); Struct struct2 = new Struct(FLAT_STRUCT_SCHEMA) .put("int8", (byte) 12) .put("int16", (short) 12) .put("int32", 12) .put("int64", (long) 12) .put("float32", 12.f) .put("float64", 12.) .put("boolean", true) .put("string", "foobar") .put("bytes", "foobar".getBytes()); Struct struct3 = new Struct(FLAT_STRUCT_SCHEMA) .put("int8", (byte) 12) .put("int16", (short) 12) .put("int32", 12) .put("int64", (long) 12) .put("float32", 12.f) .put("float64", 12.) .put("boolean", true) .put("string", "foobar") .put("bytes", "mismatching_string".getBytes()); // Verify contract for equals: method must be reflexive and transitive assertEquals(struct1, struct2); assertEquals(struct2, struct1); assertNotEquals(struct1, struct3); assertNotEquals(struct2, struct3); // Testing hashCode against a hardcoded value here would be incorrect: hashCode values need not be equal for any // two distinct executions. However, based on the general contract for hashCode, if two objects are equal, their // hashCodes must be equal. If they are not equal, their hashCodes should not be equal for performance reasons. 
assertEquals(struct1.hashCode(), struct2.hashCode()); assertNotEquals(struct1.hashCode(), struct3.hashCode()); assertNotEquals(struct2.hashCode(), struct3.hashCode()); } @Test public void testValidateStructWithNullValue() { Schema schema = SchemaBuilder.struct() .field("one", Schema.STRING_SCHEMA) .field("two", Schema.STRING_SCHEMA) .field("three", Schema.STRING_SCHEMA) .build(); Struct struct = new Struct(schema); Exception e = assertThrows(DataException.class, struct::validate); assertEquals("Invalid value: null used for required field: \"one\", schema type: STRING", e.getMessage()); } @Test public void testPutNullField() { final String fieldName = "fieldName"; Schema testSchema = SchemaBuilder.struct() .field(fieldName, Schema.STRING_SCHEMA); Struct struct = new Struct(testSchema); assertThrows(DataException.class, () -> struct.put((Field) null, "valid")); } @Test public void testInvalidPutIncludesFieldName() { final String fieldName = "fieldName"; Schema testSchema = SchemaBuilder.struct() .field(fieldName, Schema.STRING_SCHEMA); Struct struct = new Struct(testSchema); Exception e = assertThrows(DataException.class, () -> struct.put(fieldName, null)); assertEquals("Invalid value: null used for required field: \"fieldName\", schema type: STRING", e.getMessage()); } }
of
java
google__guava
android/guava-tests/test/com/google/common/collect/OrderingTest.java
{ "start": 42460, "end": 43820 }
class ____<T extends @Nullable Object> implements Comparable<Composite<T>> { final T value; final int rank; Composite(T value, int rank) { this.value = value; this.rank = rank; } // natural order is by rank only; the test will compound() this with the // order of 't'. @Override public int compareTo(Composite<T> that) { return Integer.compare(rank, that.rank); } static <T extends @Nullable Object> Function<Composite<T>, T> getValueFunction() { return new Function<Composite<T>, T>() { @Override public T apply(Composite<T> from) { return from.value; } }; } } @J2ktIncompatible @GwtIncompatible // NullPointerTester public void testNullPointerExceptions() { NullPointerTester tester = new NullPointerTester(); tester.testAllPublicStaticMethods(Ordering.class); // any Ordering<Object> instance that accepts nulls should be good enough tester.testAllPublicInstanceMethods(Ordering.usingToString().nullsFirst()); } private static <T extends @Nullable Object> List<T> shuffledCopy(List<T> in, Random random) { List<T> mutable = new ArrayList<>(in); List<T> out = new ArrayList<>(); while (!mutable.isEmpty()) { out.add(mutable.remove(random.nextInt(mutable.size()))); } return out; } }
Composite
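The fixture above exists so the test can compound() a rank-only natural order with an order on the wrapped value. A self-contained sketch of that pattern (Composite re-declared locally and specialized to String for brevity):

import com.google.common.collect.Ordering;
import java.util.List;

public class CompoundOrderingDemo {
    static final class Composite implements Comparable<Composite> {
        final String value;
        final int rank;
        Composite(String value, int rank) { this.value = value; this.rank = rank; }
        @Override public int compareTo(Composite that) { return Integer.compare(rank, that.rank); }
    }

    public static void main(String[] args) {
        // Natural order compares rank only; compound() breaks ties by the wrapped value.
        Ordering<Composite> byRankThenValue = Ordering.<Composite>natural()
                .compound(Ordering.<String>natural().onResultOf((Composite c) -> c.value));
        List<Composite> sorted = byRankThenValue.immutableSortedCopy(List.of(
                new Composite("b", 2), new Composite("a", 2), new Composite("z", 1)));
        sorted.forEach(c -> System.out.println(c.rank + ":" + c.value)); // 1:z 2:a 2:b
    }
}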
java
reactor__reactor-core
reactor-core/src/test/java/reactor/test/MemoryUtils.java
{ "start": 5328, "end": 7429 }
class ____ extends AtomicBoolean { /** * A pre-released {@link Tracked} instance for convenience in some tests. */ public static final Tracked RELEASED = new Tracked("RELEASED", true); /** * Check if an arbitrary object is a {@link Tracked}, and if so release it. * * @param t the arbitrary object */ @SuppressWarnings("rawtypes") public static void safeRelease(Object t) { if (t instanceof Collection) { for (Object tt : (Collection) t) { if (tt instanceof Tracked) { ((Tracked) tt).release(); } } } else if (t instanceof Tracked) { ((Tracked) t).release(); } } /** * An identifier for the tracked object, which can help debugging when tests fail. */ public final String identifier; public Tracked(String identifier) { this.identifier = identifier; } public Tracked(String identifier, boolean preReleased) { this.identifier = identifier; set(preReleased); } /** * Release this {@link Tracked} object. */ public void release() { set(true); } /** * Check if this {@link Tracked} object has been released. * * @return true if released, false otherwise */ public boolean isReleased() { return get(); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Tracked tracked = (Tracked) o; return identifier.equals(tracked.identifier); } @Override public int hashCode() { return identifier.hashCode(); } //NOTE: AssertJ has a special representation of AtomicBooleans, so we override it in AssertionsUtils @Override public String toString() { return "Tracked{" + " id=" + identifier + " released=" + get() + " }"; } } }
Tracked
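A sketch of the tracking contract, assuming reactor-core's test utilities (reactor.test.MemoryUtils) are available on the classpath:

import java.util.List;
import reactor.test.MemoryUtils;

public class TrackedDemo {
    public static void main(String[] args) {
        MemoryUtils.Tracked resource = new MemoryUtils.Tracked("payload-1");
        System.out.println(resource.isReleased()); // false

        // safeRelease unwraps collections and ignores non-Tracked elements.
        MemoryUtils.Tracked.safeRelease(List.of(resource, "not tracked"));
        System.out.println(resource.isReleased()); // true
    }
}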
java
micronaut-projects__micronaut-core
http-netty/src/main/java/io/micronaut/http/netty/body/AvailableNettyByteBody.java
{ "start": 2016, "end": 5632 }
class ____ extends InternalByteBody implements CloseableAvailableByteBody { private final long length; @Nullable private ByteBuf buffer; public AvailableNettyByteBody(@NonNull ByteBuf buffer) { this.buffer = Objects.requireNonNull(buffer, "buffer"); this.length = buffer.readableBytes(); } public static CloseableAvailableByteBody empty() { return AvailableByteArrayBody.create( NettyReadBufferFactory.of(ByteBufAllocator.DEFAULT).createEmpty()); } @NonNull public static ByteBuf toByteBuf(@NonNull AvailableByteBody body) { return NettyByteBodyFactory.toByteBuf(body); } /** * This is a wrapper around {@link AvailableNettyByteBody#AvailableNettyByteBody(ByteBuf)} * with an extra body length check. * * @param loop The event loop for constructing {@link StreamingNettyByteBody} * @param bodySizeLimits The body size limits to check * @param buf The input buffer * @return The body with the given input buffer, or a {@link StreamingNettyByteBody} with the * appropriate content length error */ @NonNull public static CloseableByteBody createChecked(@NonNull EventLoop loop, @NonNull BodySizeLimits bodySizeLimits, @NonNull ByteBuf buf) { return new NettyByteBodyFactory(buf.alloc(), loop).createChecked(bodySizeLimits, buf); } public ByteBuf peek() { ByteBuf b = buffer; if (b == null) { failClaim(); } return b; } @Override public @NonNull InputStream toInputStream() { return new ByteBufInputStream(claim(), true); } @Override public long length() { return length; } @NonNull private ByteBuf claim() { ByteBuf b = buffer; if (b == null) { failClaim(); } recordPrimaryOp(); this.buffer = null; BaseSharedBuffer.logClaim(); return b; } @Override public @NonNull ExecutionFlow<? extends CloseableAvailableByteBody> bufferFlow() { return ExecutionFlow.just(new AvailableNettyByteBody(claim())); } @Override public void close() { ByteBuf b = buffer; this.buffer = null; if (b != null) { recordClosed(); b.release(); } } @SuppressWarnings("deprecation") @Override public @NonNull Publisher<ReadBuffer> toReadBufferPublisher() { return Flux.just(NettyReadBufferFactory.of(ByteBufAllocator.DEFAULT).adapt(claim())); } @Override public byte @NonNull [] toByteArray() { ByteBuf b = claim(); try { return ByteBufUtil.getBytes(b); } finally { b.release(); } } @Override public @NonNull ByteBuffer<?> toByteBuffer() { return NettyByteBufferFactory.DEFAULT.wrap(claim()); } @Override public @NonNull CloseableByteBody move() { return new AvailableNettyByteBody(claim()); } @Override public @NonNull String toString(Charset charset) { ByteBuf b = claim(); try { return b.toString(charset); } finally { b.release(); } } @Override public @NonNull CloseableAvailableByteBody split() { ByteBuf b = buffer; if (b == null) { failClaim(); } return new AvailableNettyByteBody(b.retainedSlice()); } @Override public void touch() { ByteBuf b = buffer; if (b != null) { b.touch(); } } }
AvailableNettyByteBody
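A sketch of the single-claim semantics above, assuming micronaut-http-netty and Netty are on the classpath; the first primary operation claims the ByteBuf and clears the field:

import io.micronaut.http.netty.body.AvailableNettyByteBody;
import io.netty.buffer.Unpooled;

public class ByteBodyDemo {
    public static void main(String[] args) {
        AvailableNettyByteBody body = new AvailableNettyByteBody(
                Unpooled.copiedBuffer(new byte[] {1, 2, 3}));
        System.out.println(body.length());             // 3
        System.out.println(body.toByteArray().length); // 3; claims and releases the buffer
        // A second primary operation (toByteArray, toInputStream, move, ...) would
        // now fail via failClaim(), because the buffer field has been cleared.
    }
}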
java
google__guice
core/src/com/google/inject/internal/InterceptorStackCallback.java
{ "start": 1252, "end": 2044 }
class ____ implements InvocationHandler { private static final String GUICE_INTERNAL_AOP_PACKAGE = "com.google.inject.internal.aop"; final Method method; final MethodInterceptor[] interceptors; final BiFunction<Object, Object[], Object> superInvoker; public InterceptorStackCallback( Method method, List<MethodInterceptor> interceptors, BiFunction<Object, Object[], Object> superInvoker) { this.method = method; this.interceptors = interceptors.toArray(new MethodInterceptor[interceptors.size()]); this.superInvoker = superInvoker; } @Override public Object invoke(Object proxy, Method unused, Object[] arguments) throws Throwable { return new InterceptedMethodInvocation(proxy, arguments, 0).proceed(); } private
InterceptorStackCallback
java
apache__camel
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/Dhis2EndpointBuilderFactory.java
{ "start": 44329, "end": 44641 }
class ____ extends AbstractEndpointBuilder implements Dhis2EndpointBuilder, AdvancedDhis2EndpointBuilder { public Dhis2EndpointBuilderImpl(String path) { super(componentName, path); } } return new Dhis2EndpointBuilderImpl(path); } }
Dhis2EndpointBuilderImpl
java
spring-projects__spring-framework
spring-web/src/main/java/org/springframework/web/bind/MissingRequestCookieException.java
{ "start": 1021, "end": 2665 }
class ____ extends MissingRequestValueException { private final String cookieName; private final MethodParameter parameter; /** * Constructor for MissingRequestCookieException. * @param cookieName the name of the missing request cookie * @param parameter the method parameter */ public MissingRequestCookieException(String cookieName, MethodParameter parameter) { this(cookieName, parameter, false); } /** * Constructor for use when a value was present but converted to {@code null}. * @param cookieName the name of the missing request cookie * @param parameter the method parameter * @param missingAfterConversion whether the value became null after conversion * @since 5.3.6 */ public MissingRequestCookieException( String cookieName, MethodParameter parameter, boolean missingAfterConversion) { super("", missingAfterConversion, null, new Object[] {cookieName}); this.cookieName = cookieName; this.parameter = parameter; getBody().setDetail("Required cookie '" + this.cookieName + "' is not present."); } @Override public String getMessage() { return "Required cookie '" + this.cookieName + "' for method parameter type " + this.parameter.getNestedParameterType().getSimpleName() + " is " + (isMissingAfterConversion() ? "present but converted to null" : "not present"); } /** * Return the expected name of the request cookie. */ public final String getCookieName() { return this.cookieName; } /** * Return the method parameter bound to the request cookie. */ public final MethodParameter getParameter() { return this.parameter; } }
MissingRequestCookieException
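A small sketch of the message this exception renders; the handle method is hypothetical and exists only to obtain a MethodParameter:

import org.springframework.core.MethodParameter;
import org.springframework.web.bind.MissingRequestCookieException;

public class CookieExceptionDemo {
    void handle(String sessionId) {} // hypothetical controller method

    public static void main(String[] args) throws Exception {
        MethodParameter parameter = new MethodParameter(
                CookieExceptionDemo.class.getDeclaredMethod("handle", String.class), 0);
        MissingRequestCookieException ex =
                new MissingRequestCookieException("SESSION", parameter);
        System.out.println(ex.getMessage());
        // Required cookie 'SESSION' for method parameter type String is not present
    }
}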
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/query/sqm/tree/expression/SqmDurationUnit.java
{ "start": 704, "end": 1926 }
class ____<T> extends AbstractSqmNode implements SqmTypedNode<T> { private final TemporalUnit unit; private final ReturnableType<T> type; public SqmDurationUnit(TemporalUnit unit, ReturnableType<T> type, NodeBuilder nodeBuilder) { super( nodeBuilder ); this.type = type; this.unit = unit; } @Override public SqmDurationUnit<T> copy(SqmCopyContext context) { return this; } public ReturnableType<T> getType() { return type; } @Override public <R> R accept(SemanticQueryWalker<R> walker) { return walker.visitDurationUnit( this ); } public TemporalUnit getUnit() { return unit; } @Override public @Nullable SqmBindableType<T> getNodeType() { return nodeBuilder().resolveExpressible( type ); } @Override public void appendHqlString(StringBuilder hql, SqmRenderContext context) { hql.append( unit ); } @Override public boolean equals(@Nullable Object object) { return object instanceof SqmDurationUnit<?> that && this.unit == that.unit; } @Override public int hashCode() { return unit.hashCode(); } @Override public boolean isCompatible(Object object) { return equals( object ); } @Override public int cacheHashCode() { return hashCode(); } }
SqmDurationUnit
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/state/changelog/StateChangelogWriter.java
{ "start": 1134, "end": 4090 }
interface ____<Handle extends ChangelogStateHandle> extends AutoCloseable { /** Get the initial {@link SequenceNumber} that is used for the first element. */ SequenceNumber initialSequenceNumber(); /** * Get {@link SequenceNumber} to be used for the next element added by {@link #append(int, * byte[]) append}. */ SequenceNumber nextSequenceNumber(); /** Appends the provided **metadata** to this log. No persistence guarantees. */ void appendMeta(byte[] value) throws IOException; /** Appends the provided data to this log. No persistence guarantees. */ void append(int keyGroup, byte[] value) throws IOException; /** * Durably persist previously {@link #append(int, byte[]) appended} data starting from the * provided {@link SequenceNumber} and up to the latest change added. After this call, one of * {@link #confirm(SequenceNumber, SequenceNumber, long) confirm}, {@link #reset(SequenceNumber, * SequenceNumber, long) reset}, or {@link #truncate(SequenceNumber) truncate} eventually must * be called for the corresponding change set. * * @param from inclusive * @param checkpointId to persist */ CompletableFuture<SnapshotResult<Handle>> persist(SequenceNumber from, long checkpointId) throws IOException; /** * Truncate this state changelog to free up the resources and collect any garbage. That means: * * <ul> * <li>Discard the written state changes - in the provided range [from; to) * <li>Truncate the in-memory view of this changelog - in the range [0; to) * </ul> * * Called upon state materialization. Any ongoing persist calls will not be affected. * * <p>WARNING: the range [from; to) must not include any range that is included into any * checkpoint that is not subsumed or aborted. * * @param to exclusive */ void truncate(SequenceNumber to); /** * Mark the given state changes as confirmed by the JM. * * @param from inclusive * @param to exclusive * @param checkpointId to confirm */ void confirm(SequenceNumber from, SequenceNumber to, long checkpointId); /** * Reset the given state changes. Called upon abort so that, if requested later, these changes * will be re-uploaded. */ void reset(SequenceNumber from, SequenceNumber to, long checkpointId); /** * Truncate the tail of the log and close it. No new appends will be possible. Any appended but * not persisted records will be lost. * * @param from {@link SequenceNumber} from which to truncate the changelog, inclusive */ void truncateAndClose(SequenceNumber from); /** * Close this log. No new appends will be possible. Any appended but not persisted records will * be lost. */ void close(); }
StateChangelogWriter
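A hedged sketch of the append, persist, confirm lifecycle the javadoc above describes; the call order is illustrative only, not a real Flink checkpointing sequence:

import java.io.IOException;
import java.util.concurrent.CompletableFuture;
import org.apache.flink.runtime.state.SnapshotResult;
import org.apache.flink.runtime.state.changelog.ChangelogStateHandle;
import org.apache.flink.runtime.state.changelog.SequenceNumber;
import org.apache.flink.runtime.state.changelog.StateChangelogWriter;

public class ChangelogSketch {
    static <H extends ChangelogStateHandle> void checkpoint(
            StateChangelogWriter<H> writer, long checkpointId) throws IOException {
        SequenceNumber from = writer.nextSequenceNumber();
        writer.append(0, new byte[] {1, 2, 3});          // buffered change for key group 0
        SequenceNumber to = writer.nextSequenceNumber(); // exclusive end of this change set
        CompletableFuture<SnapshotResult<H>> durable =
                writer.persist(from, checkpointId);      // durably persist [from, latest]
        durable.thenRun(() ->                            // once the JM acknowledges:
                writer.confirm(from, to, checkpointId));
    }
}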
java
apache__camel
components/camel-azure/camel-azure-eventgrid/src/main/java/org/apache/camel/component/azure/eventgrid/EventGridComponent.java
{ "start": 1277, "end": 4920 }
class ____ extends DefaultComponent { private static final Logger LOG = LoggerFactory.getLogger(EventGridComponent.class); @Metadata private EventGridConfiguration configuration = new EventGridConfiguration(); public EventGridComponent() { } @Override protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception { final EventGridConfiguration configuration = this.configuration.copy(); // Set the topic endpoint from the remaining part if not using a custom client if (configuration.getPublisherClient() == null && ObjectHelper.isNotEmpty(remaining)) { configuration.setTopicEndpoint(remaining); } final EventGridEndpoint endpoint = new EventGridEndpoint(uri, this, configuration); setProperties(endpoint, parameters); // Ensure we use default credential type if not configured if (endpoint.getConfiguration().getTokenCredential() == null && endpoint.getConfiguration().getAzureKeyCredential() == null && endpoint.getConfiguration().getAccessKey() == null) { if (endpoint.getConfiguration().getCredentialType() == null) { endpoint.getConfiguration().setCredentialType(CredentialType.AZURE_IDENTITY); } } else if (endpoint.getConfiguration().getTokenCredential() != null) { boolean azure = endpoint.getConfiguration().getTokenCredential() instanceof DefaultAzureCredential; endpoint.getConfiguration() .setCredentialType(azure ? CredentialType.AZURE_IDENTITY : CredentialType.TOKEN_CREDENTIAL); } else if (endpoint.getConfiguration().getAzureKeyCredential() != null || endpoint.getConfiguration().getAccessKey() != null) { endpoint.getConfiguration().setCredentialType(CredentialType.ACCESS_KEY); } validateConfigurations(configuration); return endpoint; } /** * The component configurations */ public EventGridConfiguration getConfiguration() { return configuration; } public void setConfiguration(EventGridConfiguration configuration) { this.configuration = configuration; } private void validateConfigurations(final EventGridConfiguration configuration) { if (configuration.getPublisherClient() == null) { if (ObjectHelper.isEmpty(configuration.getTopicEndpoint())) { throw new IllegalArgumentException("Topic endpoint must be specified."); } if (!isAccessKeySet(configuration) && !isTokenCredentialSet(configuration) && !isAzureIdentitySet(configuration)) { throw new IllegalArgumentException( "Azure EventGrid AccessKey, AzureKeyCredential, TokenCredential or Azure Identity must be specified."); } } } private boolean isAccessKeySet(final EventGridConfiguration configuration) { return ObjectHelper.isNotEmpty(configuration.getAccessKey()) || ObjectHelper.isNotEmpty(configuration.getAzureKeyCredential()); } private boolean isTokenCredentialSet(final EventGridConfiguration configuration) { return ObjectHelper.isNotEmpty(configuration.getTokenCredential()); } private boolean isAzureIdentitySet(final EventGridConfiguration configuration) { return ObjectHelper.isNotEmpty(configuration.getCredentialType()) && configuration.getCredentialType().equals(CredentialType.AZURE_IDENTITY); } }
EventGridComponent
java
apache__camel
core/camel-core/src/test/java/org/apache/camel/issues/ContextScopedOnExceptionErrorHandlerRefIssueTwoRoutesTest.java
{ "start": 1176, "end": 2890 }
class ____ extends ContextTestSupport { @Test public void testOnExceptionErrorHandlerRef() throws Exception { getMockEndpoint("mock:a").expectedMessageCount(1); getMockEndpoint("mock:handled").expectedMessageCount(1); getMockEndpoint("mock:dead").expectedMessageCount(0); template.sendBody("direct:start", "Hello World"); assertMockEndpointsSatisfied(); } @Test public void testOnExceptionErrorHandlerRefFoo() throws Exception { getMockEndpoint("mock:a").expectedMessageCount(0); getMockEndpoint("mock:handled").expectedMessageCount(0); getMockEndpoint("mock:dead").expectedMessageCount(1); template.sendBody("direct:foo", "Hello Foo"); assertMockEndpointsSatisfied(); } @Override protected Registry createCamelRegistry() throws Exception { Registry jndi = super.createCamelRegistry(); jndi.bind("myDLC", new DeadLetterChannelBuilder("mock:dead")); return jndi; } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { @Override public void configure() { onException(IllegalArgumentException.class).handled(true).to("mock:handled").end(); from("direct:foo").errorHandler(new RefErrorHandlerDefinition("myDLC")).to("mock:foo") .throwException(new IOException("Damn IO")); from("direct:start").errorHandler(new RefErrorHandlerDefinition("myDLC")).to("mock:a") .throwException(new IllegalArgumentException("Damn")); } }; } }
ContextScopedOnExceptionErrorHandlerRefIssueTwoRoutesTest
java
apache__camel
components/camel-google/camel-google-calendar/src/test/java/org/apache/camel/component/google/calendar/CalendarEventsIT.java
{ "start": 1974, "end": 7445 }
class ____ extends AbstractGoogleCalendarTestSupport {

    private static final Logger LOG = LoggerFactory.getLogger(CalendarEventsIT.class);
    private static final String PATH_PREFIX
            = GoogleCalendarApiCollection.getCollection().getApiName(CalendarEventsApiMethod.class).getName();

    @Test
    public void testInsert() {
        Event event = new Event();
        event.setSummary("Feed the Camel");
        event.setLocation("Somewhere");

        ArrayList<EventAttendee> attendees = new ArrayList<>();
        attendees.add(new EventAttendee().setEmail("camel-google-calendar.janstey@gmail.com"));
        event.setAttendees(attendees);

        Date startDate = new Date();
        Date endDate = new Date(startDate.getTime() + 3600000);
        DateTime start = new DateTime(startDate, TimeZone.getTimeZone("UTC"));
        event.setStart(new EventDateTime().setDateTime(start));
        DateTime end = new DateTime(endDate, TimeZone.getTimeZone("UTC"));
        event.setEnd(new EventDateTime().setDateTime(end));

        final Map<String, Object> headers = new HashMap<>();
        // parameter type is String
        headers.put("CamelGoogleCalendar.calendarId", getCalendar().getId());
        // parameter type is com.google.api.services.calendar.model.Event
        headers.put("CamelGoogleCalendar.content", event);

        final com.google.api.services.calendar.model.Event result = requestBodyAndHeaders("direct://INSERT", null, headers);

        assertEquals("Feed the Camel", result.getSummary());
        LOG.debug("insert: {}", result);
    }

    @Test
    public void testManipulatingAnEvent() {
        // Add an event
        Map<String, Object> headers = new HashMap<>();
        // parameter type is String
        headers.put("CamelGoogleCalendar.calendarId", getCalendar().getId());
        // parameter type is String
        headers.put("CamelGoogleCalendar.text", "Feed the Camel");
        com.google.api.services.calendar.model.Event result = requestBodyAndHeaders("direct://QUICKADD", null, headers);
        assertNotNull(result, "quickAdd result");

        // Check if it is in the list of events for this calendar
        com.google.api.services.calendar.model.Events events = requestBody("direct://LIST", getCalendar().getId());
        Event item = events.getItems().get(0);
        String eventId = item.getId();
        assertEquals("Feed the Camel", item.getSummary());

        // Get the event metadata
        headers = new HashMap<>();
        // parameter type is String
        headers.put("CamelGoogleCalendar.calendarId", getCalendar().getId());
        // parameter type is String
        headers.put("CamelGoogleCalendar.eventId", eventId);
        result = requestBodyAndHeaders("direct://GET", null, headers);
        assertEquals("Feed the Camel", result.getSummary());

        // Change the event
        result.setSummary("Feed the Camel later");
        // parameter type is com.google.api.services.calendar.model.Event
        headers.put("CamelGoogleCalendar.content", result);
        Event newResult = requestBodyAndHeaders("direct://UPDATE", null, headers);
        assertEquals("Feed the Camel later", newResult.getSummary());

        // Delete the event
        headers = new HashMap<>();
        // parameter type is String
        headers.put("CamelGoogleCalendar.calendarId", getCalendar().getId());
        // parameter type is String
        headers.put("CamelGoogleCalendar.eventId", eventId);
        result = requestBodyAndHeaders("direct://DELETE", null, headers);

        // Check if it is NOT in the list of events for this calendar
        events = requestBody("direct://LIST", getCalendar().getId());
        assertEquals(0, events.getItems().size());
    }

    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            @Override
            public void configure() {
                // test route for calendarImport
                from("direct://CALENDARIMPORT").to("google-calendar://" + PATH_PREFIX + "/calendarImport");

                // test route for delete
                from("direct://DELETE").to("google-calendar://" + PATH_PREFIX + "/delete");

                // test route for get
                from("direct://GET").to("google-calendar://" + PATH_PREFIX + "/get");

                // test route for insert
                from("direct://INSERT").to("google-calendar://" + PATH_PREFIX + "/insert");

                // test route for instances
                from("direct://INSTANCES").to("google-calendar://" + PATH_PREFIX + "/instances");

                // test route for list
                from("direct://LIST").to("google-calendar://" + PATH_PREFIX + "/list?inBody=calendarId");

                // test route for move
                from("direct://MOVE").to("google-calendar://" + PATH_PREFIX + "/move");

                // test route for patch
                from("direct://PATCH").to("google-calendar://" + PATH_PREFIX + "/patch");

                // test route for quickAdd
                from("direct://QUICKADD").to("google-calendar://" + PATH_PREFIX + "/quickAdd");

                // test route for update
                from("direct://UPDATE").to("google-calendar://" + PATH_PREFIX + "/update");

                // test route for watch
                from("direct://WATCH").to("google-calendar://" + PATH_PREFIX + "/watch");
            }
        };
    }
}
CalendarEventsIT
java
apache__flink
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/rank/ConstantRankRangeWithoutEnd.java
{ "start": 1407, "end": 2008 }
class ____ implements RankRange {

    private static final long serialVersionUID = -1944057111062598696L;

    @JsonProperty(ConstantRankRange.FIELD_NAME_START)
    private final long rankStart;

    @JsonCreator
    public ConstantRankRangeWithoutEnd(
            @JsonProperty(ConstantRankRange.FIELD_NAME_START) long rankStart) {
        this.rankStart = rankStart;
    }

    @Override
    public String toString(List<String> inputFieldNames) {
        return toString();
    }

    @Override
    public String toString() {
        return "rankStart=" + rankStart;
    }
}
ConstantRankRangeWithoutEnd
java
assertj__assertj-core
assertj-core/src/test/java/org/assertj/core/condition/NestableConditionFixtures.java
{ "start": 3160, "end": 3341 }
class ____ extends Customer {
  final Integer value;

  ValueCustomer(Name name, Address address, Integer value) {
    super(name, address);
    this.value = value;
  }
}
ValueCustomer
java
google__guice
core/src/com/google/inject/internal/DeclaredMembers.java
{ "start": 1268, "end": 1416 }
class ____ is sufficient to compare the non-generic method * signature which consists of the name, return type and parameter types. */ public final
it
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/FutureTransformAsyncTest.java
{ "start": 4949, "end": 6134 }
class ____ {
  private Executor executor;

  ListenableFuture<String> foo(String s) {
    return Futures.immediateFuture(s);
  }

  ListenableFuture<String> test() {
    ListenableFuture<String> future =
        Futures.transformAsync(
            Futures.immediateFuture(5),
            value -> {
              if (value > 0) {
                return foo("large");
              }
              return Futures.immediateFuture("value: " + value);
            },
            executor);
    return future;
  }
}
""")
        .doTest();
  }

  @Test
  public void transformAsync_statementLambda_throwsCheckedException() {
    compilationHelper
        .addSourceLines(
            "in/Test.java",
            """
            import com.google.common.util.concurrent.Futures;
            import com.google.common.util.concurrent.ListenableFuture;
            import java.io.FileNotFoundException;
            import java.util.concurrent.Executor;
Test
java
spring-projects__spring-framework
spring-core-test/src/test/java/org/springframework/core/test/tools/TestCompilerTests.java
{ "start": 11880, "end": 12342 }
class ____ extends AbstractProcessor {

    private final List<TypeElement> processedAnnotations = new ArrayList<>();

    @Override
    public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
        this.processedAnnotations.addAll(annotations);
        return true;
    }

    public List<TypeElement> getProcessedAnnotations() {
        return this.processedAnnotations;
    }

}

@SupportedAnnotationTypes("java.lang.Deprecated")
static
TestProcessor
java
apache__hadoop
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/AbstractHadoopTestBase.java
{ "start": 1568, "end": 3480 }
class ____ {

  /**
   * System property name to set the test timeout: {@value}.
   */
  public static final String PROPERTY_TEST_DEFAULT_TIMEOUT =
      "test.default.timeout";

  /**
   * The default timeout (in milliseconds) if the system property
   * {@link #PROPERTY_TEST_DEFAULT_TIMEOUT}
   * is not set: {@value}.
   */
  public static final int TEST_DEFAULT_TIMEOUT_VALUE = 100000;

  /**
   * Retrieve the test timeout from the system property
   * {@link #PROPERTY_TEST_DEFAULT_TIMEOUT}, falling back to
   * the value in {@link #TEST_DEFAULT_TIMEOUT_VALUE} if the
   * property is not defined.
   * @return the recommended timeout for tests
   */
  public static int retrieveTestTimeout() {
    String propval = System.getProperty(PROPERTY_TEST_DEFAULT_TIMEOUT,
        Integer.toString(TEST_DEFAULT_TIMEOUT_VALUE));
    int millis;
    try {
      millis = Integer.parseInt(propval);
    } catch (NumberFormatException e) {
      // fall back to the default value, as the property cannot be parsed
      millis = 100000;
    }
    return millis;
  }

  /**
   * The method name.
   */
  @RegisterExtension
  private TestName methodName = new TestName();

  /**
   * Get the method name; defaults to the value of {@link #methodName}.
   * Subclasses may wish to override it, which will tune the thread naming.
   * @return the name of the method.
   */
  protected String getMethodName() {
    return methodName.getMethodName();
  }

  /**
   * Static initializer names this thread "JUnit".
   */
  @BeforeAll
  public static void nameTestThread() {
    Thread.currentThread().setName("JUnit");
  }

  /**
   * Before each method, the thread is renamed to match the method name.
   */
  @BeforeEach
  public void nameThreadToMethod() {
    Thread.currentThread().setName("JUnit-" + getMethodName());
  }
}
AbstractHadoopTestBase
java
google__gson
gson/src/test/java/com/google/gson/functional/JsonAdapterAnnotationOnFieldsTest.java
{ "start": 4547, "end": 4716 }
class ____ {
  @JsonAdapter(GizmoPartTypeAdapterFactory.class)
  final Part part;

  Gizmo(Part part) {
    this.part = part;
  }
}

private static final
Gizmo
java
apache__camel
components/camel-huawei/camel-huaweicloud-iam/src/test/java/org/apache/camel/component/huaweicloud/iam/constants/IAMOperationsTest.java
{ "start": 971, "end": 1301 }
class ____ {
    @Test
    public void testOperations() {
        assertEquals("listUsers", IAMOperations.LIST_USERS);
        assertEquals("getUser", IAMOperations.GET_USER);
        assertEquals("getGroupUsers", IAMOperations.GET_GROUP_USERS);
        assertEquals("listGroups", IAMOperations.LIST_GROUPS);
    }
}
IAMOperationsTest
java
google__guava
android/guava-tests/test/com/google/common/reflect/TypeTokenTest.java
{ "start": 62160, "end": 63597 }
interface ____<E, F> extends List<E> {}

public void testGetSubtype_genericSubtypeOfGenericTypeWithFewerParameters() {
  TypeToken<List<String>> supertype = new TypeToken<List<String>>() {};
  TypeToken<MySpecialList<String, ?>> subtype = new TypeToken<MySpecialList<String, ?>>() {};
  assertTrue(subtype.isSubtypeOf(supertype));
  ParameterizedType actualSubtype =
      (ParameterizedType) supertype.getSubtype(subtype.getRawType()).getType();
  assertEquals(MySpecialList.class, actualSubtype.getRawType());
  assertThat(actualSubtype.getActualTypeArguments()[0]).isEqualTo(String.class);
  assertThat(actualSubtype.getActualTypeArguments()[1]).isInstanceOf(TypeVariable.class);
  assertTrue(TypeToken.of(actualSubtype).isSubtypeOf(supertype));
}

public void testGetSubtype_genericSubtypeOfRawTypeWithFewerTypeParameters() {
  @SuppressWarnings("rawtypes") // test of raw types
  TypeToken<List> supertype = new TypeToken<List>() {};
  @SuppressWarnings("rawtypes") // test of raw types
  TypeToken<MySpecialList> subtype = new TypeToken<MySpecialList>() {};
  assertTrue(subtype.isSubtypeOf(supertype));
  Class<?> actualSubtype = (Class<?>) supertype.getSubtype(subtype.getRawType()).getType();
  assertEquals(MySpecialList.class, actualSubtype);
  assertTrue(TypeToken.of(actualSubtype).isSubtypeOf(supertype));
}

public void testGetSubtype_baseClassWithLessTypeArgs() {
MySpecialList
java
quarkusio__quarkus
independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/TypeCachePollutionUtils.java
{ "start": 81, "end": 1000 }
class ____ {

    static boolean isParameterizedType(final Object o) {
        // Check for ParameterizedTypeImpl first, as it's very likely going
        // to be one; this prevents some cases of type cache pollution (see JDK-8180450).
        if (o instanceof ParameterizedTypeImpl) {
            return true;
        }
        return (o instanceof ParameterizedType);
    }

    static ParameterizedType asParameterizedType(final Object o) {
        // Check for ParameterizedTypeImpl first, as it's very likely going
        // to be one; this prevents some cases of type cache pollution (see JDK-8180450).
        if (o instanceof ParameterizedTypeImpl) {
            // N.B. it's crucial for the purposes of this optimisation that
            // we cast to the concrete type, not to the interface.
            return (ParameterizedTypeImpl) o;
        }
        return (ParameterizedType) o;
    }

}
TypeCachePollutionUtils
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/columndiscriminator/BookDetails.java
{ "start": 163, "end": 398 }
class ____ {
    private String information;

    protected BookDetails(String information) {
        this.information = information;
    }

    protected BookDetails() {
        // default
    }

    public String information() {
        return information;
    }
}
BookDetails
java
apache__dubbo
dubbo-config/dubbo-config-spring/src/test/java/org/apache/dubbo/config/spring/reference/ReferenceKeyTest.java
{ "start": 12600, "end": 16201 }
class ____ { @DubboReference(methods = @Method(name = "sayHello", timeout = 100, retries = 0)) private HelloService helloService; @DubboReference(methods = @Method(timeout = 100, name = "sayHello", retries = 0)) private HelloService helloService2; @DubboReference( methods = @Method(name = "sayHello", timeout = 100, arguments = @Argument(index = 0, callback = true))) private HelloService helloService3; @DubboReference( methods = @Method(arguments = @Argument(callback = true, index = 0), name = "sayHello", timeout = 100)) private HelloService helloService4; // Instance 1 @DubboReference( check = false, parameters = {"a", "2", "b", "1"}, filter = {"echo"}) private HelloService helloServiceWithArray0; // Instance 2 @DubboReference( check = false, parameters = {"a=1", "b", "2"}, filter = {"echo"}) private HelloService helloServiceWithArray1; @DubboReference( parameters = {"b", "2", "a", "1"}, filter = {"echo"}, check = false) private HelloService helloServiceWithArray2; // Instance 3 @DubboReference( check = false, parameters = {"a", "1", "b", "2"}, filter = {"echo"}, methods = { @Method( parameters = {"d", "2", "c", "1"}, name = "sayHello", timeout = 100) }) private HelloService helloServiceWithMethod1; @DubboReference( parameters = {"b=2", "a=1"}, filter = {"echo"}, check = false, methods = { @Method( name = "sayHello", timeout = 100, parameters = {"c", "1", "d", "2"}) }) private HelloService helloServiceWithMethod2; // Instance 4 @DubboReference( parameters = {"a", "1", "b", "2"}, filter = {"echo"}, methods = { @Method( name = "sayHello", arguments = { @Argument(callback = true, type = "String"), @Argument(callback = false, type = "int") }, timeout = 100) }, check = false) private HelloService helloServiceWithArgument1; @DubboReference( check = false, filter = {"echo"}, parameters = {"b", "2", "a", "1"}, methods = { @Method( name = "sayHello", timeout = 100, arguments = { @Argument(callback = false, type = "int"), @Argument(callback = true, type = "String") }) }) private HelloService helloServiceWithArgument2; } @Configuration @ImportResource({ "classpath:/org/apache/dubbo/config/spring/init-reference-keys.xml", "classpath:/org/apache/dubbo/config/spring/init-reference-properties.xml" }) static
ReferenceConfiguration
java
playframework__playframework
documentation/manual/working/javaGuide/main/dependencyinjection/code/javaguide/di/guice/CircularDependencies.java
{ "start": 280, "end": 327 }
class ____ {
  // #circular
  public
NoProvider
java
apache__kafka
storage/src/main/java/org/apache/kafka/storage/internals/checkpoint/OffsetCheckpointFile.java
{ "start": 2976, "end": 3762 }
class ____ implements CheckpointFile.EntryFormatter<TopicPartitionOffset> {

    @Override
    public String toString(TopicPartitionOffset tpo) {
        TopicPartition tp = tpo.tp;
        return tp.topic() + " " + tp.partition() + " " + tpo.offset;
    }

    @Override
    public Optional<TopicPartitionOffset> fromString(String line) {
        String[] parts = WHITESPACES_PATTERN.split(line);
        if (parts.length == 3) {
            return Optional.of(new TopicPartitionOffset(
                    new TopicPartition(parts[0], Integer.parseInt(parts[1])), Long.parseLong(parts[2])));
        } else {
            return Optional.empty();
        }
    }
}

record TopicPartitionOffset(TopicPartition tp, long offset) { }
}
Formatter
java
spring-projects__spring-boot
module/spring-boot-health/src/main/java/org/springframework/boot/health/actuate/endpoint/HealthEndpointGroupsPostProcessor.java
{ "start": 944, "end": 1312 }
interface ____ {

    /**
     * Post-process the given {@link HealthEndpointGroups} instance.
     * @param groups the existing groups instance
     * @return a post-processed groups instance, or the original instance if no
     * post-processing was required
     */
    HealthEndpointGroups postProcessHealthEndpointGroups(HealthEndpointGroups groups);

}
HealthEndpointGroupsPostProcessor
java
spring-projects__spring-framework
spring-orm/src/test/java/org/springframework/orm/jpa/hibernate/beans/NoDefinitionInSpringContextTestBean.java
{ "start": 945, "end": 1365 }
class ____ itself, even though it should delegate to the fallback producer?"
        );
    }

    /*
     * Expect instantiation through a non-default constructor, just to be sure that Spring will fail if it tries to instantiate it,
     * and will subsequently delegate to the fallback bean instance producer.
     */
    public NoDefinitionInSpringContextTestBean(String name, BeanSource source) {
        setName(name);
        setSource(source);
    }
}
by
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/flogger/FloggerLogWithCauseTest.java
{ "start": 2436, "end": 3001 }
class ____ {
  private static final FluentLogger logger = FluentLogger.forEnclosingClass();

  public void test() {
    try {
    } catch (Exception e) {
      logger.atWarning().withCause(e).log("failed");
    }
  }
}
""")
        .doTest();
  }

  @Test
  public void variableUsedInOtherWay() {
    compilationHelper
        .addSourceLines(
            "Test.java",
            """
            import com.google.common.flogger.FluentLogger;
Test
java
alibaba__druid
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/create/MySqlCreateSequenceTest.java
{ "start": 966, "end": 1474 }
class ____ extends MysqlTest {
    @Test
    public void test_one() throws Exception {
        String sql = "CREATE SEQUENCE seq1;";

        List<SQLStatement> stmtList = SQLUtils.toStatementList(sql, JdbcConstants.MYSQL);
        SQLStatement stmt = stmtList.get(0);

        MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
        stmt.accept(visitor);

        String output = SQLUtils.toMySqlString(stmt);
        assertEquals("CREATE SEQUENCE seq1;", output);
    }
}
MySqlCreateSequenceTest
java
elastic__elasticsearch
libs/x-content/src/test/java/org/elasticsearch/xcontent/ObjectParserTests.java
{ "start": 5829, "end": 7080 }
class ____ {
    URI parseURI(XContentParser parser) throws IOException {
        String fieldName = null;
        String host = "";
        int port = 0;
        XContentParser.Token token;
        while ((token = parser.currentToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                fieldName = parser.currentName();
            } else if (token == XContentParser.Token.VALUE_STRING) {
                if (fieldName.equals("host")) {
                    host = parser.text();
                } else {
                    throw new IllegalStateException("boom");
                }
            } else if (token == XContentParser.Token.VALUE_NUMBER) {
                if (fieldName.equals("port")) {
                    port = parser.intValue();
                } else {
                    throw new IllegalStateException("boom");
                }
            }
            parser.nextToken();
        }
        return URI.create(host + ":" + port);
    }
}
ClassicParser
java
spring-projects__spring-boot
module/spring-boot-security-oauth2-resource-server/src/main/java/org/springframework/boot/security/oauth2/server/resource/autoconfigure/reactive/ReactiveOAuth2ResourceServerJwkConfiguration.java
{ "start": 10234, "end": 10787 }
class ____ {

    @Bean
    @ConditionalOnBean(ReactiveJwtDecoder.class)
    SecurityWebFilterChain springSecurityFilterChain(ServerHttpSecurity http, ReactiveJwtDecoder jwtDecoder) {
        http.authorizeExchange((exchanges) -> exchanges.anyExchange().authenticated());
        http.oauth2ResourceServer((server) -> customDecoder(server, jwtDecoder));
        return http.build();
    }

    private void customDecoder(OAuth2ResourceServerSpec server, ReactiveJwtDecoder decoder) {
        server.jwt((jwt) -> jwt.jwtDecoder(decoder));
    }

}

private static
WebSecurityConfiguration
java
apache__commons-lang
src/main/java/org/apache/commons/lang3/time/StopWatch.java
{ "start": 4298, "end": 4441 }
enum ____ {
    SPLIT, UNSPLIT
}

/**
 * Enumeration type which indicates the status of a StopWatch.
 */
private
SplitState
java
hibernate__hibernate-orm
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/inheritance/single/ParentAuditing.java
{ "start": 760, "end": 2650 }
class ____ { private Integer id1; @BeforeClassTemplate public void initData(EntityManagerFactoryScope scope) { // Rev 1 scope.inTransaction( em -> { ParentEntity pe = new ParentEntity( "x" ); em.persist( pe ); id1 = pe.getId(); } ); // Rev 2 scope.inTransaction( em -> { ParentEntity pe = em.find( ParentEntity.class, id1 ); pe.setData( "y" ); } ); } @Test public void testRevisionsCounts(EntityManagerFactoryScope scope) { scope.inEntityManager( em -> { final var auditReader = AuditReaderFactory.get( em ); assertEquals( Arrays.asList( 1, 2 ), auditReader.getRevisions( ParentEntity.class, id1 ) ); } ); } @Test public void testHistoryOfChildId1(EntityManagerFactoryScope scope) { scope.inEntityManager( em -> { final var auditReader = AuditReaderFactory.get( em ); assertNull( auditReader.find( ChildEntity.class, id1, 1 ) ); assertNull( auditReader.find( ChildEntity.class, id1, 2 ) ); } ); } @Test public void testHistoryOfParentId1(EntityManagerFactoryScope scope) { ParentEntity ver1 = new ParentEntity( id1, "x" ); ParentEntity ver2 = new ParentEntity( id1, "y" ); scope.inEntityManager( em -> { final var auditReader = AuditReaderFactory.get( em ); assertEquals( ver1, auditReader.find( ParentEntity.class, id1, 1 ) ); assertEquals( ver2, auditReader.find( ParentEntity.class, id1, 2 ) ); } ); } @Test public void testPolymorphicQuery(EntityManagerFactoryScope scope) { ParentEntity parentVer1 = new ParentEntity( id1, "x" ); scope.inEntityManager( em -> { final var auditReader = AuditReaderFactory.get( em ); assertEquals( parentVer1, auditReader.createQuery().forEntitiesAtRevision( ParentEntity.class, 1 ).getSingleResult() ); assertEquals( 0, auditReader.createQuery().forEntitiesAtRevision( ChildEntity.class, 1 ).getResultList().size() ); } ); } }
ParentAuditing
java
spring-projects__spring-framework
spring-test/src/main/java/org/springframework/test/json/JsonAssert.java
{ "start": 2357, "end": 3361 }
class ____ implements JsonComparator {

    private final JSONComparator jsonAssertComparator;

    JsonAssertJsonComparator(JSONComparator jsonAssertComparator) {
        this.jsonAssertComparator = jsonAssertComparator;
    }

    JsonAssertJsonComparator(JSONCompareMode compareMode) {
        this(new DefaultComparator(compareMode));
    }

    @Override
    public JsonComparison compare(@Nullable String expectedJson, @Nullable String actualJson) {
        if (actualJson == null) {
            return (expectedJson != null) ? JsonComparison.mismatch("Expected null JSON") : JsonComparison.match();
        }
        if (expectedJson == null) {
            return JsonComparison.mismatch("Expected non-null JSON");
        }
        try {
            JSONCompareResult result = JSONCompare.compareJSON(expectedJson, actualJson, this.jsonAssertComparator);
            return (!result.passed()) ? JsonComparison.mismatch(result.getMessage()) : JsonComparison.match();
        } catch (JSONException ex) {
            throw new IllegalStateException(ex);
        }
    }

}
}
JsonAssertJsonComparator
java
reactor__reactor-core
reactor-core/src/main/java/reactor/core/publisher/FluxElapsed.java
{ "start": 1628, "end": 3728 }
class ____<T> implements InnerOperator<T, Tuple2<Long, T>>, QueueSubscription<Tuple2<Long, T>> { final CoreSubscriber<? super Tuple2<Long, T>> actual; final Scheduler scheduler; @SuppressWarnings("NotNullFieldNotInitialized") // s initialized in onSubscribe Subscription s; @Nullable QueueSubscription<T> qs; long lastTime; ElapsedSubscriber(CoreSubscriber<? super Tuple2<Long, T>> actual, Scheduler scheduler) { this.actual = actual; this.scheduler = scheduler; } @Override public @Nullable Object scanUnsafe(Attr key) { if (key == Attr.PARENT) return s; if (key == Attr.RUN_ON) return scheduler; if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC; return InnerOperator.super.scanUnsafe(key); } @Override public void onSubscribe(Subscription s) { if (Operators.validate(this.s, s)) { lastTime = scheduler.now(TimeUnit.MILLISECONDS); this.s = s; actual.onSubscribe(this); } } @Override public CoreSubscriber<? super Tuple2<Long, T>> actual() { return actual; } @SuppressWarnings("DataFlowIssue") // fusion passes nulls via onNext @Override public void onNext(T t) { if(t == null){ actual.onNext(null); return; } actual.onNext(snapshot(t)); } @Override public void onError(Throwable t) { actual.onError(t); } @Override public void onComplete() { actual.onComplete(); } @Override public void request(long n) { s.request(n); } @Override public void cancel() { s.cancel(); } @Override public int requestFusion(int requestedMode) { QueueSubscription<T> qs = Operators.as(s); if (qs != null) { this.qs = qs; return qs.requestFusion(requestedMode); } return Fuseable.NONE; } Tuple2<Long, T> snapshot(T data){ long now = scheduler.now(TimeUnit.MILLISECONDS); long last = lastTime; lastTime = now; long delta = now - last; return Tuples.of(delta, data); } @Override public @Nullable Tuple2<Long, T> poll() { assert qs != null : "Queue
ElapsedSubscriber
java
spring-projects__spring-security
web/src/main/java/org/springframework/security/web/jackson2/DefaultSavedRequestMixin.java
{ "start": 1046, "end": 1257 }
class ____ serialize/deserialize {@link DefaultSavedRequest}. This mixin * use {@link org.springframework.security.web.savedrequest.DefaultSavedRequest.Builder} * to deserialized json.In order to use this mixin
to
java
apache__camel
components/camel-as2/camel-as2-component/src/main/java/org/apache/camel/component/as2/AS2Producer.java
{ "start": 1534, "end": 2471 }
class ____ extends AbstractApiProducer<AS2ApiName, AS2Configuration> {

    public AS2Producer(AS2Endpoint endpoint) {
        super(endpoint, AS2PropertiesHelper.getHelper(endpoint.getCamelContext()));
    }

    @Override
    public void interceptResult(Object methodResult, Exchange resultExchange) {
        HttpCoreContext context = (HttpCoreContext) methodResult;
        resultExchange.setProperty(AS2Constants.AS2_INTERCHANGE, context);

        HttpResponse response = context.getResponse();
        if (response instanceof ClassicHttpResponse classicResponse) {
            HttpEntity entity = classicResponse.getEntity();
            if (entity instanceof DispositionNotificationMultipartReportEntity || entity instanceof MultipartSignedEntity) {
                resultExchange.getMessage().setBody(entity);
            } else {
                resultExchange.getMessage().setBody(null);
            }
        }
    }
}
AS2Producer
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/manytomany/Friend.java
{ "start": 574, "end": 1302 }
class ____ implements Serializable {
    private Integer id;
    private String name;
    private Set<Friend> friends;

    @Id
    @GeneratedValue
    public Integer getId() {
        return id;
    }

    public String getName() {
        return name;
    }

    public void setId(Integer integer) {
        id = integer;
    }

    public void setName(String string) {
        name = string;
    }

    @ManyToMany(
            cascade = {CascadeType.PERSIST, CascadeType.MERGE}
    )
    @JoinTable(
            name = "FRIEND2FRIEND",
            joinColumns = {@JoinColumn(name = "FROM_FR", nullable = false)},
            inverseJoinColumns = {@JoinColumn(name = "TO_FR", nullable = false)}
    )
    public Set<Friend> getFriends() {
        return friends;
    }

    public void setFriends(Set<Friend> friend) {
        this.friends = friend;
    }
}
Friend
java
assertj__assertj-core
assertj-core/src/test/java/org/assertj/core/api/instant/InstantAssert_isStrictlyBetween_Test.java
{ "start": 770, "end": 1261 }
class ____ extends org.assertj.core.api.InstantAssertBaseTest {

  private Instant before = now.minusSeconds(1);
  private Instant after = now.plusSeconds(1);

  @Override
  protected InstantAssert invoke_api_method() {
    return assertions.isStrictlyBetween(before, after);
  }

  @Override
  protected void verify_internal_effects() {
    verify(comparables).assertIsBetween(getInfo(assertions), getActual(assertions), before, after, false, false);
  }
}
InstantAssert_isStrictlyBetween_Test
java
spring-projects__spring-boot
documentation/spring-boot-actuator-docs/src/test/java/org/springframework/boot/actuate/docs/env/EnvironmentEndpointDocumentationTests.java
{ "start": 2669, "end": 6305 }
class ____ extends MockMvcEndpointDocumentationTests { private static final FieldDescriptor activeProfiles = fieldWithPath("activeProfiles") .description("Names of the active profiles, if any."); private static final FieldDescriptor defaultProfiles = fieldWithPath("defaultProfiles") .description("Names of the default profiles, if any."); private static final FieldDescriptor propertySources = fieldWithPath("propertySources") .description("Property sources in order of precedence."); private static final FieldDescriptor propertySourceName = fieldWithPath("propertySources.[].name") .description("Name of the property source."); @Test void env() { assertThat(this.mvc.get().uri("/actuator/env")).hasStatusOk() .apply(document("env/all", preprocessResponse( replacePattern(Pattern.compile( "org/springframework/boot/actuate/autoconfigure/endpoint/web/documentation/"), ""), filterProperties()), responseFields(activeProfiles, defaultProfiles, propertySources, propertySourceName, fieldWithPath("propertySources.[].properties") .description("Properties in the property source keyed by property name."), fieldWithPath("propertySources.[].properties.*.value") .description("Value of the property."), fieldWithPath("propertySources.[].properties.*.origin") .description("Origin of the property, if any.") .optional()))); } @Test void singlePropertyFromEnv() { assertThat(this.mvc.get().uri("/actuator/env/com.example.cache.max-size")).hasStatusOk() .apply(document("env/single", preprocessResponse(replacePattern(Pattern .compile("org/springframework/boot/actuate/autoconfigure/endpoint/web/documentation/"), "")), responseFields( fieldWithPath("property").description("Property from the environment, if found.") .optional(), fieldWithPath("property.source").description("Name of the source of the property."), fieldWithPath("property.value").description("Value of the property."), activeProfiles, defaultProfiles, propertySources, propertySourceName, fieldWithPath("propertySources.[].property") .description("Property in the property source, if any.") .optional(), fieldWithPath("propertySources.[].property.value").description("Value of the property."), fieldWithPath("propertySources.[].property.origin") .description("Origin of the property, if any.") .optional()))); } private OperationPreprocessor filterProperties() { return new ContentModifyingOperationPreprocessor(this::filterProperties); } @SuppressWarnings("unchecked") private byte[] filterProperties(byte[] content, MediaType mediaType) { JsonMapper jsonMapper = JsonMapper.builder().enable(SerializationFeature.INDENT_OUTPUT).build(); Map<String, Object> payload = jsonMapper.readValue(content, Map.class); List<Map<String, Object>> propertySources = (List<Map<String, Object>>) payload.get("propertySources"); for (Map<String, Object> propertySource : propertySources) { Map<String, String> properties = (Map<String, String>) propertySource.get("properties"); Set<String> filteredKeys = properties.keySet() .stream() .filter(this::retainKey) .limit(3) .collect(Collectors.toSet()); properties.keySet().retainAll(filteredKeys); } return jsonMapper.writeValueAsBytes(payload); } private boolean retainKey(String key) { return key.startsWith("java.") || key.equals("JAVA_HOME") || key.startsWith("com.example."); } @Configuration(proxyBeanMethods = false) static
EnvironmentEndpointDocumentationTests
java
spring-projects__spring-boot
core/spring-boot/src/test/java/org/springframework/boot/convert/NumberToDurationConverterTests.java
{ "start": 1294, "end": 2994 }
class ____ { @ConversionServiceTest void convertWhenSimpleWithoutSuffixShouldReturnDuration(ConversionService conversionService) { assertThat(convert(conversionService, 10)).hasMillis(10); assertThat(convert(conversionService, +10)).hasMillis(10); assertThat(convert(conversionService, -10)).hasMillis(-10); } @ConversionServiceTest void convertWhenSimpleWithoutSuffixButWithAnnotationShouldReturnDuration(ConversionService conversionService) { assertThat(convert(conversionService, 10, ChronoUnit.SECONDS)).hasSeconds(10); assertThat(convert(conversionService, +10, ChronoUnit.SECONDS)).hasSeconds(10); assertThat(convert(conversionService, -10, ChronoUnit.SECONDS)).hasSeconds(-10); } private @Nullable Duration convert(ConversionService conversionService, Integer source) { return conversionService.convert(source, Duration.class); } @SuppressWarnings({ "rawtypes", "unchecked" }) private @Nullable Duration convert(ConversionService conversionService, Integer source, @Nullable ChronoUnit defaultUnit) { TypeDescriptor targetType = mock(TypeDescriptor.class); if (defaultUnit != null) { DurationUnit unitAnnotation = AnnotationUtils .synthesizeAnnotation(Collections.singletonMap("value", defaultUnit), DurationUnit.class, null); given(targetType.getAnnotation(DurationUnit.class)).willReturn(unitAnnotation); } given(targetType.getType()).willReturn((Class) Duration.class); return (Duration) conversionService.convert(source, TypeDescriptor.forObject(source), targetType); } static Stream<? extends Arguments> conversionServices() { return ConversionServiceArguments.with(new NumberToDurationConverter()); } }
NumberToDurationConverterTests
java
mybatis__mybatis-3
src/test/java/org/apache/ibatis/autoconstructor/ExtensiveSubject.java
{ "start": 701, "end": 1001 }
class ____ {
  private final byte aByte;
  private final short aShort;
  private final char aChar;
  private final int anInt;
  private final long aLong;
  private final float aFloat;
  private final double aDouble;
  private final boolean aBoolean;
  private final String aString;

  //
ExtensiveSubject
java
spring-projects__spring-framework
spring-context/src/test/java/org/springframework/context/event/EventPublicationInterceptorTests.java
{ "start": 4462, "end": 4776 }
class ____ extends TestApplicationListener implements FactoryBean<Object> {

    @Override
    public Object getObject() {
        return "test";
    }

    @Override
    public Class<String> getObjectType() {
        return String.class;
    }

    @Override
    public boolean isSingleton() {
        return true;
    }
}
}
FactoryBeanTestListener
java
apache__spark
sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedDeltaBinaryPackedReader.java
{ "start": 2220, "end": 11253 }
class ____ extends VectorizedReaderBase { // header data private int blockSizeInValues; private int miniBlockNumInABlock; private int totalValueCount; private long firstValue; private int miniBlockSizeInValues; // values read by the caller private int valuesRead = 0; // variables to keep state of the current block and miniblock private long lastValueRead; // needed to compute the next value private long minDeltaInCurrentBlock; // needed to compute the next value // currentMiniBlock keeps track of the mini block within the current block that // we read and decoded most recently. Only used as an index into // bitWidths array private int currentMiniBlock = 0; private int[] bitWidths; // bit widths for each miniBlock in the current block private int remainingInBlock = 0; // values in current block still to be read private int remainingInMiniBlock = 0; // values in current mini block still to be read private long[] unpackedValuesBuffer; private ByteBufferInputStream in; // temporary buffers used by readByte, readShort, readInteger, and readLong private byte byteVal; private short shortVal; private int intVal; private long longVal; @Override public void initFromPage(int valueCount, ByteBufferInputStream in) throws IOException { JavaUtils.checkArgument(valueCount >= 1, "Page must have at least one value, but it has " + valueCount); this.in = in; // Read the header this.blockSizeInValues = BytesUtils.readUnsignedVarInt(in); this.miniBlockNumInABlock = BytesUtils.readUnsignedVarInt(in); double miniSize = (double) blockSizeInValues / miniBlockNumInABlock; JavaUtils.checkArgument(miniSize % 8 == 0, "miniBlockSize must be multiple of 8, but it's " + miniSize); this.miniBlockSizeInValues = (int) miniSize; // True value count. May be less than valueCount because of nulls this.totalValueCount = BytesUtils.readUnsignedVarInt(in); this.bitWidths = new int[miniBlockNumInABlock]; this.unpackedValuesBuffer = new long[miniBlockSizeInValues]; // read the first value firstValue = BytesUtils.readZigZagVarLong(in); } // True value count. 
May be less than valueCount because of nulls int getTotalValueCount() { return totalValueCount; } @Override public byte readByte() { readValues(1, null, 0, (w, r, v) -> byteVal = (byte) v); return byteVal; } @Override public short readShort() { readValues(1, null, 0, (w, r, v) -> shortVal = (short) v); return shortVal; } @Override public int readInteger() { readValues(1, null, 0, (w, r, v) -> intVal = (int) v); return intVal; } @Override public long readLong() { readValues(1, null, 0, (w, r, v) -> longVal = v); return longVal; } @Override public void readBytes(int total, WritableColumnVector c, int rowId) { readValues(total, c, rowId, (w, r, v) -> w.putByte(r, (byte) v)); } @Override public void readShorts(int total, WritableColumnVector c, int rowId) { readValues(total, c, rowId, (w, r, v) -> w.putShort(r, (short) v)); } @Override public void readIntegers(int total, WritableColumnVector c, int rowId) { readValues(total, c, rowId, (w, r, v) -> w.putInt(r, (int) v)); } // Based on VectorizedPlainValuesReader.readIntegersWithRebase @Override public final void readIntegersWithRebase( int total, WritableColumnVector c, int rowId, boolean failIfRebase) { readValues(total, c, rowId, (w, r, v) -> { if (v < RebaseDateTime.lastSwitchJulianDay()) { if (failIfRebase) { throw DataSourceUtils.newRebaseExceptionInRead("Parquet"); } else { w.putInt(r, RebaseDateTime.rebaseJulianToGregorianDays((int) v)); } } else { w.putInt(r, (int) v); } }); } @Override public void readUnsignedIntegers(int total, WritableColumnVector c, int rowId) { readValues(total, c, rowId, (w, r, v) -> { w.putLong(r, Integer.toUnsignedLong((int) v)); }); } @Override public void readUnsignedLongs(int total, WritableColumnVector c, int rowId) { readValues(total, c, rowId, (w, r, v) -> { w.putByteArray(r, new BigInteger(Long.toUnsignedString(v)).toByteArray()); }); } @Override public void readLongs(int total, WritableColumnVector c, int rowId) { readValues(total, c, rowId, WritableColumnVector::putLong); } @Override public final void readLongsWithRebase( int total, WritableColumnVector c, int rowId, boolean failIfRebase, String timeZone) { readValues(total, c, rowId, (w, r, v) -> { if (v < RebaseDateTime.lastSwitchJulianTs()) { if (failIfRebase) { throw DataSourceUtils.newRebaseExceptionInRead("Parquet"); } else { w.putLong(r, RebaseDateTime.rebaseJulianToGregorianMicros(timeZone, v)); } } else { w.putLong(r, v); } }); } @Override public void skipBytes(int total) { skipValues(total); } @Override public void skipShorts(int total) { skipValues(total); } @Override public void skipIntegers(int total) { skipValues(total); } @Override public void skipLongs(int total) { skipValues(total); } private void readValues(int total, WritableColumnVector c, int rowId, IntegerOutputWriter outputWriter) { if (valuesRead + total > totalValueCount) { throw new ParquetDecodingException( "No more values to read. Total values read: " + valuesRead + ", total count: " + totalValueCount + ", trying to read " + total + " more."); } int remaining = total; // First value if (valuesRead == 0) { outputWriter.write(c, rowId, firstValue); lastValueRead = firstValue; rowId++; remaining--; } while (remaining > 0) { int n; try { n = loadMiniBlockToOutput(remaining, c, rowId, outputWriter); } catch (IOException e) { throw new ParquetDecodingException("Error reading mini block.", e); } rowId += n; remaining -= n; } valuesRead = total - remaining; } /** * Read from a mini block. Read at most 'remaining' values into output. 
* * @return the number of values read into output */ private int loadMiniBlockToOutput(int remaining, WritableColumnVector c, int rowId, IntegerOutputWriter outputWriter) throws IOException { // new block; read the block header if (remainingInBlock == 0) { readBlockHeader(); } // new miniblock, unpack the miniblock if (remainingInMiniBlock == 0) { unpackMiniBlock(); } // read values from miniblock int valuesRead = 0; for (int i = miniBlockSizeInValues - remainingInMiniBlock; i < miniBlockSizeInValues && valuesRead < remaining; i++) { // calculate values from deltas unpacked for current block long outValue = lastValueRead + minDeltaInCurrentBlock + unpackedValuesBuffer[i]; lastValueRead = outValue; outputWriter.write(c, rowId + valuesRead, outValue); remainingInBlock--; remainingInMiniBlock--; valuesRead++; } return valuesRead; } private void readBlockHeader() { try { minDeltaInCurrentBlock = BytesUtils.readZigZagVarLong(in); } catch (IOException e) { throw new ParquetDecodingException("Can not read min delta in current block", e); } readBitWidthsForMiniBlocks(); remainingInBlock = blockSizeInValues; currentMiniBlock = 0; remainingInMiniBlock = 0; } /** * mini block has a size of 8*n, unpack 32 value each time * * see org.apache.parquet.column.values.delta.DeltaBinaryPackingValuesReader#unpackMiniBlock */ private void unpackMiniBlock() throws IOException { Arrays.fill(this.unpackedValuesBuffer, 0); BytePackerForLong packer = Packer.LITTLE_ENDIAN.newBytePackerForLong( bitWidths[currentMiniBlock]); for (int j = 0; j < miniBlockSizeInValues; j += 8) { ByteBuffer buffer = in.slice(packer.getBitWidth()); if (buffer.hasArray()) { packer.unpack8Values(buffer.array(), buffer.arrayOffset() + buffer.position(), unpackedValuesBuffer, j); } else { packer.unpack8Values(buffer, buffer.position(), unpackedValuesBuffer, j); } } remainingInMiniBlock = miniBlockSizeInValues; currentMiniBlock++; } // From org.apache.parquet.column.values.delta.DeltaBinaryPackingValuesReader private void readBitWidthsForMiniBlocks() { for (int i = 0; i < miniBlockNumInABlock; i++) { try { bitWidths[i] = BytesUtils.readIntLittleEndianOnOneByte(in); } catch (IOException e) { throw new ParquetDecodingException("Can not decode bitwidth in block header", e); } } } private void skipValues(int total) { // Read the values but don't write them out (the writer output method is a no-op) readValues(total, null, -1, (w, r, v) -> {}); } }
VectorizedDeltaBinaryPackedReader
java
apache__camel
components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/primary/CustomerImpl.java
{ "start": 852, "end": 1068 }
class ____ implements Customer {

    private final String name;

    public CustomerImpl(String name) {
        this.name = name;
    }

    @Override
    public String name() {
        return name;
    }
}
CustomerImpl
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueue.java
{ "start": 1890, "end": 15419 }
interface ____ { void boundsChanged(int topSlot) throws IOException; } // the slot for the current candidate private static final int CANDIDATE_SLOT = Integer.MAX_VALUE; private final BigArrays bigArrays; private final int maxSize; private final Map<Slot, Integer> map; private final SingleDimensionValuesSource<?>[] arrays; private final CompetitiveBoundsChangedListener competitiveBoundsChangedListener; private LongArray docCounts; private boolean afterKeyIsSet = false; /** * Constructs a composite queue with the specified size and sources. * * @param sources The list of {@link CompositeValuesSourceConfig} to build the composite buckets. * @param size The number of composite buckets to keep. * @param indexReader */ CompositeValuesCollectorQueue(BigArrays bigArrays, SingleDimensionValuesSource<?>[] sources, int size, IndexReader indexReader) { super(size, bigArrays); this.bigArrays = bigArrays; this.maxSize = size; this.arrays = sources; boolean success = false; try { // If the leading source is a GlobalOrdinalValuesSource we can apply an optimization which requires // tracking the highest competitive value. if (arrays[0] instanceof GlobalOrdinalValuesSource globalOrdinalValuesSource) { if (shouldApplyGlobalOrdinalDynamicPruningForLeadingSource(sources, size, indexReader)) { competitiveBoundsChangedListener = globalOrdinalValuesSource::updateHighestCompetitiveValue; } else { competitiveBoundsChangedListener = null; } } else { competitiveBoundsChangedListener = null; } this.map = Maps.newMapWithExpectedSize(size); this.docCounts = bigArrays.newLongArray(1, false); success = true; } finally { if (success == false) { super.close(); } } } private static boolean shouldApplyGlobalOrdinalDynamicPruningForLeadingSource( SingleDimensionValuesSource<?>[] sources, int size, IndexReader indexReader ) { if (sources.length == 0) { return false; } if (sources[0] instanceof GlobalOrdinalValuesSource firstSource) { if (firstSource.mayDynamicallyPrune(indexReader) == false) { return false; } long approximateTotalNumberOfBuckets = firstSource.getUniqueValueCount(); if (sources.length > 1) { // When there are multiple sources, it's hard to guess how many // unique buckets there might be. Let's be conservative and // assume that other sources increase the number of buckets by // 3x. approximateTotalNumberOfBuckets *= 3L; } // If the size is not significantly less than the total number of // buckets then dynamic pruning can't help much. if (size >= approximateTotalNumberOfBuckets / 8) { return false; } // Try to estimate the width of the ordinal range that might be // returned on each page. Since not all ordinals might match the // query, we're increasing `size` by 25%. long rangeWidthPerPage = size + (size / 4); if (sources.length > 1) { // Again assume other sources bump the number of buckets by 3x rangeWidthPerPage /= 3; } if (rangeWidthPerPage > GlobalOrdinalValuesSource.MAX_TERMS_FOR_DYNAMIC_PRUNING) { return false; } return true; } return false; } /** * Return true if this queue produces a {@link LeafBucketCollector} that may * dynamically prune hits that are not competitive. 
*/ public boolean mayDynamicallyPrune() { return competitiveBoundsChangedListener != null; } /** * Sets after key * @param afterKey composite key */ public void setAfterKey(CompositeKey afterKey) { assert afterKey.size() == arrays.length; afterKeyIsSet = true; for (int i = 0; i < afterKey.size(); i++) { try { arrays[i].setAfter(afterKey.get(i)); } catch (IllegalArgumentException ex) { throw new IllegalArgumentException("incompatible value in the position " + i + ": " + ex.getMessage(), ex); } } } @Override protected boolean lessThan(Integer a, Integer b) { return compare(a, b) > 0; } /** * Whether the queue is full or not. */ boolean isFull() { return size() >= maxSize; } /** * Compares the current candidate with the values in the queue and returns * the slot if the candidate is already in the queue or null if the candidate is not present. */ Integer compareCurrent() { return map.get(new Slot(CANDIDATE_SLOT)); } /** * Returns the lowest value (exclusive) of the leading source. */ Comparable<?> getLowerValueLeadSource() { return afterKeyIsSet ? arrays[0].getAfter() : null; } /** * Returns the upper value (inclusive) of the leading source. */ Comparable<?> getUpperValueLeadSource() throws IOException { return size() >= maxSize ? arrays[0].toComparable(top()) : null; } /** * Returns the document count in <code>slot</code>. */ long getDocCount(int slot) { return docCounts.get(slot); } /** * Copies the current value in <code>slot</code>. */ private void copyCurrent(int slot, long value) { for (SingleDimensionValuesSource<?> array : arrays) { array.copyCurrent(slot); } docCounts = bigArrays.grow(docCounts, slot + 1); docCounts.set(slot, value); } /** * Compares the values in <code>slot1</code> with the values in <code>slot2</code>. */ int compare(int slot1, int slot2) { assert slot2 != CANDIDATE_SLOT; for (int i = 0; i < arrays.length; i++) { final int cmp; if (slot1 == CANDIDATE_SLOT) { cmp = arrays[i].compareCurrent(slot2); } else { cmp = arrays[i].compare(slot1, slot2); } if (cmp != 0) { return cmp > 0 ? i + 1 : -(i + 1); } } return 0; } /** * Returns true if the values in <code>slot1</code> are equals to the value in <code>slot2</code>. */ boolean equals(int slot1, int slot2) { assert slot2 != CANDIDATE_SLOT; for (SingleDimensionValuesSource<?> array : arrays) { final int cmp; if (slot1 == CANDIDATE_SLOT) { cmp = array.compareCurrent(slot2); } else { cmp = array.compare(slot1, slot2); } if (cmp != 0) { return false; } } return true; } /** * Returns a hash code value for the values in <code>slot</code>. */ int hashCode(int slot) { int result = 1; for (SingleDimensionValuesSource<?> array : arrays) { result = 31 * result + (slot == CANDIDATE_SLOT ? array.hashCodeCurrent() : array.hashCode(slot)); } return result; } /** * Compares the after values with the values in <code>slot</code>. */ private int compareCurrentWithAfter() { for (int i = 0; i < arrays.length; i++) { int cmp = arrays[i].compareCurrentWithAfter(); if (cmp != 0) { return cmp > 0 ? i + 1 : -(i + 1); } } return 0; } /** * Builds the {@link CompositeKey} for <code>slot</code>. */ CompositeKey toCompositeKey(int slot) throws IOException { assert slot < maxSize; Comparable<?>[] values = new Comparable<?>[arrays.length]; for (int i = 0; i < values.length; i++) { values[i] = arrays[i].toComparable(slot); } return new CompositeKey(values); } /** * Creates the collector that will visit the composite buckets of the matching documents. * The provided collector <code>in</code> is called on each composite bucket. 
*/ LeafBucketCollector getLeafCollector(LeafReaderContext context, LeafBucketCollector in) throws IOException { LeafBucketCollector leafBucketCollector = getLeafCollector(null, context, in); // As we are starting to collect from a new segment we need to update the topChangedListener if present // and if the queue is full. if (competitiveBoundsChangedListener != null && size() >= maxSize) { competitiveBoundsChangedListener.boundsChanged(top()); } return leafBucketCollector; } /** * Creates the collector that will visit the composite buckets of the matching documents. * If <code>forceLeadSourceValue</code> is not null, the leading source will use this value * for each document. * The provided collector <code>in</code> is called on each composite bucket. */ LeafBucketCollector getLeafCollector(Comparable<?> forceLeadSourceValue, LeafReaderContext context, LeafBucketCollector in) throws IOException { int last = arrays.length - 1; LeafBucketCollector collector = in; while (last > 0) { collector = arrays[last--].getLeafCollector(context, collector); } if (forceLeadSourceValue != null) { collector = arrays[last].getLeafCollector(forciblyCast(forceLeadSourceValue), context, collector); } else { collector = arrays[last].getLeafCollector(context, collector); } return collector; } /** * Check if the current candidate should be added in the queue. * @return <code>true</code> if the candidate is competitive (added or already in the queue). */ boolean addIfCompetitive(long inc) throws IOException { return addIfCompetitive(0, inc); } /** * Add or update the current composite key in the queue if the values are competitive. * * @param indexSortSourcePrefix 0 if the index sort is null or doesn't match any of the sources field, * a value greater than 0 indicates the prefix len of the sources that match the index sort * and a negative value indicates that the index sort match the source field but the order is reversed. * @return <code>true</code> if the candidate is competitive (added or already in the queue). * * @throws CollectionTerminatedException if the current collection can be terminated early due to index sorting. */ boolean addIfCompetitive(int indexSortSourcePrefix, long inc) throws IOException { // checks if the candidate key is competitive Integer topSlot = compareCurrent(); if (topSlot != null) { // this key is already in the top N, skip it docCounts.increment(topSlot, inc); return true; } if (afterKeyIsSet) { int cmp = compareCurrentWithAfter(); if (cmp <= 0) { if (indexSortSourcePrefix < 0 && cmp == indexSortSourcePrefix) { // the leading index sort is in the reverse order of the leading source // so we can early terminate when we reach a document that is smaller // than the after key (collected on a previous page). throw new CollectionTerminatedException(); } // key was collected on a previous page, skip it (>= afterKey). return false; } } if (size() >= maxSize) { // the tree map is full, check if the candidate key should be kept int cmp = compare(CANDIDATE_SLOT, top()); if (cmp > 0) { if (cmp <= indexSortSourcePrefix) { // index sort guarantees that there is no key greater or equal than the // current one in the subsequent documents so we can early terminate. throw new CollectionTerminatedException(); } // the candidate key is not competitive, skip it. 
return false; } } // the candidate key is competitive final int newSlot; if (size() >= maxSize) { // the queue is full, we replace the last key with this candidate int slot = pop(); map.remove(new Slot(slot)); // and we recycle the deleted slot newSlot = slot; } else { newSlot = (int) size(); } // move the candidate key to its new slot copyCurrent(newSlot, inc); map.put(new Slot(newSlot), newSlot); add(newSlot); if (competitiveBoundsChangedListener != null && size() >= maxSize) { competitiveBoundsChangedListener.boundsChanged(top()); } return true; } @Override protected void doClose() { Releasables.close(docCounts); } }
CompetitiveBoundsChangedListener
java
alibaba__fastjson
src/test/java/com/alibaba/json/bvt/issue_1300/Issue1371.java
{ "start": 489, "end": 1734 }
enum ____{
    A, B, C, D ,E ;
}

public void testFastjsonEnum(){
    Map<Rooms, Rooms> enumMap = new TreeMap<Rooms, Rooms>();
    enumMap.put(Rooms.C, Rooms.D);
    enumMap.put(Rooms.E, Rooms.A);
    Assert.assertEquals(JSON.toJSONString(enumMap, SerializerFeature.WriteNonStringKeyAsString), "{\"C\":\"D\",\"E\":\"A\"}");
}

//    public void testParsed(){
//
//        String oldStyleJson = "{1:'abc', 2:'cde'}";
//
//        Gson gson = new Gson();
//
//        Map fromJson = gson.fromJson(oldStyleJson, Map.class);
//
//        Assert.assertNull(fromJson.get(1));
//
//        Assert.assertEquals(fromJson.get("1"), "abc" );
//
//        Map parsed = JSON.parseObject(oldStyleJson, Map.class, Feature.IgnoreAutoType, Feature.DisableFieldSmartMatch);
//
//
//        Assert.assertNull(parsed.get(1));
//
//        Assert.assertEquals(parsed.get("1"), "abc" );
//
//    }
//
//    public void testParsed_jackson() throws Exception {
//
//        String oldStyleJson = "{1:\"abc\", 2:\"cde\"}";
//
//        ObjectMapper objectMapper = new ObjectMapper();
//        Map fromJson = objectMapper.readValue(oldStyleJson, Map.class);
//        Assert.assertNull(fromJson.get(1));
//    }
}
Rooms
java
quarkusio__quarkus
extensions/resteasy-classic/resteasy-common/spi/src/main/java/io/quarkus/resteasy/common/spi/ResteasyDotNames.java
{ "start": 5530, "end": 6609 }
class ____ implements Predicate<MethodInfo> {

    @Override
    public boolean test(MethodInfo methodInfo) {
        return methodInfo.hasAnnotation(JSON_IGNORE)
                || methodInfo.hasAnnotation(JSONB_TRANSIENT)
                || methodInfo.hasAnnotation(XML_TRANSIENT);
    }
}

// Types ignored for reflection used by the RESTEasy and SmallRye REST client extensions.
private static final Set<DotName> TYPES_IGNORED_FOR_REFLECTION = new HashSet<>(List.of(
        // Consider adding packages below instead if it makes more sense
));

private static final String[] PACKAGES_IGNORED_FOR_REFLECTION = {
        // JSON-P
        "jakarta.json.", "jakarta.json.",
        // Jackson
        "com.fasterxml.jackson.databind.",
        // JAX-RS
        "jakarta.ws.rs.",
        // RESTEasy
        "org.jboss.resteasy.",
        // Vert.x JSON layer
        "io.vertx.core.json.",
        // Mutiny
        "io.smallrye.mutiny."
};
}
IgnoreMethodForReflectionPredicate
java
apache__flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/DataTypeQueryable.java
{ "start": 957, "end": 1017 }
class ____ implements this interfaces. */ @Internal public
that
java
apache__logging-log4j2
log4j-core/src/main/java/org/apache/logging/log4j/core/lookup/StrSubstitutor.java
{ "start": 40579, "end": 51912 }
class ____ not need to call this method. This method will * be called automatically by another (public) method. * </p> * <p> * Writers of subclasses can override this method if they need access to * the substitution process at the start or end. * </p> * * @param event The current LogEvent, if there is one. * @param buf the string builder to substitute into, not null * @param offset the start offset within the builder, must be valid * @param length the length within the builder to be processed, must be valid * @return true if altered */ protected boolean substitute(final LogEvent event, final StringBuilder buf, final int offset, final int length) { return substitute(event, buf, offset, length, null) > 0; } /** * Recursive handler for multiple levels of interpolation. This is the main * interpolation method, which resolves the values of all variable references * contained in the passed in text. * * @param event The current LogEvent, if there is one. * @param buf the string builder to substitute into, not null * @param offset the start offset within the builder, must be valid * @param length the length within the builder to be processed, must be valid * @param priorVariables the stack keeping track of the replaced variables, may be null * @return the length change that occurs, unless priorVariables is null when the int * represents a boolean flag as to whether any change occurred. */ private int substitute( final LogEvent event, final StringBuilder buf, final int offset, final int length, List<String> priorVariables) { final StrMatcher prefixMatcher = getVariablePrefixMatcher(); final StrMatcher suffixMatcher = getVariableSuffixMatcher(); final char escape = getEscapeChar(); final StrMatcher valueDelimiterMatcher = getValueDelimiterMatcher(); final boolean substitutionInVariablesEnabled = isEnableSubstitutionInVariables(); final boolean top = priorVariables == null; boolean altered = false; int lengthChange = 0; char[] chars = getChars(buf); int bufEnd = offset + length; int pos = offset; while (pos < bufEnd) { final int startMatchLen = prefixMatcher.isMatch(chars, pos, offset, bufEnd); if (startMatchLen == 0) { pos++; } else // found variable start marker if (pos > offset && chars[pos - 1] == escape) { // escaped buf.deleteCharAt(pos - 1); chars = getChars(buf); lengthChange--; altered = true; bufEnd--; } else { // find suffix final int startPos = pos; pos += startMatchLen; int endMatchLen = 0; int nestedVarCount = 0; while (pos < bufEnd) { if (substitutionInVariablesEnabled && (endMatchLen = prefixMatcher.isMatch(chars, pos, offset, bufEnd)) != 0) { // found a nested variable start nestedVarCount++; pos += endMatchLen; continue; } endMatchLen = suffixMatcher.isMatch(chars, pos, offset, bufEnd); if (endMatchLen == 0) { pos++; } else { // found variable end marker if (nestedVarCount == 0) { String varNameExpr = new String(chars, startPos + startMatchLen, pos - startPos - startMatchLen); if (substitutionInVariablesEnabled) { // initialize priorVariables if they're not already set if (priorVariables == null) { priorVariables = new ArrayList<>(); } final StringBuilder bufName = new StringBuilder(varNameExpr); substitute(event, bufName, 0, bufName.length(), priorVariables); varNameExpr = bufName.toString(); } pos += endMatchLen; final int endPos = pos; String varName = varNameExpr; String varDefaultValue = null; if (valueDelimiterMatcher != null) { final char[] varNameExprChars = varNameExpr.toCharArray(); int valueDelimiterMatchLen = 0; for (int i = 0; i < varNameExprChars.length; i++) { // if 
there's any nested variable when nested variable substitution disabled, then // stop resolving name and default value. if (!substitutionInVariablesEnabled && prefixMatcher.isMatch(varNameExprChars, i, i, varNameExprChars.length) != 0) { break; } if (valueEscapeDelimiterMatcher != null) { final int matchLen = valueEscapeDelimiterMatcher.isMatch(varNameExprChars, i); if (matchLen != 0) { final String varNamePrefix = varNameExpr.substring(0, i) + Interpolator.PREFIX_SEPARATOR; varName = varNamePrefix + varNameExpr.substring(i + matchLen - 1); for (int j = i + matchLen; j < varNameExprChars.length; ++j) { if ((valueDelimiterMatchLen = valueDelimiterMatcher.isMatch(varNameExprChars, j)) != 0) { varName = varNamePrefix + varNameExpr.substring(i + matchLen, j); varDefaultValue = varNameExpr.substring(j + valueDelimiterMatchLen); break; } } break; } else if ((valueDelimiterMatchLen = valueDelimiterMatcher.isMatch(varNameExprChars, i)) != 0) { varName = varNameExpr.substring(0, i); varDefaultValue = varNameExpr.substring(i + valueDelimiterMatchLen); break; } } else if ((valueDelimiterMatchLen = valueDelimiterMatcher.isMatch(varNameExprChars, i)) != 0) { varName = varNameExpr.substring(0, i); varDefaultValue = varNameExpr.substring(i + valueDelimiterMatchLen); break; } } } // on the first call initialize priorVariables if (priorVariables == null) { priorVariables = new ArrayList<>(); priorVariables.add(new String(chars, offset, length + lengthChange)); } // handle cyclic substitution final boolean isCyclic = isCyclicSubstitution(varName, priorVariables); // resolve the variable final LookupResult resolvedResult = isCyclic ? null : resolveVariable(event, varName, buf, startPos, endPos); String varValue = resolvedResult == null ? null : resolvedResult.value(); if (varValue == null) { varValue = varDefaultValue; } if (varValue != null) { // recursive replace final int varLen = varValue.length(); buf.replace(startPos, endPos, varValue); altered = true; int change = resolvedResult != null && resolvedResult.isLookupEvaluationAllowedInValue() ? substitute(event, buf, startPos, varLen, priorVariables) : 0; change = change + (varLen - (endPos - startPos)); pos += change; bufEnd += change; lengthChange += change; chars = getChars(buf); // in case buffer was altered } // remove variable from the cyclic stack if (!isCyclic) { priorVariables.remove(priorVariables.size() - 1); } break; } nestedVarCount--; pos += endMatchLen; } } } } if (top) { return altered ? 1 : 0; } return lengthChange; } /** * Checks if the specified variable is already in the stack (list) of variables, adding the value * if it's not already present. * * @param varName the variable name to check * @param priorVariables the list of prior variables * @return true if this is a cyclic substitution */ private boolean isCyclicSubstitution(final String varName, final List<String> priorVariables) { if (!priorVariables.contains(varName)) { priorVariables.add(varName); return false; } final StringBuilder buf = new StringBuilder(BUF_SIZE); buf.append("Infinite loop in property interpolation of "); appendWithSeparators(buf, priorVariables, "->"); StatusLogger.getLogger().warn(buf); return true; } /** * Internal method that resolves the value of a variable. * <p> * Most users of this
do
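The substitute()/isCyclicSubstitution() pair above performs recursive ${...} interpolation while tracking already-expanded variable names so cycles terminate. A minimal sketch of that idea, assuming a plain Map-backed resolver instead of Log4j's lookup machinery (all names here are illustrative; on a cycle this sketch leaves the reference unresolved, whereas Log4j also logs a warning):

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Minimal sketch of recursive ${var} substitution with cycle detection,
// loosely modeled on substitute()/isCyclicSubstitution() above.
public final class TinyInterpolator {
    private static final Pattern VAR = Pattern.compile("\\$\\{([^}]+)}");
    private final Map<String, String> values;

    public TinyInterpolator(Map<String, String> values) {
        this.values = values;
    }

    public String resolve(String text) {
        return resolve(text, new ArrayDeque<>());
    }

    private String resolve(String text, Deque<String> priorVariables) {
        Matcher m = VAR.matcher(text);
        StringBuilder out = new StringBuilder();
        while (m.find()) {
            String name = m.group(1);
            String value = values.get(name);
            if (value == null || priorVariables.contains(name)) {
                // unknown variable or cycle: keep the reference as-is
                m.appendReplacement(out, Matcher.quoteReplacement(m.group()));
            } else {
                priorVariables.push(name);
                // recursive replace, mirroring the nested substitute() call
                m.appendReplacement(out, Matcher.quoteReplacement(resolve(value, priorVariables)));
                priorVariables.pop();
            }
        }
        m.appendTail(out);
        return out.toString();
    }

    public static void main(String[] args) {
        TinyInterpolator ti = new TinyInterpolator(Map.of("a", "${b}!", "b", "hello", "x", "${x}"));
        System.out.println(ti.resolve("${a}")); // hello!
        System.out.println(ti.resolve("${x}")); // ${x}  (cycle left unresolved)
    }
}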
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java
{ "start": 8114, "end": 8203 }
enum ____ { QUERY, FINALIZE; } // type of the datanode report public
UpgradeAction
java
quarkusio__quarkus
extensions/smallrye-reactive-messaging/deployment/src/main/java/io/quarkus/smallrye/reactivemessaging/deployment/WiringHelper.java
{ "start": 3841, "end": 3953 }
class ____ an outbound (outgoing) connector. * * @param ci the class * @return {@code true} if the
is
java
quarkusio__quarkus
devtools/project-core-extension-codestarts/src/main/resources/codestarts/quarkus/examples/funqy-amazon-lambda-example/java/src/test/java/org/acme/funqy/FunqyTest.java
{ "start": 221, "end": 769 }
class ____ { @Test void testFunqyLambda() throws Exception { // you test your lambdas by invoking on http://localhost:8081 // this works in dev mode too Person in = new Person(); in.setName("Bill"); given() .contentType("application/json") .accept("application/json") .body(in) .when() .post() .then() .statusCode(200) .body(containsString("Hello Bill")); } }
FunqyTest
java
micronaut-projects__micronaut-core
inject-java/src/test/java/io/micronaut/aop/ByteBuddyRuntimeProxy.java
{ "start": 7275, "end": 7358 }
interface ____ { Object call(Object[] args) throws Exception; } }
SuperCall
java
elastic__elasticsearch
x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/fst/Util.java
{ "start": 1599, "end": 3517 }
class ____ { private Util() {} /** Looks up the output for this input, or null if the input is not accepted. */ public static <T> T get(FST<T> fst, IntsRef input) throws IOException { // TODO: would be nice not to alloc this on every lookup final Arc<T> arc = fst.getFirstArc(new Arc<>()); final BytesReader fstReader = fst.getBytesReader(); // Accumulate output as we go T output = fst.outputs.getNoOutput(); for (int i = 0; i < input.length; i++) { if (fst.findTargetArc(input.ints[input.offset + i], arc, arc, fstReader) == null) { return null; } output = fst.outputs.add(output, arc.output()); } if (arc.isFinal()) { return fst.outputs.add(output, arc.nextFinalOutput()); } else { return null; } } // TODO: maybe a CharsRef version for BYTE2 /** Looks up the output for this input, or null if the input is not accepted */ public static <T> T get(FST<T> fst, BytesRef input) throws IOException { assert fst.inputType == FST.INPUT_TYPE.BYTE1; final BytesReader fstReader = fst.getBytesReader(); // TODO: would be nice not to alloc this on every lookup final Arc<T> arc = fst.getFirstArc(new Arc<>()); // Accumulate output as we go T output = fst.outputs.getNoOutput(); for (int i = 0; i < input.length; i++) { if (fst.findTargetArc(input.bytes[i + input.offset] & 0xFF, arc, arc, fstReader) == null) { return null; } output = fst.outputs.add(output, arc.output()); } if (arc.isFinal()) { return fst.outputs.add(output, arc.nextFinalOutput()); } else { return null; } } /** * Represents a path in TopNSearcher. * */ public static
Util
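Both get() overloads above follow the same walk: start at the first arc, follow one target arc per input symbol, add each arc's output along the way, and accept only if the last arc is final. A rough analogue of that lookup over a plain trie with additive long outputs (a hypothetical structure, not Lucene's FST):

import java.util.HashMap;
import java.util.Map;

// Rough analogue of the FST lookup above: walk one edge per input symbol,
// summing per-edge outputs, and accept only in a final state.
final class TrieNode {
    final Map<Character, TrieNode> edges = new HashMap<>();
    final Map<Character, Long> outputs = new HashMap<>(); // per-edge output
    boolean isFinal;
    long finalOutput;

    static Long get(TrieNode root, String input) {
        TrieNode node = root;
        long output = 0; // the additive "no output"
        for (int i = 0; i < input.length(); i++) {
            char c = input.charAt(i);
            TrieNode next = node.edges.get(c);
            if (next == null) {
                return null; // input not accepted
            }
            output += node.outputs.getOrDefault(c, 0L); // accumulate as we go
            node = next;
        }
        return node.isFinal ? output + node.finalOutput : null;
    }
}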
java
apache__camel
core/camel-api/src/main/java/org/apache/camel/health/HealthCheckHelper.java
{ "start": 1594, "end": 14907 }
class ____ { private HealthCheckHelper() { } /** * Invokes all the checks and returns a collection of results. */ public static Collection<HealthCheck.Result> invoke(CamelContext camelContext) { return invoke(camelContext, check -> Map.of(HealthCheck.CHECK_KIND, HealthCheck.Kind.ALL), check -> false, null); } /** * Invokes all the checks and returns a collection of results. * * @param camelContext the camel context * @param exposureLevel level of exposure (full, oneline or default) */ public static Collection<HealthCheck.Result> invoke(CamelContext camelContext, String exposureLevel) { return invoke(camelContext, check -> Map.of(HealthCheck.CHECK_KIND, HealthCheck.Kind.ALL), check -> false, exposureLevel); } /** * Invokes the readiness checks and returns a collection of results. */ public static Collection<HealthCheck.Result> invokeReadiness(CamelContext camelContext) { return invoke(camelContext, check -> Map.of(HealthCheck.CHECK_KIND, HealthCheck.Kind.READINESS), check -> !check.isReadiness(), null); } /** * Invokes the readiness checks and returns a collection of results. * * @param camelContext the camel context * @param exposureLevel level of exposure (full, oneline or default) */ public static Collection<HealthCheck.Result> invokeReadiness(CamelContext camelContext, String exposureLevel) { return invoke(camelContext, check -> Map.of(HealthCheck.CHECK_KIND, HealthCheck.Kind.READINESS), check -> !check.isReadiness(), exposureLevel); } /** * Invokes the liveness checks and returns a collection of results. */ public static Collection<HealthCheck.Result> invokeLiveness(CamelContext camelContext) { return invoke(camelContext, check -> Map.of(HealthCheck.CHECK_KIND, HealthCheck.Kind.LIVENESS), check -> !check.isLiveness(), null); } /** * Invokes the liveness checks and returns a collection of results. * * @param camelContext the camel context * @param exposureLevel level of exposure (full, oneline or default) */ public static Collection<HealthCheck.Result> invokeLiveness(CamelContext camelContext, String exposureLevel) { return invoke(camelContext, check -> Map.of(HealthCheck.CHECK_KIND, HealthCheck.Kind.LIVENESS), check -> !check.isLiveness(), exposureLevel); } /** * Invokes the checks and returns a collection of results. */ public static Collection<HealthCheck.Result> invoke( CamelContext camelContext, Function<HealthCheck, Map<String, Object>> optionsSupplier) { return invoke(camelContext, optionsSupplier, check -> false, null); } /** * Invokes the checks and returns a collection of results. */ public static Collection<HealthCheck.Result> invoke( CamelContext camelContext, Predicate<HealthCheck> filter) { return invoke(camelContext, check -> Collections.emptyMap(), filter, null); } /** * Invokes the checks and returns a collection of results. * * @param camelContext the camel context. * @param optionsSupplier a supplier for options. * @param filter filter to exclude some checks. 
* @param exposureLevel full or oneline (null to use default) */ public static Collection<HealthCheck.Result> invoke( CamelContext camelContext, Function<HealthCheck, Map<String, Object>> optionsSupplier, Predicate<HealthCheck> filter, String exposureLevel) { final HealthCheckRegistry registry = HealthCheckRegistry.get(camelContext); if (registry != null) { Collection<HealthCheck.Result> result = registry.stream() .collect(Collectors.groupingBy(HealthCheckHelper::getGroup)) .values().stream() .flatMap(Collection::stream) .filter(check -> !registry.isExcluded(check) && !filter.test(check)) .sorted(Comparator.comparingInt(HealthCheck::getOrder)) .distinct() .map(check -> check.call(optionsSupplier.apply(check))) .toList(); if (result.isEmpty()) { return Collections.emptyList(); } if (exposureLevel == null) { exposureLevel = registry.getExposureLevel(); } // the result includes all the details if ("full".equals(exposureLevel)) { return result; } else { // are there any downs? Collection<HealthCheck.Result> downs = result.stream().filter(r -> r.getState().equals(HealthCheck.State.DOWN)) .collect(Collectors.toCollection(ArrayList::new)); // default mode is to either be just UP or include all DOWNs // oneline mode is either UP or DOWN if (!downs.isEmpty()) { if ("oneline".equals(exposureLevel)) { // grab first down return Collections.singleton(downs.iterator().next()); } else { return downs; } } else { // all up so grab first HealthCheck.Result up = result.iterator().next(); return Collections.singleton(up); } } } return Collections.emptyList(); } /** * Invoke a check by id. * * @param camelContext the camel context. * @param id the check id. * @param options the check options. * @return an optional {@link HealthCheck.Result}. */ public static Optional<HealthCheck.Result> invoke(CamelContext camelContext, String id, Map<String, Object> options) { final HealthCheckRegistry registry = HealthCheckRegistry.get(camelContext); if (registry != null) { return registry.getCheck(id).map(check -> check.call(options)); } return Optional.empty(); } /** * Gets the {@link HealthCheckRegistry}. * * @param context the camel context * @return the health check registry, or <tt>null</tt> if health-check is not enabled. 
*/ public static HealthCheckRegistry getHealthCheckRegistry(CamelContext context) { return context.getCamelContextExtension().getContextPlugin(HealthCheckRegistry.class); } /** * Gets the {@link HealthCheck} by the given id (will resolve from classpath if necessary) * * @param context the camel context * @param id the id of the health check * @return the health check, or <tt>null</tt> if no health check exists with this id */ public static HealthCheck getHealthCheck(CamelContext context, String id) { HealthCheck answer = null; HealthCheckRegistry hcr = context.getCamelContextExtension().getContextPlugin(HealthCheckRegistry.class); if (hcr != null && hcr.isEnabled()) { Optional<HealthCheck> check = hcr.getCheck(id); if (check.isEmpty()) { // use resolver to load from classpath if needed HealthCheckResolver resolver = context.getCamelContextExtension().getContextPlugin(HealthCheckResolver.class); HealthCheck hc = resolver.resolveHealthCheck(id); if (hc != null) { check = Optional.of(hc); hcr.register(hc); } } if (check.isPresent()) { answer = check.get(); } } return answer; } /** * Gets the {@link HealthCheck} by the given id (will resolve from classpath if necessary) * * @param context the camel context * @param id the id of the health check * @param type the expected type of the health check repository * @return the health check, or <tt>null</tt> if no health check exists with this id */ public static <T extends HealthCheck> T getHealthCheck(CamelContext context, String id, Class<T> type) { HealthCheck answer = getHealthCheck(context, id); if (answer != null) { return type.cast(answer); } return null; } /** * Gets the {@link HealthCheckRepository} by the given id (will resolve from classpath if necessary) * * @param context the camel context * @param id the id of the health check repository * @return the health check repository, or <tt>null</tt> if no health check repository exists with this id */ public static HealthCheckRepository getHealthCheckRepository(CamelContext context, String id) { HealthCheckRepository answer = null; HealthCheckRegistry hcr = context.getCamelContextExtension().getContextPlugin(HealthCheckRegistry.class); if (hcr != null && hcr.isEnabled()) { Optional<HealthCheckRepository> repo = hcr.getRepository(id); if (repo.isEmpty()) { // use resolver to load from classpath if needed HealthCheckResolver resolver = context.getCamelContextExtension().getContextPlugin(HealthCheckResolver.class); HealthCheckRepository hr = resolver.resolveHealthCheckRepository(id); if (hr != null) { repo = Optional.of(hr); hcr.register(hr); } } if (repo.isPresent()) { answer = repo.get(); } } return answer; } /** * Gets the {@link HealthCheckRepository} by the given id (will resolve from classpath if necessary) * * @param context the camel context * @param id the id of the health check repository * @param type the expected type of the health check repository * @return the health check repository, or <tt>null</tt> if no health check repository exists with this id */ public static <T extends HealthCheckRepository> T getHealthCheckRepository(CamelContext context, String id, Class<T> type) { HealthCheckRepository answer = getHealthCheckRepository(context, id); if (answer != null) { return type.cast(answer); } return null; } /** * Checks the overall status of the results. 
* * @param results the results from the invoked health checks * @param readiness readiness or liveness mode * @return true if up, or false if down */ public static boolean isResultsUp(Collection<HealthCheck.Result> results, boolean readiness) { boolean up; if (readiness) { // readiness requires that all are UP up = results.stream().allMatch(r -> r.getState().equals(HealthCheck.State.UP)); } else { // liveness will fail if there is any down up = results.stream().noneMatch(r -> r.getState().equals(HealthCheck.State.DOWN)); } return up; } /** * Get the group of the given check or an empty string if the group is not set. * * @param check the health check * @return the {@link HealthCheck#getGroup()} or an empty string if it is <code>null</code> */ private static String getGroup(HealthCheck check) { return ObjectHelper.supplyIfEmpty(check.getGroup(), () -> ""); } /** * Is the given key a reserved key used by Camel to store metadata in health check response details. * * @param key the key * @return true if reserved, false otherwise */ public static boolean isReservedKey(String key) { if (key == null) { return false; } if (HealthCheck.CHECK_ID.equals(key)) { return true; } else if (HealthCheck.CHECK_GROUP.equals(key)) { return true; } else if (HealthCheck.CHECK_KIND.equals(key)) { return true; } else if (HealthCheck.CHECK_ENABLED.equals(key)) { return true; } else if (HealthCheck.INVOCATION_COUNT.equals(key)) { return true; } else if (HealthCheck.INVOCATION_TIME.equals(key)) { return true; } else if (HealthCheck.FAILURE_COUNT.equals(key)) { return true; } else if (HealthCheck.FAILURE_START_TIME.equals(key)) { return true; } else if (HealthCheck.FAILURE_TIME.equals(key)) { return true; } else if (HealthCheck.FAILURE_ERROR_COUNT.equals(key)) { return true; } else if (HealthCheck.SUCCESS_COUNT.equals(key)) { return true; } else if (HealthCheck.SUCCESS_START_TIME.equals(key)) { return true; } else if (HealthCheck.SUCCESS_TIME.equals(key)) { return true; } return false; } }
HealthCheckHelper
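The exposure-level handling in invoke() above reduces the full result list three ways: "full" keeps everything, "oneline" collapses to a single UP or the first DOWN, and the default keeps all DOWNs or one representative UP. That reduction step in isolation, over a hypothetical Result record rather than Camel's HealthCheck.Result:

import java.util.Collection;
import java.util.List;

// Sketch of the exposure-level reduction performed by invoke() above.
record Result(String id, boolean up) {}

final class ExposureReducer {
    static Collection<Result> reduce(List<Result> results, String exposureLevel) {
        if (results.isEmpty() || "full".equals(exposureLevel)) {
            return results; // full: keep every detail
        }
        List<Result> downs = results.stream().filter(r -> !r.up()).toList();
        if (!downs.isEmpty()) {
            // oneline: first DOWN only; default: all DOWNs
            return "oneline".equals(exposureLevel) ? List.of(downs.get(0)) : downs;
        }
        return List.of(results.get(0)); // all UP: a single representative UP
    }
}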
java
reactor__reactor-core
reactor-core/src/test/java/reactor/core/scheduler/ExecutorSchedulerTest.java
{ "start": 7253, "end": 7700 }
class ____ implements Executor, Scannable { @Override public void execute(@NonNull Runnable command) { command.run(); } @Override public @Nullable Object scanUnsafe(Attr key) { if (key == Attr.CAPACITY) return 123; if (key == Attr.BUFFERED) return 1024; if (key == Attr.NAME) return toString(); return null; } @Override public String toString() { return "scannableExecutor"; } } static final
ScannableExecutor
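The scanUnsafe() above answers the introspection keys Reactor probes for and returns null for anything it does not recognize. A freestanding sketch of that key-probe pattern with a typed front end, deliberately independent of Reactor's Scannable API (all types and names here are illustrative):

// Key-probe pattern: a component answers typed introspection keys it
// knows and returns null otherwise; probe() adds a type-safe front end.
final class Keys {
    record Key<T>(String name, Class<T> type) {}
    static final Key<Integer> CAPACITY = new Key<>("capacity", Integer.class);
    static final Key<String>  NAME     = new Key<>("name", String.class);
}

interface Probeable {
    Object probeUnsafe(Keys.Key<?> key);

    default <T> T probe(Keys.Key<T> key) { // typed wrapper over the raw probe
        Object v = probeUnsafe(key);
        return v == null ? null : key.type().cast(v);
    }
}

final class InlineExecutor implements Probeable {
    @Override public Object probeUnsafe(Keys.Key<?> key) {
        if (key == Keys.CAPACITY) return 123;
        if (key == Keys.NAME) return "inlineExecutor";
        return null; // unknown keys stay unanswered
    }
}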
java
apache__flink
flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
{ "start": 13983, "end": 14299 }
class ____ a public nullary constructor, false if not. */ public static boolean hasPublicNullaryConstructor(Class<?> clazz) { return Arrays.stream(clazz.getConstructors()) .anyMatch(constructor -> constructor.getParameterCount() == 0); } /** * Checks whether the given
has
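Class#getConstructors() returns only public constructors, so the anyMatch on getParameterCount() == 0 above is precisely a test for a public no-argument constructor. A self-contained demonstration (the two sample classes are illustrative):

import java.util.Arrays;

// Demo of the nullary-constructor check above; sample classes are illustrative.
public final class NullaryCheckDemo {
    public static class WithNullary { public WithNullary() {} }
    public static class WithoutNullary { public WithoutNullary(int x) {} }

    static boolean hasPublicNullaryConstructor(Class<?> clazz) {
        return Arrays.stream(clazz.getConstructors()) // public constructors only
                .anyMatch(c -> c.getParameterCount() == 0);
    }

    public static void main(String[] args) {
        System.out.println(hasPublicNullaryConstructor(WithNullary.class));    // true
        System.out.println(hasPublicNullaryConstructor(WithoutNullary.class)); // false
    }
}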
java
apache__kafka
clients/src/main/java/org/apache/kafka/clients/admin/DeleteConsumerGroupsOptions.java
{ "start": 957, "end": 1049 }
class ____ extends AbstractOptions<DeleteConsumerGroupsOptions> { }
DeleteConsumerGroupsOptions
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/softdelete/SoftDeleteMappedColumnTest.java
{ "start": 838, "end": 2296 }
class ____ { @Test public void testValid() { try (final SessionFactory sf = buildSessionFactory( ValidEntity.class )) { sf.inTransaction( session -> { final ValidEntity validEntity = new ValidEntity( 1L, "valid1" ); session.persist( validEntity ); session.flush(); assertThat( validEntity.isDeleted() ).isFalse(); session.remove( validEntity ); } ); sf.inSession( session -> assertThat( session.find( ValidEntity.class, 1L ) ).isNull() ); } } @Test public void testInvalid() { try (final SessionFactory sf = buildSessionFactory( InvalidEntity.class )) { sf.inTransaction( session -> { final InvalidEntity entity = new InvalidEntity( 2L, "invalid2" ); session.persist( entity ); } ); fail( "Duplicate soft-delete column should fail" ); } catch (Exception e) { assertThat( e ).isInstanceOf( MappingException.class ); assertThat( e.getMessage() ).contains( "Column 'is_deleted' is duplicated" ); } } private SessionFactory buildSessionFactory(Class<?> entityClass) { final Configuration cfg = new Configuration() .setProperty( AvailableSettings.JAKARTA_HBM2DDL_DATABASE_ACTION, Action.ACTION_CREATE_THEN_DROP ) .addAnnotatedClass( entityClass ); ServiceRegistryUtil.applySettings( cfg.getStandardServiceRegistryBuilder() ); return cfg.buildSessionFactory(); } @Entity( name = "ValidEntity" ) @SoftDelete( columnName = "is_deleted" ) public static
SoftDeleteMappedColumnTest
java
hibernate__hibernate-orm
tooling/metamodel-generator/src/main/java/org/hibernate/processor/annotation/CDITypeMetaAttribute.java
{ "start": 298, "end": 2115 }
class ____ implements MetaAttribute { private AnnotationMetaEntity annotationMetaEntity; private String typeName; private Object superTypeName; public CDITypeMetaAttribute(AnnotationMetaEntity annotationMetaEntity, String className, String superTypeName) { this.annotationMetaEntity = annotationMetaEntity; this.superTypeName = superTypeName; this.typeName = className; } @Override public boolean hasTypedAttribute() { return true; } @Override public boolean hasStringAttribute() { return false; } @Override public String getAttributeDeclarationString() { final StringBuilder declaration = new StringBuilder(); modifiers( declaration ); preamble( declaration ); closingBrace( declaration ); return declaration.toString(); } void closingBrace(StringBuilder declaration) { declaration.append("}"); } void preamble(StringBuilder declaration) { declaration .append("class ") .append(typeName) .append(" implements ") .append( superTypeName ); declaration .append(" {\n"); } @Override public String getAttributeNameDeclarationString() { return ""; } @Override public String getMetaType() { throw new UnsupportedOperationException("operation not supported"); } @Override public String getPropertyName() { return ""; } @Override public String getTypeDeclaration() { return ""; } void modifiers(StringBuilder declaration) { annotationMetaEntity.importType("jakarta.annotation.Generated"); annotationMetaEntity.importType("jakarta.enterprise.context.Dependent"); declaration .append("\n@Dependent\n") .append("@Generated(\""+HibernateProcessor.class.getName()+"\")\n"); declaration .append("public static "); } @Override public Metamodel getHostingEntity() { return annotationMetaEntity; } }
CDITypeMetaAttribute
java
apache__camel
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/VertxEndpointBuilderFactory.java
{ "start": 16025, "end": 16337 }
class ____ extends AbstractEndpointBuilder implements VertxEndpointBuilder, AdvancedVertxEndpointBuilder { public VertxEndpointBuilderImpl(String path) { super(componentName, path); } } return new VertxEndpointBuilderImpl(path); } }
VertxEndpointBuilderImpl
java
spring-projects__spring-framework
spring-core/src/main/java/org/springframework/core/DecoratingProxy.java
{ "start": 1564, "end": 1825 }
class ____ this proxy. * <p>In case of an AOP proxy, this will be the ultimate target class, * not just the immediate target (in case of multiple nested proxies). * @return the decorated class (never {@code null}) */ Class<?> getDecoratedClass(); }
behind
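getDecoratedClass() exists so callers can recover the user-visible class in one step even when proxies nest. A hedged usage sketch relying only on the contract shown above, assuming spring-core on the classpath (the helper name is illustrative):

import org.springframework.core.DecoratingProxy;

// Resolving the user-visible class of a possibly proxied bean.
final class ProxyClasses {
    static Class<?> userClassOf(Object candidate) {
        if (candidate instanceof DecoratingProxy proxy) {
            return proxy.getDecoratedClass(); // ultimate target class, per the contract
        }
        return candidate.getClass();
    }
}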
java
apache__camel
core/camel-support/src/main/java/org/apache/camel/throttling/ThrottlingExceptionRoutePolicy.java
{ "start": 3143, "end": 4572 }
class ____ extends RoutePolicySupport implements CamelContextAware, RouteAware { private static final Logger LOG = LoggerFactory.getLogger(ThrottlingExceptionRoutePolicy.class); private static final int STATE_CLOSED = 0; private static final int STATE_HALF_OPEN = 1; private static final int STATE_OPEN = 2; private CamelContext camelContext; private Route route; private final Lock lock = new ReentrantLock(); private CamelLogger stateLogger; // configuration @Metadata(description = "How many failed messages within the window would trigger the circuit breaker to open", defaultValue = "50") private int failureThreshold = 50; @Metadata(description = "Sliding window for how long time to go back (in millis) when counting number of failures", defaultValue = "60000") private long failureWindow = 60000; @Metadata(description = "Interval (in millis) for how often to check whether a currently open circuit breaker may work again", defaultValue = "30000") private long halfOpenAfter = 30000; @Metadata(description = "Whether to always keep the circuit breaker open (never closes). This is only intended for development and testing purposes.") private boolean keepOpen; @Metadata(description = "Allows to only throttle based on certain types of exceptions. Multiple exceptions (use FQN
ThrottlingExceptionRoutePolicy
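The route policy above is a circuit breaker: closed until failures within a sliding window reach a threshold, then open, then half-open after a cool-down to probe recovery. A freestanding sketch of that state machine, with illustrative names and none of Camel's route wiring:

import java.util.ArrayDeque;
import java.util.Deque;

// Closed/open/half-open circuit breaker with a sliding failure window,
// mirroring the states and thresholds declared by the policy above.
final class TinyCircuitBreaker {
    enum State { CLOSED, OPEN, HALF_OPEN }

    private final int failureThreshold;
    private final long failureWindowMillis;
    private final long halfOpenAfterMillis;
    private final Deque<Long> failureTimes = new ArrayDeque<>();
    private State state = State.CLOSED;
    private long openedAt;

    TinyCircuitBreaker(int failureThreshold, long failureWindowMillis, long halfOpenAfterMillis) {
        this.failureThreshold = failureThreshold;
        this.failureWindowMillis = failureWindowMillis;
        this.halfOpenAfterMillis = halfOpenAfterMillis;
    }

    synchronized boolean allowRequest(long now) {
        if (state == State.OPEN && now - openedAt >= halfOpenAfterMillis) {
            state = State.HALF_OPEN; // probe whether the downstream recovered
        }
        return state != State.OPEN;
    }

    synchronized void onSuccess() {
        if (state == State.HALF_OPEN) { // probe succeeded: close again
            state = State.CLOSED;
            failureTimes.clear();
        }
    }

    synchronized void onFailure(long now) {
        failureTimes.addLast(now);
        // drop failures that fell out of the sliding window
        while (!failureTimes.isEmpty() && now - failureTimes.peekFirst() > failureWindowMillis) {
            failureTimes.removeFirst();
        }
        if (state == State.HALF_OPEN || failureTimes.size() >= failureThreshold) {
            state = State.OPEN; // a failed probe or too many failures opens the circuit
            openedAt = now;
        }
    }
}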
java
elastic__elasticsearch
x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java
{ "start": 1893, "end": 1960 }
class ____ extends NamedExpression { /** * A wrapper
Attribute
java
micronaut-projects__micronaut-core
inject-groovy/src/main/groovy/io/micronaut/ast/groovy/visitor/GroovyNativeElement.java
{ "start": 1228, "end": 1348 }
class ____ */ record Class(ClassNode annotatedNode) implements GroovyNativeElement { } /** * The
node
java
apache__flink
flink-tests/src/test/java/org/apache/flink/test/state/operator/restore/keyed/KeyedJob.java
{ "start": 6565, "end": 8567 }
class ____ extends RichWindowFunction<Tuple2<Integer, Integer>, Integer, Tuple, GlobalWindow> { private static final long serialVersionUID = -7236313076792964055L; private final ExecutionMode mode; private transient ListState<Integer> state; private boolean applyCalled = false; private StatefulWindowFunction(ExecutionMode mode) { this.mode = mode; } @Override public void open(OpenContext openContext) { this.state = getRuntimeContext() .getListState(new ListStateDescriptor<>("values", Integer.class)); } @Override public void apply( Tuple key, GlobalWindow window, Iterable<Tuple2<Integer, Integer>> values, Collector<Integer> out) throws Exception { // fail-safe to make sure apply is actually called applyCalled = true; switch (mode) { case GENERATE: for (Tuple2<Integer, Integer> value : values) { state.add(value.f1); } break; case MIGRATE: case RESTORE: Iterator<Tuple2<Integer, Integer>> input = values.iterator(); Iterator<Integer> restored = state.get().iterator(); while (input.hasNext() && restored.hasNext()) { Tuple2<Integer, Integer> value = input.next(); Integer rValue = restored.next(); Assert.assertEquals(rValue, value.f1); } Assert.assertEquals(restored.hasNext(), input.hasNext()); } } @Override public void close() { Assert.assertTrue("Apply was never called.", applyCalled); } } private static
StatefulWindowFunction
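In the MIGRATE/RESTORE modes above, the window function zips the restored state against fresh input and requires element-wise equality plus equal length. That comparison in isolation, as plain Java with no Flink types:

import java.util.Iterator;
import java.util.List;
import java.util.Objects;

// Zip-and-compare as done in the RESTORE branch above: both sequences
// must match element-wise and end together.
final class ZipCompare {
    static <T> boolean matches(Iterable<T> input, Iterable<T> restored) {
        Iterator<T> a = input.iterator();
        Iterator<T> b = restored.iterator();
        while (a.hasNext() && b.hasNext()) {
            if (!Objects.equals(a.next(), b.next())) {
                return false;
            }
        }
        return a.hasNext() == b.hasNext(); // same length required
    }

    public static void main(String[] args) {
        System.out.println(matches(List.of(1, 2, 3), List.of(1, 2, 3))); // true
        System.out.println(matches(List.of(1, 2), List.of(1, 2, 3)));    // false
    }
}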
java
apache__flink
flink-connectors/flink-connector-base/src/main/java/org/apache/flink/connector/base/source/reader/fetcher/SplitFetcherTask.java
{ "start": 959, "end": 1066 }
interface ____ to {@link Runnable} but allows throwing exceptions and wakeup. */ @PublicEvolving public
similar
java
spring-projects__spring-boot
module/spring-boot-jersey/src/main/java/org/springframework/boot/jersey/autoconfigure/JerseyProperties.java
{ "start": 2187, "end": 2405 }
class ____ { /** * Jersey filter chain order. */ private int order; public int getOrder() { return this.order; } public void setOrder(int order) { this.order = order; } } public static
Filter
java
apache__logging-log4j2
log4j-api/src/main/java/org/apache/logging/log4j/util/Strings.java
{ "start": 1022, "end": 1175 }
class ____.</em> * * @see <a href="https://commons.apache.org/proper/commons-lang/index.html">Apache Commons Lang</a> */ @InternalApi public final
private
java
alibaba__druid
core/src/test/java/com/alibaba/druid/bvt/sql/db2/DB2SelectTest_2.java
{ "start": 1085, "end": 2794 }
class ____ extends DB2Test { public void test_0() throws Exception { String sql = "SELECT CTRYNUM, FMS_INSTANCE_CD FROM DBEFMSDR.FMSA_O_WW_CTRY_AG WHERE ACCT_YR=? WITH UR"; DB2StatementParser parser = new DB2StatementParser(sql); List<SQLStatement> statementList = parser.parseStatementList(); SQLStatement stmt = statementList.get(0); print(statementList); assertEquals(1, statementList.size()); DB2SchemaStatVisitor visitor = new DB2SchemaStatVisitor(); stmt.accept(visitor); // System.out.println("Tables : " + visitor.getTables()); // System.out.println("fields : " + visitor.getColumns()); // System.out.println("coditions : " + visitor.getConditions()); // System.out.println("orderBy : " + visitor.getOrderByColumns()); assertEquals(1, visitor.getTables().size()); assertEquals(3, visitor.getColumns().size()); assertEquals(1, visitor.getConditions().size()); assertTrue(visitor.getTables().containsKey(new TableStat.Name("DBEFMSDR.FMSA_O_WW_CTRY_AG"))); assertTrue(visitor.getColumns().contains(new Column("DBEFMSDR.FMSA_O_WW_CTRY_AG", "CTRYNUM"))); // assertTrue(visitor.getColumns().contains(new Column("mytable", "first_name"))); // assertTrue(visitor.getColumns().contains(new Column("mytable", "full_name"))); String output = SQLUtils.toSQLString(stmt, JdbcConstants.DB2); assertEquals("SELECT CTRYNUM, FMS_INSTANCE_CD" + "\nFROM DBEFMSDR.FMSA_O_WW_CTRY_AG"// + "\nWHERE ACCT_YR = ?"// + "\nWITH UR", // output); } }
DB2SelectTest_2
java
alibaba__druid
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/param/MySqlParameterizedOutputVisitorTest_52_schema.java
{ "start": 513, "end": 1915 }
class ____ extends TestCase { public void test_for_parameterize() throws Exception { final DbType dbType = JdbcConstants.MYSQL; String sql = "UPDATE `buyer_0158`.xc_yy_order_1082\n" + "SET `ALIPAY_BUYER_ID` = 1025, `GMT_MODIFIED` = 'aaaa'\n" + "WHERE `PAY_ORDER_ID` = 'kkk'"; SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(sql, dbType); List<SQLStatement> stmtList = parser.parseStatementList(); SQLStatement statement = stmtList.get(0); StringBuilder out = new StringBuilder(); SQLASTOutputVisitor visitor = SQLUtils.createOutputVisitor(out, JdbcConstants.MYSQL); List<Object> parameters = new ArrayList<Object>(); visitor.setParameterized(true); visitor.setParameterizedMergeInList(true); visitor.setParameters(parameters); /*visitor.setPrettyFormat(false);*/ statement.accept(visitor); /* JSONArray array = new JSONArray(); for(String table : visitor.getTables()){ array.add(table.replaceAll("`","")); }*/ String psql = out.toString(); System.out.println(psql); assertEquals("UPDATE buyer.xc_yy_order\n" + "SET `ALIPAY_BUYER_ID` = ?, `GMT_MODIFIED` = ?\n" + "WHERE `PAY_ORDER_ID` = ?", psql); } }
MySqlParameterizedOutputVisitorTest_52_schema
java
quarkusio__quarkus
independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/ContextInstancesGenerator.java
{ "start": 1121, "end": 16344 }
class ____ extends AbstractGenerator { static final String CONTEXT_INSTANCES_SUFFIX = "_ContextInstances"; private final BeanDeployment beanDeployment; private final Map<DotName, String> scopeToGeneratedName; public ContextInstancesGenerator(boolean generateSources, ReflectionRegistration reflectionRegistration, BeanDeployment beanDeployment, Map<DotName, String> scopeToGeneratedName) { super(generateSources, reflectionRegistration); this.beanDeployment = beanDeployment; this.scopeToGeneratedName = scopeToGeneratedName; } void precomputeGeneratedName(DotName scope) { String generatedName = DEFAULT_PACKAGE + "." + beanDeployment.name + UNDERSCORE + scope.toString().replace(".", UNDERSCORE) + CONTEXT_INSTANCES_SUFFIX; scopeToGeneratedName.put(scope, generatedName); } Collection<Resource> generate(DotName scope) { ResourceClassOutput classOutput = new ResourceClassOutput(true, generateSources); Gizmo gizmo = gizmo(classOutput); createContextInstances(gizmo, scope); return classOutput.getResources(); } private void createContextInstances(Gizmo gizmo, DotName scope) { String generatedName = scopeToGeneratedName.get(scope); reflectionRegistration.registerMethod(generatedName, Methods.INIT); List<BeanInfo> beans = new BeanStream(beanDeployment.getBeans()).withScope(scope).collect(); MethodDesc newUpdater = MethodDesc.of(AtomicReferenceFieldUpdater.class, "newUpdater", AtomicReferenceFieldUpdater.class, Class.class, Class.class, String.class); gizmo.class_(generatedName, cc -> { cc.implements_(ContextInstances.class); Map<String, BeanFields> beanFields = new TreeMap<>(); int fieldIndex = 0; // We need to iterate the beans in order for the field names to be deterministic for (BeanInfo bean : orderedBeans(beans)) { String beanIdx = "" + fieldIndex++; // add these fields for each bean: // - `private volatile ContextInstanceHandle h<idx>` // - `private volatile Lock l<idx>` // - `private static final AtomicReferenceFieldUpdater<ContextInstances, ContextInstanceHandle> L<idx>_UPDATER` FieldDesc handleField = cc.field("h" + beanIdx, fc -> { fc.private_(); fc.volatile_(); fc.setType(ContextInstanceHandle.class); }); FieldDesc lockField = cc.field("l" + beanIdx, fc -> { fc.private_(); fc.volatile_(); fc.setType(Lock.class); }); StaticFieldVar lockUpdaterField = cc.staticField("L" + beanIdx + "_UPDATER", fc -> { fc.private_(); fc.final_(); fc.setType(AtomicReferenceFieldUpdater.class); fc.setInitializer(bc -> { bc.yield(bc.invokeStatic(newUpdater, Const.of(cc.type()), Const.of(Lock.class), Const.of("l" + beanIdx))); }); }); beanFields.put(bean.getIdentifier(), new BeanFields(handleField, lockField, lockUpdaterField)); } cc.defaultConstructor(); Map<String, MethodDesc> lazyLocks = generateLazyLocks(cc, beanFields); generateComputeIfAbsent(cc, beanFields, lazyLocks); generateGetIfPresent(cc, beanFields); generateGetAllPresent(cc, beanFields); List<MethodDesc> remove = generateRemove(cc, beanFields, lazyLocks); generateRemoveEach(cc, remove); }); } private Map<String, MethodDesc> generateLazyLocks(ClassCreator cc, Map<String, BeanFields> beanFields) { MethodDesc updaterCas = MethodDesc.of(AtomicReferenceFieldUpdater.class, "compareAndSet", boolean.class, Object.class, Object.class, Object.class); Map<String, MethodDesc> result = new HashMap<>(beanFields.size()); for (Map.Entry<String, BeanFields> namedFields : beanFields.entrySet()) { String beanId = namedFields.getKey(); BeanFields fields = namedFields.getValue(); // private Lock lazyl<idx>() { // if (this.l<idx> != null) { // return this.l<idx>; // } // 
Lock newLock = new ReentrantLock(); // if (L<idx>_UPDATER.compareAndSet(this, null, newLock)) { // return newLock; // } // return this.l<idx>; // } MethodDesc desc = cc.method("lazy" + fields.lock().name(), mc -> { mc.private_(); mc.returning(Lock.class); mc.body(b0 -> { FieldVar lock = cc.this_().field(fields.lock()); b0.ifNotNull(lock, b1 -> { b1.return_(lock); }); LocalVar newLock = b0.localVar("newLock", b0.new_(ReentrantLock.class)); Expr casResult = b0.invokeVirtual(updaterCas, fields.lockUpdater(), cc.this_(), Const.ofNull(Lock.class), newLock); b0.if_(casResult, b1 -> { b1.return_(newLock); }); b0.return_(lock); }); }); result.put(beanId, desc); } return result; } private void generateComputeIfAbsent(ClassCreator cc, Map<String, BeanFields> beanFields, Map<String, MethodDesc> lazyLocks) { Map<String, MethodDesc> computeMethodsByBean = new HashMap<>(); for (Map.Entry<String, BeanFields> idToFields : beanFields.entrySet()) { String beanId = idToFields.getKey(); BeanFields fields = idToFields.getValue(); // There is a separate compute method for every bean instance field MethodDesc desc = cc.method("c" + fields.instance().name(), mc -> { mc.returning(ContextInstanceHandle.class); ParamVar supplier = mc.parameter("supplier", Supplier.class); mc.body(b0 -> { // ContextInstanceHandle<?> copy = this.h<idx>; // if (copy != null) { // return copy; // } // Lock lock = lazyl<idx>(); // lock.lock(); // try { // copy = this.h<idx>; // if (copy != null) { // return copy; // } // copy = supplier.get(); // this.h<idx> = copy; // return copy; // } finally { // lock.unlock(); // } LocalVar copy = b0.localVar("copy", cc.this_().field(fields.instance())); b0.ifNotNull(copy, b1 -> { b1.return_(copy); }); LocalVar lock = b0.localVar("lock", b0.invokeVirtual(lazyLocks.get(beanId), cc.this_())); b0.locked(lock, b1 -> { b1.set(copy, cc.this_().field(fields.instance())); b1.ifNotNull(copy, b2 -> { b2.return_(copy); }); b1.set(copy, b1.invokeInterface(MethodDescs.SUPPLIER_GET, supplier)); b1.set(cc.this_().field(fields.instance()), copy); b1.return_(copy); }); }); }); computeMethodsByBean.put(beanId, desc); } cc.method("computeIfAbsent", mc -> { mc.returning(ContextInstanceHandle.class); ParamVar rtBeanId = mc.parameter("beanId", String.class); ParamVar supplier = mc.parameter("supplier", Supplier.class); mc.body(b0 -> { b0.return_(b0.switch_(ContextInstanceHandle.class, rtBeanId, sc -> { for (String btBeanId : beanFields.keySet()) { sc.caseOf(btBeanId, b1 -> { b1.return_(b1.invokeVirtual(computeMethodsByBean.get(btBeanId), cc.this_(), supplier)); }); } sc.default_(b1 -> { b1.throw_(IllegalArgumentException.class, "Unknown bean identifier"); }); })); }); }); } private void generateGetIfPresent(ClassCreator cc, Map<String, BeanFields> beanFields) { cc.method("getIfPresent", mc -> { mc.returning(ContextInstanceHandle.class); ParamVar rtBeanId = mc.parameter("beanId", String.class); mc.body(b0 -> { b0.return_(b0.switch_(ContextInstanceHandle.class, rtBeanId, sc -> { for (Map.Entry<String, BeanFields> idToFields : beanFields.entrySet()) { String btBeanId = idToFields.getKey(); BeanFields fields = idToFields.getValue(); sc.caseOf(btBeanId, b1 -> { b1.yield(cc.this_().field(fields.instance())); }); } sc.default_(b1 -> { b1.throw_(IllegalArgumentException.class, "Unknown bean identifier"); }); })); }); }); } private void generateGetAllPresent(ClassCreator cc, Map<String, BeanFields> beanFields) { cc.method("getAllPresent", mc -> { mc.returning(Set.class); mc.body(b0 -> { // ContextInstanceHandle<?> h<idx> = 
this.h<idx>; // Set<ContextInstanceHandle<?>> result = new HashSet<>(); // if (h<idx> != null) { // result.add(h<idx>); // } // return result; List<LocalVar> handles = new ArrayList<>(beanFields.size()); for (BeanFields fields : beanFields.values()) { handles.add(b0.localVar(cc.this_().field(fields.instance))); } LocalVar result = b0.localVar("result", b0.new_(HashSet.class)); for (LocalVar handle : handles) { b0.ifNotNull(handle, b1 -> { b1.withSet(result).add(handle); }); } b0.return_(result); }); }); } private List<MethodDesc> generateRemove(ClassCreator cc, Map<String, BeanFields> beanFields, Map<String, MethodDesc> lazyLocks) { // There is a separate remove method for every instance handle field // To eliminate large stack map table in the bytecode List<MethodDesc> removeMethods = new ArrayList<>(beanFields.size()); Map<String, MethodDesc> removeMethodsByBean = new HashMap<>(); for (Map.Entry<String, BeanFields> idToFields : beanFields.entrySet()) { String beanId = idToFields.getKey(); BeanFields fields = idToFields.getValue(); FieldDesc instanceField = fields.instance; MethodDesc desc = cc.method("r" + instanceField.name(), mc -> { mc.returning(ContextInstanceHandle.class); mc.body(b0 -> { // ContextInstanceHandle<?> copy = this.h<idx>; // if (copy == null) { // return null; // } // Lock lock = lazyl<idx>(); // lock.lock(); // try { // copy = this.h<idx>; // this.h<idx> = null; // } finally { // lock.unlock(); // } // return copy; LocalVar copy = b0.localVar("copy", cc.this_().field(instanceField)); b0.ifNull(copy, b1 -> { b1.return_(Const.ofNull(ContextInstanceHandle.class)); }); LocalVar lock = b0.localVar("lock", b0.invokeVirtual(lazyLocks.get(beanId), cc.this_())); b0.locked(lock, b1 -> { b1.set(copy, cc.this_().field(instanceField)); b1.set(cc.this_().field(instanceField), Const.ofNull(ContextInstanceHandle.class)); }); b0.return_(copy); }); }); removeMethods.add(desc); removeMethodsByBean.put(beanId, desc); } cc.method("remove", mc -> { mc.returning(ContextInstanceHandle.class); ParamVar rtBeanId = mc.parameter("beanId", String.class); mc.body(b0 -> { b0.return_(b0.switch_(ContextInstanceHandle.class, rtBeanId, sc -> { for (String btBeanId : beanFields.keySet()) { sc.caseOf(btBeanId, b1 -> { b1.return_(b1.invokeVirtual(removeMethodsByBean.get(btBeanId), cc.this_())); }); } sc.default_(b1 -> { b1.throw_(IllegalArgumentException.class, "Unknown bean identifier"); }); })); }); }); return removeMethods; } private void generateRemoveEach(ClassCreator cc, List<MethodDesc> removeInstances) { cc.method("removeEach", mc -> { mc.returning(void.class); ParamVar action = mc.parameter("action", Consumer.class); mc.body(b0 -> { // ContextInstanceHandle<?> copy<idx> = rh<idx>(); // if (action != null) // if (copy<idx> != null) { // action.accept(copy<idx>); // } // } int counter = 0; List<LocalVar> results = new ArrayList<>(removeInstances.size()); for (MethodDesc removeInstance : removeInstances) { // invoke remove method for every instance handle field results.add(b0.localVar("copy" + counter, b0.invokeVirtual(removeInstance, cc.this_()))); counter++; } b0.ifNotNull(action, b1 -> { for (LocalVar result : results) { b1.ifNotNull(result, b2 -> { b2.invokeInterface(MethodDescs.CONSUMER_ACCEPT, action, result); }); } }); b0.return_(); }); }); } record BeanFields(FieldDesc instance, FieldDesc lock, StaticFieldVar lockUpdater) { } }
ContextInstancesGenerator
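The generated class above gives each bean a volatile handle field, a lazily created lock, and a static AtomicReferenceFieldUpdater so the lock can be installed with a single CAS. A hand-written equivalent of one such field triple (names are illustrative; the real class is emitted as bytecode by Gizmo, and Object stands in for ContextInstanceHandle):

import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Supplier;

// One generated field triple by hand: a volatile handle, a lazily
// CAS-initialized lock, and a double-checked computeIfAbsent.
final class OneBeanInstances {
    private volatile Object h0;
    private volatile Lock l0;
    private static final AtomicReferenceFieldUpdater<OneBeanInstances, Lock> L0_UPDATER =
            AtomicReferenceFieldUpdater.newUpdater(OneBeanInstances.class, Lock.class, "l0");

    private Lock lazyL0() {
        Lock lock = l0;
        if (lock != null) {
            return lock;
        }
        Lock newLock = new ReentrantLock();
        // only one thread wins the CAS; every thread then reads the same lock
        return L0_UPDATER.compareAndSet(this, null, newLock) ? newLock : l0;
    }

    Object computeIfAbsent(Supplier<Object> supplier) {
        Object copy = h0; // first unlocked read
        if (copy != null) {
            return copy;
        }
        Lock lock = lazyL0();
        lock.lock();
        try {
            copy = h0; // re-check under the lock
            if (copy == null) {
                copy = supplier.get();
                h0 = copy;
            }
            return copy;
        } finally {
            lock.unlock();
        }
    }
}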
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/engine/internal/EntityEntryContext.java
{ "start": 26059, "end": 26794 }
class ____ implements EntityEntryCrossRef { private final Object entity; private EntityEntry entityEntry; private EntityEntryCrossRefImpl(Object entity, EntityEntry entityEntry) { this.entity = entity; this.entityEntry = entityEntry; } @Override public Object getEntity() { return entity; } @Override public EntityEntry getEntityEntry() { return entityEntry; } @Override public Object getKey() { return getEntity(); } @Override public EntityEntry getValue() { return getEntityEntry(); } @Override public EntityEntry setValue(EntityEntry entityEntry) { final EntityEntry old = this.entityEntry; this.entityEntry = entityEntry; return old; } } }
EntityEntryCrossRefImpl
java
apache__avro
lang/java/avro/src/main/java/org/apache/avro/util/NonCopyingByteArrayOutputStream.java
{ "start": 1036, "end": 1776 }
class ____ extends ByteArrayOutputStream { /** * Creates a new byte array output stream, with a buffer capacity of the * specified size, in bytes. * * @param size the initial size * @throws IllegalArgumentException if size is negative */ public NonCopyingByteArrayOutputStream(int size) { super(size); } /** * Get the contents of this ByteArrayOutputStream wrapped as a ByteBuffer. This * is a shallow copy. Changes to this ByteArrayOutputStream "write through" to * the ByteBuffer. * * @return The contents of this ByteArrayOutputStream wrapped as a ByteBuffer */ public ByteBuffer asByteBuffer() { return ByteBuffer.wrap(super.buf, 0, super.count); } }
NonCopyingByteArrayOutputStream
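Typical use of the stream above: write, then wrap, avoiding the defensive copy toByteArray() would make. Because asByteBuffer() shares the internal buffer, the stream must not be written to while the buffer is being read. A usage sketch, assuming the class above (org.apache.avro.util) is on the classpath:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

import org.apache.avro.util.NonCopyingByteArrayOutputStream;

// Write, then wrap the internal buffer without copying. Keep the stream
// alive and untouched while the buffer is in use, since they share storage.
class ZeroCopyDemo {
    public static void main(String[] args) {
        NonCopyingByteArrayOutputStream out = new NonCopyingByteArrayOutputStream(64);
        out.writeBytes("hello".getBytes(StandardCharsets.UTF_8));
        ByteBuffer buf = out.asByteBuffer(); // wraps buf[0..count), no copy
        byte[] view = new byte[buf.remaining()];
        buf.get(view);
        System.out.println(new String(view, StandardCharsets.UTF_8)); // hello
    }
}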
java
google__guice
core/test/com/google/inject/spi/FailingElementVisitor.java
{ "start": 674, "end": 843 }
class ____ extends DefaultElementVisitor<Void> { @Override protected Void visitOther(Element element) { throw new AssertionFailedError(); } }
FailingElementVisitor
java
elastic__elasticsearch
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownFilterAndLimitIntoUnionAllTests.java
{ "start": 2646, "end": 69265 }
class ____ extends AbstractLogicalPlanOptimizerTests { @Before public void checkSubqueryInFromCommandSupport() { assumeTrue("Requires subquery in FROM command support", EsqlCapabilities.Cap.SUBQUERY_IN_FROM_COMMAND.isEnabled()); } /* *Limit[1000[INTEGER],false,false] * \_UnionAll[[_meta_field{r}#45, emp_no{r}#46, first_name{r}#47, gender{r}#48, hire_date{r}#49, job{r}#50, job.raw{r}#51, * languages{r}#52, last_name{r}#53, long_noidx{r}#54, salary{r}#55, language_code{r}#56, language_name{r}#57]] * |_EsqlProject[[_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, gender{f}#8, hire_date{f}#13, job{f}#14, job.raw{f}#15, * languages{f}#9, last_name{f}#10, long_noidx{f}#16, salary{f}#11, language_code{r}#30, * language_name{r}#31]] * | \_Eval[[null[INTEGER] AS language_code#30, null[KEYWORD] AS language_name#31]] * | \_Limit[1000[INTEGER],false,false] * | \_Filter[emp_no{f}#6 > 10000[INTEGER]] * | \_EsRelation[test][_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, ge..] * |_EsqlProject[[_meta_field{f}#23, emp_no{f}#17, first_name{f}#18, gender{f}#19, hire_date{f}#24, job{f}#25, job.raw{f}#26, * languages{f}#20, last_name{f}#21, long_noidx{f}#27, salary{f}#22, language_code{r}#32, * language_name{r}#33]] * | \_Eval[[null[INTEGER] AS language_code#32, null[KEYWORD] AS language_name#33]] * | \_Subquery[] * | \_Limit[1000[INTEGER],false,false] * | \_Filter[languages{f}#20 > 0[INTEGER] AND emp_no{f}#17 > 10000[INTEGER]] * | \_EsRelation[test][_meta_field{f}#23, emp_no{f}#17, first_name{f}#18, ..] * \_LocalRelation[[_meta_field{r}#34, emp_no{r}#35, first_name{r}#36, gender{r}#37, hire_date{r}#38, job{r}#39, job.raw{r}#40, * languages{r}#41, last_name{r}#42, long_noidx{r}#43, salary{r}#44, language_code{f}#28, * language_name{f}#29],EMPTY] */ public void testPushDownSimpleFilterPastUnionAll() { var plan = planSubquery(""" FROM test, (FROM test | WHERE languages > 0), (FROM languages | WHERE language_code > 0) | WHERE emp_no > 10000 """); Limit limit = as(plan, Limit.class); UnionAll unionAll = as(limit.child(), UnionAll.class); assertEquals(3, unionAll.children().size()); EsqlProject child1 = as(unionAll.children().get(0), EsqlProject.class); Eval eval = as(child1.child(), Eval.class); Limit childLimit = as(eval.child(), Limit.class); Filter childFilter = as(childLimit.child(), Filter.class); GreaterThan greaterThan = as(childFilter.condition(), GreaterThan.class); FieldAttribute empNo = as(greaterThan.left(), FieldAttribute.class); assertEquals("emp_no", empNo.name()); Literal right = as(greaterThan.right(), Literal.class); assertEquals(10000, right.value()); EsRelation relation = as(childFilter.child(), EsRelation.class); assertEquals("test", relation.indexPattern()); EsqlProject child2 = as(unionAll.children().get(1), EsqlProject.class); eval = as(child2.child(), Eval.class); Subquery subquery = as(eval.child(), Subquery.class); childLimit = as(subquery.child(), Limit.class); childFilter = as(childLimit.child(), Filter.class); And and = as(childFilter.condition(), And.class); greaterThan = as(and.left(), GreaterThan.class); empNo = as(greaterThan.left(), FieldAttribute.class); assertEquals("languages", empNo.name()); right = as(greaterThan.right(), Literal.class); assertEquals(0, right.value()); greaterThan = as(and.right(), GreaterThan.class); empNo = as(greaterThan.left(), FieldAttribute.class); assertEquals("emp_no", empNo.name()); right = as(greaterThan.right(), Literal.class); assertEquals(10000, right.value()); relation = as(childFilter.child(), EsRelation.class); assertEquals("test", 
relation.indexPattern()); LocalRelation localRelation = as(unionAll.children().get(2), LocalRelation.class); } /* *Limit[1000[INTEGER],false,false] * \_UnionAll[[_meta_field{r}#27, emp_no{r}#28, first_name{r}#29, gender{r}#30, hire_date{r}#31, job{r}#32, job.raw{r}#33, * languages{r}#34, last_name{r}#35, long_noidx{r}#36, salary{r}#37]] * |_EsqlProject[[_meta_field{f}#11, emp_no{f}#5, first_name{f}#6, gender{f}#7, hire_date{f}#12, job{f}#13, job.raw{f}#14, * languages{f}#8, last_name{f}#9, long_noidx{f}#15, salary{f}#10]] * | \_Limit[1000[INTEGER],false,false] * | \_EsRelation[test][_meta_field{f}#11, emp_no{f}#5, first_name{f}#6, ge..] * \_EsqlProject[[_meta_field{f}#22, emp_no{f}#16, first_name{f}#17, gender{f}#18, hire_date{f}#23, job{f}#24, job.raw{f}#25, * languages{f}#19, last_name{f}#20, long_noidx{f}#26, salary{f}#21]] * \_Subquery[] * \_TopN[[Order[emp_no{f}#16,ASC,LAST]],1000[INTEGER],false] * \_Filter[languages{f}#19 > 0[INTEGER]] * \_EsRelation[test][_meta_field{f}#22, emp_no{f}#16, first_name{f}#17, ..] */ public void testPushDownLimitPastSubqueryWithSort() { var plan = planSubquery(""" FROM test, (FROM test | WHERE languages > 0 | SORT emp_no) """); Limit limit = as(plan, Limit.class); UnionAll unionAll = as(limit.child(), UnionAll.class); assertEquals(2, unionAll.children().size()); EsqlProject child1 = as(unionAll.children().get(0), EsqlProject.class); Limit childLimit = as(child1.child(), Limit.class); EsRelation relation = as(childLimit.child(), EsRelation.class); assertEquals("test", relation.indexPattern()); EsqlProject child2 = as(unionAll.children().get(1), EsqlProject.class); Subquery subquery = as(child2.child(), Subquery.class); TopN topN = as(subquery.child(), TopN.class); Filter childFilter = as(topN.child(), Filter.class); GreaterThan greaterThan = as(childFilter.condition(), GreaterThan.class); FieldAttribute empNo = as(greaterThan.left(), FieldAttribute.class); assertEquals("languages", empNo.name()); Literal right = as(greaterThan.right(), Literal.class); assertEquals(0, right.value()); relation = as(childFilter.child(), EsRelation.class); assertEquals("test", relation.indexPattern()); } /* *Limit[1000[INTEGER],false,false] * \_UnionAll[[_meta_field{r}#28, emp_no{r}#29, first_name{r}#30, gender{r}#31, hire_date{r}#32, job{r}#33, job.raw{r}#34, * languages{r}#35, last_name{r}#36, long_noidx{r}#37, salary{r}#38]] * |_EsqlProject[[_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, gender{f}#8, hire_date{f}#13, job{f}#14, job.raw{f}#15, * languages{f}#9, last_name{f}#10, long_noidx{f}#16, salary{f}#11]] * | \_Limit[1000[INTEGER],false,false] * | \_Filter[emp_no{f}#6 > 10000[INTEGER]] * | \_EsRelation[test][_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, ge..] * \_EsqlProject[[_meta_field{f}#23, emp_no{f}#17, first_name{f}#18, gender{f}#19, hire_date{f}#24, job{f}#25, job.raw{f}#26, * languages{f}#20, last_name{f}#21, long_noidx{f}#27, salary{f}#22]] * \_Subquery[] * \_TopN[[Order[emp_no{f}#17,ASC,LAST]],1000[INTEGER],false] * \_Filter[languages{f}#20 > 0[INTEGER] AND emp_no{f}#17 > 10000[INTEGER]] * \_EsRelation[test][_meta_field{f}#23, emp_no{f}#17, first_name{f}#18, ..] 
*/ public void testPushDownFilterAndLimitPastSubqueryWithSort() { var plan = planSubquery(""" FROM test, (FROM test | WHERE languages > 0 | SORT emp_no) | WHERE emp_no > 10000 """); Limit limit = as(plan, Limit.class); UnionAll unionAll = as(limit.child(), UnionAll.class); assertEquals(2, unionAll.children().size()); EsqlProject child1 = as(unionAll.children().get(0), EsqlProject.class); Limit childLimit = as(child1.child(), Limit.class); Filter childFilter = as(childLimit.child(), Filter.class); GreaterThan greaterThan = as(childFilter.condition(), GreaterThan.class); FieldAttribute empNo = as(greaterThan.left(), FieldAttribute.class); assertEquals("emp_no", empNo.name()); Literal right = as(greaterThan.right(), Literal.class); assertEquals(10000, right.value()); EsRelation relation = as(childFilter.child(), EsRelation.class); assertEquals("test", relation.indexPattern()); EsqlProject child2 = as(unionAll.children().get(1), EsqlProject.class); Subquery subquery = as(child2.child(), Subquery.class); TopN topN = as(subquery.child(), TopN.class); childFilter = as(topN.child(), Filter.class); And and = as(childFilter.condition(), And.class); greaterThan = as(and.left(), GreaterThan.class); empNo = as(greaterThan.left(), FieldAttribute.class); assertEquals("languages", empNo.name()); right = as(greaterThan.right(), Literal.class); assertEquals(0, right.value()); greaterThan = as(and.right(), GreaterThan.class); empNo = as(greaterThan.left(), FieldAttribute.class); assertEquals("emp_no", empNo.name()); right = as(greaterThan.right(), Literal.class); assertEquals(10000, right.value()); relation = as(childFilter.child(), EsRelation.class); assertEquals("test", relation.indexPattern()); } /* *Limit[1000[INTEGER],false,false] * \_UnionAll[[_meta_field{r}#46, emp_no{r}#47, first_name{r}#48, gender{r}#49, hire_date{r}#50, job{r}#51, job.raw{r}#52, * languages{r}#53, last_name{r}#54, long_noidx{r}#55, salary{r}#56, language_code{r}#57, language_name{r}#58]] * |_EsqlProject[[_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, gender{f}#9, hire_date{f}#14, job{f}#15, job.raw{f}#16, * languages{f}#10, last_name{f}#11, long_noidx{f}#17, salary{f}#12, language_code{r}#31, * language_name{r}#32]] * | \_Eval[[null[INTEGER] AS language_code#31, null[KEYWORD] AS language_name#32]] * | \_Limit[1000[INTEGER],false,false] * | \_Filter[emp_no{f}#7 > 10000[INTEGER] AND salary{f}#12 > 50000[INTEGER]] * | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] * |_EsqlProject[[_meta_field{f}#24, emp_no{f}#18, first_name{f}#19, gender{f}#20, hire_date{f}#25, job{f}#26, job.raw{f}#27, * languages{f}#21, last_name{f}#22, long_noidx{f}#28, salary{f}#23, language_code{r}#33, * language_name{r}#34]] * | \_Eval[[null[INTEGER] AS language_code#33, null[KEYWORD] AS language_name#34]] * | \_Subquery[] * | \_Limit[1000[INTEGER],false,false] * | \_Filter[languages{f}#21 > 0[INTEGER] AND emp_no{f}#18 > 10000[INTEGER] AND salary{f}#23 > 50000[INTEGER]] * | \_EsRelation[test][_meta_field{f}#24, emp_no{f}#18, first_name{f}#19, ..] 
* \_LocalRelation[[_meta_field{r}#35, emp_no{r}#36, first_name{r}#37, gender{r}#38, hire_date{r}#39, job{r}#40, job.raw{r}#41, * languages{r}#42, last_name{r}#43, long_noidx{r}#44, salary{r}#45, language_code{f}#29, * language_name{f}#30], EMPTY] */ public void testPushDownConjunctiveFilterPastUnionAll() { var plan = planSubquery(""" FROM test, (FROM test | WHERE languages > 0), (FROM languages | WHERE language_code > 0) | WHERE emp_no > 10000 and salary > 50000 """); Limit limit = as(plan, Limit.class); UnionAll unionAll = as(limit.child(), UnionAll.class); assertEquals(3, unionAll.children().size()); EsqlProject child1 = as(unionAll.children().get(0), EsqlProject.class); Eval eval = as(child1.child(), Eval.class); Limit childLimit = as(eval.child(), Limit.class); Filter childFilter = as(childLimit.child(), Filter.class); And and = as(childFilter.condition(), And.class); GreaterThan emp_no = as(and.left(), GreaterThan.class); FieldAttribute empNo = as(emp_no.left(), FieldAttribute.class); assertEquals("emp_no", empNo.name()); Literal right = as(emp_no.right(), Literal.class); assertEquals(10000, right.value()); GreaterThan salary = as(and.right(), GreaterThan.class); FieldAttribute salaryField = as(salary.left(), FieldAttribute.class); assertEquals("salary", salaryField.name()); right = as(salary.right(), Literal.class); assertEquals(50000, right.value()); EsRelation relation = as(childFilter.child(), EsRelation.class); assertEquals("test", relation.indexPattern()); EsqlProject child2 = as(unionAll.children().get(1), EsqlProject.class); eval = as(child2.child(), Eval.class); Subquery subquery = as(eval.child(), Subquery.class); childLimit = as(subquery.child(), Limit.class); childFilter = as(childLimit.child(), Filter.class); and = as(childFilter.condition(), And.class); GreaterThan greaterThan = as(and.left(), GreaterThan.class); FieldAttribute languages = as(greaterThan.left(), FieldAttribute.class); assertEquals("languages", languages.name()); right = as(greaterThan.right(), Literal.class); assertEquals(0, right.value()); and = as(and.right(), And.class); emp_no = as(and.left(), GreaterThan.class); empNo = as(emp_no.left(), FieldAttribute.class); assertEquals("emp_no", empNo.name()); right = as(emp_no.right(), Literal.class); assertEquals(10000, right.value()); salary = as(and.right(), GreaterThan.class); salaryField = as(salary.left(), FieldAttribute.class); assertEquals("salary", salaryField.name()); right = as(salary.right(), Literal.class); assertEquals(50000, right.value()); relation = as(childFilter.child(), EsRelation.class); assertEquals("test", relation.indexPattern()); LocalRelation localRelation = as(unionAll.children().get(2), LocalRelation.class); } /* *Limit[1000[INTEGER],false,false] * \_UnionAll[[_meta_field{r}#46, emp_no{r}#47, first_name{r}#48, gender{r}#49, hire_date{r}#50, job{r}#51, job.raw{r}#52, * languages{r}#53, last_name{r}#54, long_noidx{r}#55, salary{r}#56, language_code{r}#57, language_name{r}#58]] * |_EsqlProject[[_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, gender{f}#9, hire_date{f}#14, job{f}#15, job.raw{f}#16, * languages{f}#10, last_name{f}#11, long_noidx{f}#17, salary{f}#12, language_code{r}#31, * language_name{r}#32]] * | \_Eval[[null[INTEGER] AS language_code#31, null[KEYWORD] AS language_name#32]] * | \_Limit[1000[INTEGER],false,false] * | \_Filter[emp_no{f}#7 > 10000[INTEGER] OR salary{f}#12 > 50000[INTEGER]] * | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] 
* |_EsqlProject[[_meta_field{f}#24, emp_no{f}#18, first_name{f}#19, gender{f}#20, hire_date{f}#25, job{f}#26, job.raw{f}#27, * languages{f}#21, last_name{f}#22, long_noidx{f}#28, salary{f}#23, language_code{r}#33, * language_name{r}#34]] * | \_Eval[[null[INTEGER] AS language_code#33, null[KEYWORD] AS language_name#34]] * | \_Subquery[] * | \_Limit[1000[INTEGER],false,false] * | \_Filter[languages{f}#21 > 0[INTEGER] AND emp_no{f}#18 > 10000[INTEGER] OR salary{f}#23 > 50000[INTEGER]] * | \_EsRelation[test][_meta_field{f}#24, emp_no{f}#18, first_name{f}#19, ..] * \_LocalRelation[[_meta_field{r}#35, emp_no{r}#36, first_name{r}#37, gender{r}#38, hire_date{r}#39, job{r}#40, job.raw{r}#41, * languages{r}#42, last_name{r}#43, long_noidx{r}#44, salary{r}#45, language_code{f}#29, * language_name{f}#30],EMPTY] */ public void testPushDownDisjunctiveFilterPastUnionAll() { var plan = planSubquery(""" FROM test, (FROM test | WHERE languages > 0), (FROM languages | WHERE language_code > 0) | WHERE emp_no > 10000 or salary > 50000 """); Limit limit = as(plan, Limit.class); UnionAll unionAll = as(limit.child(), UnionAll.class); assertEquals(3, unionAll.children().size()); EsqlProject child1 = as(unionAll.children().get(0), EsqlProject.class); Eval eval = as(child1.child(), Eval.class); Limit childLimit = as(eval.child(), Limit.class); Filter childFilter = as(childLimit.child(), Filter.class); Or or = as(childFilter.condition(), Or.class); GreaterThan emp_no = as(or.left(), GreaterThan.class); FieldAttribute empNo = as(emp_no.left(), FieldAttribute.class); assertEquals("emp_no", empNo.name()); Literal right = as(emp_no.right(), Literal.class); assertEquals(10000, right.value()); GreaterThan salary = as(or.right(), GreaterThan.class); FieldAttribute salaryField = as(salary.left(), FieldAttribute.class); assertEquals("salary", salaryField.name()); right = as(salary.right(), Literal.class); assertEquals(50000, right.value()); EsRelation relation = as(childFilter.child(), EsRelation.class); assertEquals("test", relation.indexPattern()); EsqlProject child2 = as(unionAll.children().get(1), EsqlProject.class); eval = as(child2.child(), Eval.class); Subquery subquery = as(eval.child(), Subquery.class); childLimit = as(subquery.child(), Limit.class); childFilter = as(childLimit.child(), Filter.class); And and = as(childFilter.condition(), And.class); GreaterThan greaterThan = as(and.left(), GreaterThan.class); FieldAttribute languages = as(greaterThan.left(), FieldAttribute.class); assertEquals("languages", languages.name()); right = as(greaterThan.right(), Literal.class); assertEquals(0, right.value()); or = as(and.right(), Or.class); emp_no = as(or.left(), GreaterThan.class); empNo = as(emp_no.left(), FieldAttribute.class); assertEquals("emp_no", empNo.name()); right = as(emp_no.right(), Literal.class); assertEquals(10000, right.value()); salary = as(or.right(), GreaterThan.class); salaryField = as(salary.left(), FieldAttribute.class); assertEquals("salary", salaryField.name()); right = as(salary.right(), Literal.class); assertEquals(50000, right.value()); relation = as(childFilter.child(), EsRelation.class); assertEquals("test", relation.indexPattern()); LocalRelation localRelation = as(unionAll.children().get(2), LocalRelation.class); } /* *Limit[1000[INTEGER],false,false] * \_UnionAll[[_meta_field{r}#46, emp_no{r}#47, first_name{r}#48, gender{r}#49, hire_date{r}#50, job{r}#51, job.raw{r}#52, * languages{r}#53, last_name{r}#54, long_noidx{r}#55, salary{r}#56, language_code{r}#57, language_name{r}#58]] * 
|_EsqlProject[[_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, gender{f}#9, hire_date{f}#14, job{f}#15, job.raw{f}#16, * languages{f}#10, last_name{f}#11, long_noidx{f}#17, salary{f}#12, language_code{r}#31, * language_name{r}#32]] * | \_Eval[[null[INTEGER] AS language_code#31, null[KEYWORD] AS language_name#32]] * | \_Limit[1000[INTEGER],false,false] * | \_Filter[emp_no{f}#7 > 10000[INTEGER] AND salary{f}#12 < 50000[INTEGER]] * | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] * |_EsqlProject[[_meta_field{f}#24, emp_no{f}#18, first_name{f}#19, gender{f}#20, hire_date{f}#25, job{f}#26, job.raw{f}#27, * languages{f}#21, last_name{f}#22, long_noidx{f}#28, salary{f}#23, language_code{r}#33, * language_name{r}#34]] * | \_Eval[[null[INTEGER] AS language_code#33, null[KEYWORD] AS language_name#34]] * | \_Subquery[] * | \_Limit[1000[INTEGER],false,false] * | \_Filter[salary{f}#23 < 50000[INTEGER] AND emp_no{f}#18 > 10000[INTEGER]] * | \_EsRelation[test][_meta_field{f}#24, emp_no{f}#18, first_name{f}#19, ..] * \_LocalRelation[[_meta_field{r}#35, emp_no{r}#36, first_name{r}#37, gender{r}#38, hire_date{r}#39, job{r}#40, job.raw{r}#41, * languages{r}#42, last_name{r}#43, long_noidx{r}#44, salary{r}#45, language_code{f}#29, * language_name{f}#30],EMPTY] */ public void testPushDownFilterPastUnionAllAndCombineWithFilterInSubquery() { var plan = planSubquery(""" FROM test, (FROM test | where salary < 100000), (FROM languages | WHERE language_code > 0) | WHERE emp_no > 10000 and salary < 50000 """); Limit limit = as(plan, Limit.class); UnionAll unionAll = as(limit.child(), UnionAll.class); assertEquals(3, unionAll.children().size()); EsqlProject child1 = as(unionAll.children().get(0), EsqlProject.class); Eval eval = as(child1.child(), Eval.class); Limit childLimit = as(eval.child(), Limit.class); Filter childFilter = as(childLimit.child(), Filter.class); And and = as(childFilter.condition(), And.class); GreaterThan emp_no = as(and.left(), GreaterThan.class); FieldAttribute empNo = as(emp_no.left(), FieldAttribute.class); assertEquals("emp_no", empNo.name()); Literal right = as(emp_no.right(), Literal.class); assertEquals(10000, right.value()); LessThan salary = as(and.right(), LessThan.class); FieldAttribute salaryField = as(salary.left(), FieldAttribute.class); assertEquals("salary", salaryField.name()); right = as(salary.right(), Literal.class); assertEquals(50000, right.value()); EsRelation relation = as(childFilter.child(), EsRelation.class); assertEquals("test", relation.indexPattern()); EsqlProject child2 = as(unionAll.children().get(1), EsqlProject.class); eval = as(child2.child(), Eval.class); Subquery subquery = as(eval.child(), Subquery.class); childLimit = as(subquery.child(), Limit.class); childFilter = as(childLimit.child(), Filter.class); and = as(childFilter.condition(), And.class); emp_no = as(and.right(), GreaterThan.class); empNo = as(emp_no.left(), FieldAttribute.class); assertEquals("emp_no", empNo.name()); right = as(emp_no.right(), Literal.class); assertEquals(10000, right.value()); salary = as(and.left(), LessThan.class); salaryField = as(salary.left(), FieldAttribute.class); assertEquals("salary", salaryField.name()); right = as(salary.right(), Literal.class); assertEquals(50000, right.value()); relation = as(childFilter.child(), EsRelation.class); assertEquals("test", relation.indexPattern()); LocalRelation localRelation = as(unionAll.children().get(2), LocalRelation.class); } /* *Project[[_meta_field{r}#102, emp_no{r}#103, first_name{r}#104, 
gender{r}#105, hire_date{r}#106, job{r}#107, job.raw{r}#108, * languages{r}#109, last_name{r}#110, long_noidx{r}#111, salary{r}#112, z{r}#115, language_name{r}#116, * $$x$converted_to$long{r$}#125 AS x#38, $$y$converted_to$long{r$}#126 AS y#41]] * \_Limit[1000[INTEGER],false,false] * \_UnionAll[[_meta_field{r}#102, emp_no{r}#103, first_name{r}#104, gender{r}#105, hire_date{r}#106, job{r}#107, job.raw{r}#108, * languages{r}#109, last_name{r}#110, long_noidx{r}#111, salary{r}#112, x{r}#113, $$x$converted_to$long{r$}#125, * y{r}#114, $$y$converted_to$long{r$}#126, z{r}#115, language_name{r}#116]] * |_LocalRelation[[_meta_field{f}#51, emp_no{f}#45, first_name{f}#46, gender{f}#47, hire_date{f}#52, job{f}#53, job.raw{f}#54, * languages{f}#48, last_name{f}#49, long_noidx{f}#55, salary{f}#50, x{r}#82, * $$x$converted_to$long{r}#117, y{r}#127, $$y$converted_to$long{r}#118, z{r}#84, * language_name{r}#85],EMPTY] * |_EsqlProject[[_meta_field{f}#62, emp_no{f}#56, first_name{f}#57, gender{f}#58, hire_date{f}#63, job{f}#64, job.raw{f}#65, * languages{f}#59, last_name{f}#60, long_noidx{f}#66, salary{f}#61, x{r}#5, $$x$converted_to$long{r}#119, * y{r}#128, $$y$converted_to$long{r}#120, z{r}#11, language_name{r}#86]] * | \_Filter[ISNOTNULL($$y$converted_to$long{r}#120)] * | \_Eval[[null[KEYWORD] AS language_name#86, 1[LONG] AS $$x$converted_to$long#119, * TOLONG(y{r}#8) AS $$y$converted_to$long#120, null[KEYWORD] AS y#128]] * | \_Subquery[] * | \_Project[[_meta_field{f}#62, emp_no{f}#56, first_name{f}#57, gender{f}#58, hire_date{f}#63, job{f}#64, job.raw{f}#65, * languages{f}#59, last_name{f}#60, long_noidx{f}#66, salary{f}#61, x{r}#5, emp_no{f}#56 AS y#8, z{r}#11]] * | \_Limit[1000[INTEGER],false,false] * | \_Filter[z{r}#11 > 0[INTEGER]] * | \_Eval[[1[INTEGER] AS x#5, emp_no{f}#56 + 1[INTEGER] AS z#11]] * | \_Filter[salary{f}#61 < 100000[INTEGER]] * | \_EsRelation[test][_meta_field{f}#62, emp_no{f}#56, first_name{f}#57, ..] 
* |_EsqlProject[[_meta_field{r}#87, emp_no{r}#88, first_name{r}#89, gender{r}#90, hire_date{r}#91, job{r}#92, job.raw{r}#93, * languages{r}#94, last_name{r}#95, long_noidx{r}#96, salary{r}#97, x{r}#22, $$x$converted_to$long{r}#121, * y{r}#129, $$y$converted_to$long{r}#122, z{r}#17, language_name{r}#98]] * | \_Filter[ISNOTNULL($$y$converted_to$long{r}#122)] * | \_Eval[[null[KEYWORD] AS _meta_field#87, null[INTEGER] AS emp_no#88, null[KEYWORD] AS first_name#89, * null[TEXT] AS gender#90, null[DATETIME] AS hire_date#91, null[TEXT] AS job#92, null[KEYWORD] AS job.raw#93, * null[INTEGER] AS languages#94, null[KEYWORD] AS last_name#95, null[LONG] AS long_noidx#96, * null[INTEGER] AS salary#97, null[KEYWORD] AS language_name#98, 1[LONG] AS $$x$converted_to$long#121, * TOLONG(y{r}#20) AS $$y$converted_to$long#122, null[KEYWORD] AS y#129]] * | \_Subquery[] * | \_Eval[[1[INTEGER] AS x#22]] * | \_Limit[1000[INTEGER],false,false] * | \_Filter[z{r}#17 > 0[INTEGER]] * | \_Aggregate[[language_code{f}#67],[COUNT(*[KEYWORD],true[BOOLEAN]) AS y#20, language_code{f}#67 AS z#17]] * | \_EsRelation[languages][language_code{f}#67, language_name{f}#68] * \_EsqlProject[[_meta_field{f}#75, emp_no{r}#99, first_name{f}#70, gender{f}#71, hire_date{f}#76, job{f}#77, job.raw{f}#78, * languages{r}#100, last_name{f}#73, long_noidx{f}#79, salary{r}#101, x{r}#29, * $$x$converted_to$long{r}#123, y{r}#130, $$y$converted_to$long{r}#124, z{r}#35, language_name{f}#81]] * \_Filter[ISNOTNULL($$x$converted_to$long{r}#123) AND ISNOTNULL($$y$converted_to$long{r}#124)] * \_Eval[[null[INTEGER] AS emp_no#99, null[INTEGER] AS languages#100, null[INTEGER] AS salary#101, * TOLONG(x{r}#29) AS $$x$converted_to$long#123, TOLONG(y{r}#32) AS $$y$converted_to$long#124, * null[KEYWORD] AS y#130]] * \_Subquery[] * \_Project[[_meta_field{f}#75, emp_no{f}#69 AS x#29, first_name{f}#70, gender{f}#71, hire_date{f}#76, job{f}#77, * job.raw{f}#78, languages{f}#72 AS z#35, last_name{f}#73, long_noidx{f}#79, salary{f}#74 AS y#32, * language_name{f}#81]] * \_Limit[1000[INTEGER],true,false] * \_Join[LEFT,[languages{f}#72],[language_code{f}#80],null] * |_Limit[1000[INTEGER],false,false] * | \_Filter[languages{f}#72 > 0[INTEGER]] * | \_EsRelation[test][_meta_field{f}#75, emp_no{f}#69, first_name{f}#70, ..] * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#80, language_name{f}#81] */ public void testPushDownFilterOnReferenceAttributesPastUnionAll() { var plan = planSubquery(""" FROM test , (FROM test | where salary < 100000 | EVAL x = 1, y = emp_no, z = emp_no + 1) , (FROM languages | STATS cnt = COUNT(*) by language_code | RENAME language_code AS z, cnt AS y | EVAL x = 1) , (FROM test | RENAME languages AS language_code | LOOKUP JOIN languages_lookup ON language_code | RENAME emp_no AS x, salary AS y, language_code AS z) | EVAL x = x::long, y = y::long | WHERE x is not null and y is not null and z > 0 """); Project project = as(plan, Project.class); List<? 
extends NamedExpression> projections = project.projections(); assertEquals(15, projections.size()); Limit limit = as(project.child(), Limit.class); UnionAll unionAll = as(limit.child(), UnionAll.class); assertEquals(4, unionAll.children().size()); LocalRelation child1 = as(unionAll.children().get(0), LocalRelation.class); EsqlProject child2 = as(unionAll.children().get(1), EsqlProject.class); Filter filter = as(child2.child(), Filter.class); IsNotNull isNotNull = as(filter.condition(), IsNotNull.class); ReferenceAttribute y = as(isNotNull.field(), ReferenceAttribute.class); assertEquals("$$y$converted_to$long", y.name()); Eval eval = as(filter.child(), Eval.class); List<Alias> aliases = eval.fields(); assertEquals(4, aliases.size()); assertEquals("language_name", aliases.get(0).name()); assertEquals("$$x$converted_to$long", aliases.get(1).name()); assertEquals("$$y$converted_to$long", aliases.get(2).name()); assertEquals("y", aliases.get(3).name()); Subquery subquery = as(eval.child(), Subquery.class); project = as(subquery.child(), Project.class); Limit childLimit = as(project.child(), Limit.class); Filter childFilter = as(childLimit.child(), Filter.class); GreaterThan greaterThan = as(childFilter.condition(), GreaterThan.class); ReferenceAttribute z = as(greaterThan.left(), ReferenceAttribute.class); assertEquals("z", z.name()); Literal right = as(greaterThan.right(), Literal.class); assertEquals(0, right.value()); eval = as(childFilter.child(), Eval.class); aliases = eval.fields(); assertEquals(2, aliases.size()); Alias aliasX = aliases.get(0); assertEquals("x", aliasX.name()); Literal xLiteral = as(aliasX.child(), Literal.class); assertEquals(1, xLiteral.value()); Alias aliasZ = aliases.get(1); assertEquals("z", aliasZ.name()); childFilter = as(eval.child(), Filter.class); LessThan lessThan = as(childFilter.condition(), LessThan.class); FieldAttribute salaryField = as(lessThan.left(), FieldAttribute.class); assertEquals("salary", salaryField.name()); Literal literal = as(lessThan.right(), Literal.class); assertEquals(100000, literal.value()); EsRelation relation = as(childFilter.child(), EsRelation.class); assertEquals("test", relation.indexPattern()); EsqlProject child3 = as(unionAll.children().get(2), EsqlProject.class); filter = as(child3.child(), Filter.class); isNotNull = as(filter.condition(), IsNotNull.class); y = as(isNotNull.field(), ReferenceAttribute.class); assertEquals("$$y$converted_to$long", y.name()); eval = as(filter.child(), Eval.class); subquery = as(eval.child(), Subquery.class); eval = as(subquery.child(), Eval.class); limit = as(eval.child(), Limit.class); filter = as(limit.child(), Filter.class); greaterThan = as(filter.condition(), GreaterThan.class); z = as(greaterThan.left(), ReferenceAttribute.class); assertEquals("z", z.name()); right = as(greaterThan.right(), Literal.class); assertEquals(0, right.value()); Aggregate aggregate = as(filter.child(), Aggregate.class); List<Expression> groupings = aggregate.groupings(); assertEquals(1, groupings.size()); FieldAttribute language_code = as(groupings.get(0), FieldAttribute.class); assertEquals("language_code", language_code.name()); List<? 
extends NamedExpression> aggregates = aggregate.aggregates();
        assertEquals(2, aggregates.size());
        assertEquals("y", aggregates.get(0).name());
        assertEquals("z", aggregates.get(1).name());
        relation = as(aggregate.child(), EsRelation.class);
        assertEquals("languages", relation.indexPattern());

        EsqlProject child4 = as(unionAll.children().get(3), EsqlProject.class);
        filter = as(child4.child(), Filter.class);
        And and = as(filter.condition(), And.class);
        isNotNull = as(and.left(), IsNotNull.class);
        ReferenceAttribute x = as(isNotNull.field(), ReferenceAttribute.class);
        assertEquals("$$x$converted_to$long", x.name());
        isNotNull = as(and.right(), IsNotNull.class);
        ReferenceAttribute yAttr = as(isNotNull.field(), ReferenceAttribute.class);
        assertEquals("$$y$converted_to$long", yAttr.name());
        eval = as(filter.child(), Eval.class);
        aliases = eval.fields();
        assertEquals(6, aliases.size());
        subquery = as(eval.child(), Subquery.class);
        project = as(subquery.child(), Project.class);
        limit = as(project.child(), Limit.class);
        Join lookupJoin = as(limit.child(), Join.class);
        limit = as(lookupJoin.left(), Limit.class);
        filter = as(limit.child(), Filter.class);
        greaterThan = as(filter.condition(), GreaterThan.class);
        FieldAttribute languagesField = as(greaterThan.left(), FieldAttribute.class);
        assertEquals("languages", languagesField.name());
        right = as(greaterThan.right(), Literal.class);
        assertEquals(0, right.value());
        relation = as(filter.child(), EsRelation.class);
        assertEquals("test", relation.indexPattern());
        relation = as(lookupJoin.right(), EsRelation.class);
        assertEquals("languages_lookup", relation.indexPattern());
    }

    /*
     *Limit[1000[INTEGER],false,false]
     * \_UnionAll[[_meta_field{r}#36, emp_no{r}#37, first_name{r}#38, gender{r}#39, hire_date{r}#40, job{r}#41, job.raw{r}#42,
     * languages{r}#43, last_name{r}#44, long_noidx{r}#45, salary{r}#46, x{r}#47, y{r}#48]]
     * |_LocalRelation[[_meta_field{f}#18, emp_no{f}#12, first_name{f}#13, gender{f}#14, hire_date{f}#19, job{f}#20, job.raw{f}#21,
     * languages{f}#15, last_name{f}#16, long_noidx{f}#22, salary{f}#17, x{r}#34, y{r}#35],EMPTY]
     * \_EsqlProject[[_meta_field{f}#29, emp_no{f}#23, first_name{f}#24, gender{f}#25, hire_date{f}#30, job{f}#31, job.raw{f}#32,
     * languages{f}#26, last_name{f}#27, long_noidx{f}#33, salary{f}#28, x{r}#5, y{r}#8]]
     * \_Subquery[]
     * \_Limit[1000[INTEGER],false,false]
     * \_Filter[y{r}#8 > 0[INTEGER]]
     * \_Eval[[1[INTEGER] AS x#5, emp_no{f}#23 + 1[INTEGER] AS y#8]]
     * \_Filter[salary{f}#28 < 100000[INTEGER] AND emp_no{f}#23 > 0[INTEGER]]
     * \_EsRelation[test][_meta_field{f}#29, emp_no{f}#23, first_name{f}#24, ..]
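     *
     * Reading the dump: the conjunct on the reference attribute y stays in the subquery branch as a
     * Filter above the Eval that defines y, while emp_no > 0 is combined with the subquery's own
     * salary < 100000 filter below that Eval. The "x is not null" conjunct folds away entirely: x is
     * the literal 1 in the subquery branch, and null in the plain FROM test branch, which therefore
     * collapses to an empty LocalRelation.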
*/ public void testPushDownFilterOnReferenceAttributesAndFieldAttributesPastUnionAll() { var plan = planSubquery(""" FROM test, (FROM test | where salary < 100000 | EVAL x = 1, y = emp_no + 1) | WHERE x is not null and y > 0 and emp_no > 0 """); Limit limit = as(plan, Limit.class); UnionAll unionAll = as(limit.child(), UnionAll.class); assertEquals(2, unionAll.children().size()); LocalRelation child1 = as(unionAll.children().get(0), LocalRelation.class); EsqlProject child2 = as(unionAll.children().get(1), EsqlProject.class); Subquery subquery = as(child2.child(), Subquery.class); Limit childLimit = as(subquery.child(), Limit.class); Filter childFilter = as(childLimit.child(), Filter.class); GreaterThan greaterThan = as(childFilter.condition(), GreaterThan.class); ReferenceAttribute y = as(greaterThan.left(), ReferenceAttribute.class); assertEquals("y", y.name()); Literal right = as(greaterThan.right(), Literal.class); assertEquals(0, right.value()); Eval eval = as(childFilter.child(), Eval.class); List<Alias> aliases = eval.fields(); assertEquals(2, aliases.size()); Alias aliasX = aliases.get(0); assertEquals("x", aliasX.name()); Literal xLiteral = as(aliasX.child(), Literal.class); assertEquals(1, xLiteral.value()); Alias aliasZ = aliases.get(1); assertEquals("y", aliasZ.name()); childFilter = as(eval.child(), Filter.class); And and = as(childFilter.condition(), And.class); greaterThan = as(and.right(), GreaterThan.class); FieldAttribute emp_no = as(greaterThan.left(), FieldAttribute.class); assertEquals("emp_no", emp_no.name()); LessThan lessThan = as(and.left(), LessThan.class); FieldAttribute salaryField = as(lessThan.left(), FieldAttribute.class); assertEquals("salary", salaryField.name()); Literal literal = as(lessThan.right(), Literal.class); assertEquals(100000, literal.value()); EsRelation relation = as(childFilter.child(), EsRelation.class); assertEquals("test", relation.indexPattern()); } /* * TODO push down filter on mixed typed attributes * Project[[_meta_field{r}#67, first_name{r}#69, hire_date{r}#71, job{r}#72, job.raw{r}#73, languages{r}#74, last_name{r}#75, * long_noidx{r}#76, salary{r}#77, avg_worked_seconds{r}#78, birth_date{r}#79, height{r}#80, height.double{r}#81, * height.half_float{r}#82, height.scaled_float{r}#83, is_rehired{r}#84, job_positions{r}#85, languages.int{r}#86, * languages.long{r}#87, languages.short{r}#88, salary_change{r}#89, still_hired{r}#90, * $$emp_no$converted_to$double{r$}#97 AS x#6, $$emp_no$converted_to$long{r$}#98 AS emp_no#9, * $$gender$converted_to$keyword{r$}#99 AS gender#12, languages{r}#74 AS y#15]] * \_Limit[1000[INTEGER],false,false] * \_Filter[$$emp_no$converted_to$long{r$}#98 > 10000[INTEGER] AND * ISNOTNULL($$gender$converted_to$keyword{r$}#99) AND * languages{r}#74 < 5[INTEGER]] * \_UnionAll[[_meta_field{r}#67, emp_no{r}#68, $$emp_no$converted_to$double{r$}#97, $$emp_no$converted_to$long{r$}#98, * first_name{r}#69, gender{r}#70, $$gender$converted_to$keyword{r$}#99, hire_date{r}#71, job{r}#72, * job.raw{r}#73, languages{r}#74, last_name{r}#75, long_noidx{r}#76, salary{r}#77, avg_worked_seconds{r}#78, * birth_date{r}#79, height{r}#80, height.double{r}#81, height.half_float{r}#82, height.scaled_float{r}#83, * is_rehired{r}#84, job_positions{r}#85, languages.int{r}#86, languages.long{r}#87, languages.short{r}#88, * salary_change{r}#89, still_hired{r}#90]] * |_EsqlProject[[_meta_field{f}#25, emp_no{r}#100, $$emp_no$converted_to$double{r}#91, $$emp_no$converted_to$long{r}#92, * first_name{r}#101, gender{f}#21, 
$$gender$converted_to$keyword{r}#93, hire_date{r}#102, job{f}#27, * job.raw{f}#28, languages{f}#22, last_name{r}#103, long_noidx{f}#29, salary{r}#104, * avg_worked_seconds{r}#50, birth_date{r}#51, height{r}#52, height.double{r}#53, * height.half_float{r}#54, height.scaled_float{r}#55, is_rehired{r}#56, job_positions{r}#57, * languages.int{r}#58, languages.long{r}#59, languages.short{r}#60, salary_change{r}#61, * still_hired{r}#62]] * | \_Eval[[null[UNSIGNED_LONG] AS avg_worked_seconds#50, null[DATETIME] AS birth_date#51, null[DOUBLE] AS height#52, * null[DOUBLE] AS height.double#53, null[DOUBLE] AS height.half_float#54, null[DOUBLE] AS height.scaled_float#55, * null[KEYWORD] AS is_rehired#56, null[TEXT] AS job_positions#57, null[INTEGER] AS languages.int#58, * null[LONG] AS languages.long#59, null[INTEGER] AS languages.short#60, null[DOUBLE] AS salary_change#61, * null[KEYWORD] AS still_hired#62, TODOUBLE(emp_no{f}#19) AS $$emp_no$converted_to$double#91, * TOLONG(emp_no{f}#19) AS $$emp_no$converted_to$long#92, * TOSTRING(gender{f}#21) AS $$gender$converted_to$keyword#93, null[KEYWORD] AS emp_no#100, * null[KEYWORD] AS first_name#101, TODATENANOS(hire_date{f}#26) AS hire_date#102, * null[KEYWORD] AS last_name#103, null[KEYWORD] AS salary#104]] * | \_Limit[1000[INTEGER],false,false] * | \_EsRelation[test][_meta_field{f}#25, emp_no{f}#19, first_name{f}#20, ..] * \_Project[[_meta_field{r}#63, $$emp_no$temp_name$109{r}#110 AS emp_no#105, $$emp_no$converted_to$double{r}#94, * emp_no{f}#30 AS $$emp_no$converted_to$long#95, first_name{r}#106, gender{f}#33, * $$gender$converted_to$keyword{r}#96, hire_date{f}#35, job{r}#64, job.raw{r}#65, languages{f}#37, * last_name{r}#107, long_noidx{r}#66, salary{r}#108, avg_worked_seconds{f}#46, birth_date{f}#34, height{f}#41, * height.double{f}#42, height.half_float{f}#44, height.scaled_float{f}#43, is_rehired{f}#48, * job_positions{f}#47, languages.int{f}#40, languages.long{f}#38, languages.short{f}#39, salary_change{f}#49, * still_hired{f}#45]] * \_Eval[[null[KEYWORD] AS _meta_field#63, null[TEXT] AS job#64, null[KEYWORD] AS job.raw#65, null[LONG] AS long_noidx#66, * TODOUBLE(emp_no{f}#30) AS $$emp_no$converted_to$double#94, * TOSTRING(gender{f}#33) AS $$gender$converted_to$keyword#96, null[KEYWORD] AS $$emp_no$temp_name$109#110, * null[KEYWORD] AS first_name#106, null[KEYWORD] AS last_name#107, null[KEYWORD] AS salary#108]] * \_Subquery[] * \_Limit[1000[INTEGER],false,false] * \_Filter[languages{f}#37 > 1[INTEGER]] * \_EsRelation[test_mixed_types][avg_worked_seconds{f}#46, birth_date{f}#34, emp_no{..] */ public void testFilterOnMixedDataTypesFields() { var plan = planSubquery(""" FROM test, (FROM test_mixed_types | WHERE languages > 1) | EVAL x = emp_no::double, emp_no = emp_no::long, gender = gender::keyword, y = languages | WHERE emp_no > 10000 AND gender is not null AND y < 5 """); Project project = as(plan, Project.class); List<? 
extends NamedExpression> projections = project.projections();
        assertEquals(26, projections.size());
        Limit limit = as(project.child(), Limit.class);
        Filter filter = as(limit.child(), Filter.class);
        And and = as(filter.condition(), And.class);
        LessThan lessThan = as(and.right(), LessThan.class);
        ReferenceAttribute languagesRef = as(lessThan.left(), ReferenceAttribute.class);
        assertEquals("languages", languagesRef.name());
        Literal right = as(lessThan.right(), Literal.class);
        assertEquals(5, right.value());
        and = as(and.left(), And.class);
        GreaterThan greaterThan = as(and.left(), GreaterThan.class);
        ReferenceAttribute emp_no = as(greaterThan.left(), ReferenceAttribute.class);
        assertEquals("$$emp_no$converted_to$long", emp_no.name());
        right = as(greaterThan.right(), Literal.class);
        assertEquals(10000, right.value());
        IsNotNull isNotNull = as(and.right(), IsNotNull.class);
        ReferenceAttribute gender = as(isNotNull.field(), ReferenceAttribute.class);
        assertEquals("$$gender$converted_to$keyword", gender.name());

        UnionAll unionAll = as(filter.child(), UnionAll.class);
        assertEquals(2, unionAll.children().size());
        EsqlProject child1 = as(unionAll.children().get(0), EsqlProject.class);
        Eval eval = as(child1.child(), Eval.class);
        limit = as(eval.child(), Limit.class);
        EsRelation relation = as(limit.child(), EsRelation.class);
        assertEquals("test", relation.indexPattern());

        Project child2 = as(unionAll.children().get(1), Project.class);
        eval = as(child2.child(), Eval.class);
        Subquery subquery = as(eval.child(), Subquery.class);
        limit = as(subquery.child(), Limit.class);
        Filter childFilter = as(limit.child(), Filter.class);
        greaterThan = as(childFilter.condition(), GreaterThan.class);
        FieldAttribute languagesField = as(greaterThan.left(), FieldAttribute.class);
        assertEquals("languages", languagesField.name());
        right = as(greaterThan.right(), Literal.class);
        assertEquals(1, right.value());
        relation = as(childFilter.child(), EsRelation.class);
        assertEquals("test_mixed_types", relation.indexPattern());
    }

    /*
     * Limit[1000[INTEGER],false,false]
     * \_UnionAll[[_meta_field{r}#27, emp_no{r}#28, first_name{r}#29, gender{r}#30, hire_date{r}#31, job{r}#32, job.raw{r}#33,
     * languages{r}#34, last_name{r}#35, long_noidx{r}#36, salary{r}#37]]
     * |_EsqlProject[[_meta_field{f}#11, emp_no{f}#5, first_name{f}#6, gender{f}#7, hire_date{f}#12, job{f}#13, job.raw{f}#14,
     * languages{f}#8, last_name{f}#9, long_noidx{f}#15, salary{f}#10]]
     * | \_Limit[1000[INTEGER],false,false]
     * | \_Filter[:(first_name{f}#6,first[KEYWORD])]
     * | \_EsRelation[test][_meta_field{f}#11, emp_no{f}#5, first_name{f}#6, ge..]
     * \_EsqlProject[[_meta_field{f}#22, emp_no{f}#16, first_name{f}#17, gender{f}#18, hire_date{f}#23, job{f}#24, job.raw{f}#25,
     * languages{f}#19, last_name{f}#20, long_noidx{f}#26, salary{f}#21]]
     * \_Subquery[]
     * \_Limit[1000[INTEGER],false,false]
     * \_Filter[languages{f}#19 > 0[INTEGER] AND :(first_name{f}#17,first[KEYWORD])]
     * \_EsRelation[test][_meta_field{f}#22, emp_no{f}#16, first_name{f}#17, ..]
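     *
     * The ":" in the Filter nodes above is the match operator: first_name:"first" from the outer query
     * is pushed verbatim into the first branch, and AND-combined with the subquery's existing
     * languages > 0 filter in the second branch.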
     */
    public void testPushDownSingleFullTextFunctionPastUnionAll() {
        var plan = planSubquery("""
            FROM test, (FROM test | WHERE languages > 0)
            | WHERE first_name:"first"
            """);
        Limit limit = as(plan, Limit.class);
        UnionAll unionAll = as(limit.child(), UnionAll.class);
        assertEquals(2, unionAll.children().size());

        EsqlProject child1 = as(unionAll.children().get(0), EsqlProject.class);
        Limit childLimit = as(child1.child(), Limit.class);
        Filter childFilter = as(childLimit.child(), Filter.class);
        MatchOperator match = as(childFilter.condition(), MatchOperator.class);
        FieldAttribute first_name = as(match.field(), FieldAttribute.class);
        assertEquals("first_name", first_name.name());
        Literal right = as(match.query(), Literal.class);
        assertEquals(new BytesRef("first"), right.value());
        EsRelation relation = as(childFilter.child(), EsRelation.class);
        assertEquals("test", relation.indexPattern());

        EsqlProject child2 = as(unionAll.children().get(1), EsqlProject.class);
        Subquery subquery = as(child2.child(), Subquery.class);
        childLimit = as(subquery.child(), Limit.class);
        childFilter = as(childLimit.child(), Filter.class);
        And and = as(childFilter.condition(), And.class);
        MatchOperator matchOperator = as(and.right(), MatchOperator.class);
        first_name = as(matchOperator.field(), FieldAttribute.class);
        assertEquals("first_name", first_name.name());
        right = as(matchOperator.query(), Literal.class);
        assertEquals(new BytesRef("first"), right.value());
        GreaterThan greaterThan = as(and.left(), GreaterThan.class);
        FieldAttribute languages = as(greaterThan.left(), FieldAttribute.class);
        assertEquals("languages", languages.name());
        right = as(greaterThan.right(), Literal.class);
        assertEquals(0, right.value());
        relation = as(childFilter.child(), EsRelation.class);
        assertEquals("test", relation.indexPattern());
    }

    /*
     * Limit[1000[INTEGER],false,false]
     * \_UnionAll[[_meta_field{r}#26, emp_no{r}#27, first_name{r}#28, gender{r}#29, hire_date{r}#30, job{r}#31, job.raw{r}#32,
     * languages{r}#33, last_name{r}#34, long_noidx{r}#35, salary{r}#36]]
     * |_EsqlProject[[_meta_field{f}#10, emp_no{f}#4, first_name{f}#5, gender{f}#6, hire_date{f}#11, job{f}#12, job.raw{f}#13,
     * languages{f}#7, last_name{f}#8, long_noidx{f}#14, salary{f}#9]]
     * | \_Limit[1000[INTEGER],false,false]
     * | \_Filter[QSTR(first_name:first[KEYWORD]) AND KQL(last_name:last[KEYWORD])]
     * | \_EsRelation[test][_meta_field{f}#10, emp_no{f}#4, first_name{f}#5, ge..]
     * \_EsqlProject[[_meta_field{f}#21, emp_no{f}#15, first_name{f}#16, gender{f}#17, hire_date{f}#22, job{f}#23, job.raw{f}#24,
     * languages{f}#18, last_name{f}#19, long_noidx{f}#25, salary{f}#20]]
     * \_Subquery[]
     * \_Limit[1000[INTEGER],false,false]
     * \_Filter[languages{f}#18 > 0[INTEGER] AND QSTR(gender:female[KEYWORD]) AND
     * QSTR(first_name:first[KEYWORD]) AND KQL(last_name:last[KEYWORD])]
     * \_EsRelation[test][_meta_field{f}#21, emp_no{f}#15, first_name{f}#16, ..]
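     *
     * qstr and kql take no field argument, so they can be pushed into every branch unconditionally;
     * note that the assertions below expect the "== false" comparison to surface as a Not wrapped
     * around the KQL function.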
*/ public void testPushDownFullTextFunctionNoFieldRequiredPastUnionAll() { var plan = planSubquery(""" FROM test, (FROM test | WHERE languages > 0 AND qstr("gender:female")) | WHERE qstr("first_name:first") == true AND kql("last_name:last") == false """); Limit limit = as(plan, Limit.class); UnionAll unionAll = as(limit.child(), UnionAll.class); assertEquals(2, unionAll.children().size()); EsqlProject child1 = as(unionAll.children().get(0), EsqlProject.class); Limit childLimit = as(child1.child(), Limit.class); Filter childFilter = as(childLimit.child(), Filter.class); And and = as(childFilter.condition(), And.class); QueryString queryString = as(and.left(), QueryString.class); Literal queryStringLiteral = as(queryString.query(), Literal.class); assertEquals(new BytesRef("first_name:first"), queryStringLiteral.value()); Not not = as(and.right(), Not.class); Kql kql = as(not.negate(), Kql.class); Literal kqlLiteral = as(kql.query(), Literal.class); assertEquals(new BytesRef("last_name:last"), kqlLiteral.value()); EsRelation relation = as(childFilter.child(), EsRelation.class); assertEquals("test", relation.indexPattern()); EsqlProject child2 = as(unionAll.children().get(1), EsqlProject.class); Subquery subquery = as(child2.child(), Subquery.class); childLimit = as(subquery.child(), Limit.class); childFilter = as(childLimit.child(), Filter.class); and = as(childFilter.condition(), And.class); And subqueryAnd = as(and.left(), And.class); GreaterThan greaterThan = as(subqueryAnd.left(), GreaterThan.class); FieldAttribute languages = as(greaterThan.left(), FieldAttribute.class); assertEquals("languages", languages.name()); Literal right = as(greaterThan.right(), Literal.class); assertEquals(0, right.value()); queryString = as(subqueryAnd.right(), QueryString.class); queryStringLiteral = as(queryString.query(), Literal.class); assertEquals(new BytesRef("gender:female"), queryStringLiteral.value()); and = as(and.right(), And.class); queryString = as(and.left(), QueryString.class); queryStringLiteral = as(queryString.query(), Literal.class); assertEquals(new BytesRef("first_name:first"), queryStringLiteral.value()); not = as(and.right(), Not.class); Kql kqlFunction = as(not.negate(), Kql.class); kqlLiteral = as(kqlFunction.query(), Literal.class); assertEquals(new BytesRef("last_name:last"), kqlLiteral.value()); relation = as(childFilter.child(), EsRelation.class); assertEquals("test", relation.indexPattern()); } /* * Limit[1000[INTEGER],false,false] * \_UnionAll[[_meta_field{r}#28, emp_no{r}#29, first_name{r}#30, gender{r}#31, hire_date{r}#32, job{r}#33, job.raw{r}#34, * languages{r}#35, last_name{r}#36, long_noidx{r}#37, salary{r}#38]] * |_EsqlProject[[_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, gender{f}#8, hire_date{f}#13, job{f}#14, job.raw{f}#15, * languages{f}#9, last_name{f}#10, long_noidx{f}#16, salary{f}#11]] * | \_Limit[1000[INTEGER],false,false] * | \_Filter[:(first_name{f}#7,first[KEYWORD]) AND MATCH(last_name{f}#10,last[KEYWORD]) AND QSTR(gender:female[KEYWORD])] * | \_EsRelation[test][_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, ge..] 
* \_EsqlProject[[_meta_field{f}#23, emp_no{f}#17, first_name{f}#18, gender{f}#19, hire_date{f}#24, job{f}#25, job.raw{f}#26, * languages{f}#20, last_name{f}#21, long_noidx{f}#27, salary{f}#22]] * \_Subquery[] * \_Limit[1000[INTEGER],false,false] * \_Filter[languages{f}#20 > 0[INTEGER] AND :(first_name{f}#18,first[KEYWORD]) AND * MATCH(last_name{f}#21,last[KEYWORD]) AND QSTR(gender:female[KEYWORD])] * \_EsRelation[test][_meta_field{f}#23, emp_no{f}#17, first_name{f}#18, ..] */ public void testPushDownConjunctiveFullTextFunctionPastUnionAll() { var plan = planSubquery(""" FROM test, (FROM test | WHERE languages > 0) | WHERE first_name:"first" and match(last_name, "last") and qstr("gender:female") """); Limit limit = as(plan, Limit.class); UnionAll unionAll = as(limit.child(), UnionAll.class); assertEquals(2, unionAll.children().size()); EsqlProject child1 = as(unionAll.children().get(0), EsqlProject.class); Limit childLimit = as(child1.child(), Limit.class); Filter childFilter = as(childLimit.child(), Filter.class); And and = as(childFilter.condition(), And.class); QueryString queryString = as(and.right(), QueryString.class); Literal queryStringLiteral = as(queryString.query(), Literal.class); assertEquals(new BytesRef("gender:female"), queryStringLiteral.value()); and = as(and.left(), And.class); MatchOperator matchOperator = as(and.left(), MatchOperator.class); FieldAttribute first_name = as(matchOperator.field(), FieldAttribute.class); assertEquals("first_name", first_name.name()); Literal right = as(matchOperator.query(), Literal.class); assertEquals(new BytesRef("first"), right.value()); Match matchFunction = as(and.right(), Match.class); FieldAttribute last_name = as(matchFunction.field(), FieldAttribute.class); assertEquals("last_name", last_name.name()); right = as(matchFunction.query(), Literal.class); assertEquals(new BytesRef("last"), right.value()); EsRelation relation = as(childFilter.child(), EsRelation.class); assertEquals("test", relation.indexPattern()); EsqlProject child2 = as(unionAll.children().get(1), EsqlProject.class); Subquery subquery = as(child2.child(), Subquery.class); childLimit = as(subquery.child(), Limit.class); childFilter = as(childLimit.child(), Filter.class); and = as(childFilter.condition(), And.class); GreaterThan greaterThan = as(and.left(), GreaterThan.class); FieldAttribute languages = as(greaterThan.left(), FieldAttribute.class); assertEquals("languages", languages.name()); right = as(greaterThan.right(), Literal.class); assertEquals(0, right.value()); and = as(and.right(), And.class); queryString = as(and.right(), QueryString.class); queryStringLiteral = as(queryString.query(), Literal.class); assertEquals(new BytesRef("gender:female"), queryStringLiteral.value()); and = as(and.left(), And.class); matchOperator = as(and.left(), MatchOperator.class); first_name = as(matchOperator.field(), FieldAttribute.class); assertEquals("first_name", first_name.name()); right = as(matchOperator.query(), Literal.class); assertEquals(new BytesRef("first"), right.value()); matchFunction = as(and.right(), Match.class); last_name = as(matchFunction.field(), FieldAttribute.class); assertEquals("last_name", last_name.name()); right = as(matchFunction.query(), Literal.class); assertEquals(new BytesRef("last"), right.value()); relation = as(childFilter.child(), EsRelation.class); assertEquals("test", relation.indexPattern()); } /* * Limit[1000[INTEGER],false,false] * \_UnionAll[[_meta_field{r}#29, emp_no{r}#30, first_name{r}#31, gender{r}#32, hire_date{r}#33, job{r}#34, 
job.raw{r}#35, * languages{r}#36, last_name{r}#37, long_noidx{r}#38, salary{r}#39]] * |_EsqlProject[[_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, gender{f}#9, hire_date{f}#14, job{f}#15, job.raw{f}#16, * languages{f}#10, last_name{f}#11, long_noidx{f}#17, salary{f}#12]] * | \_Limit[1000[INTEGER],false,false] * | \_Filter[:(first_name{f}#8,first[KEYWORD]) OR MatchPhrase(last_name{f}#11,last[KEYWORD]) OR KQL(gender:female[KEYWORD])] * | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] * \_EsqlProject[[_meta_field{f}#24, emp_no{f}#18, first_name{f}#19, gender{f}#20, hire_date{f}#25, job{f}#26, job.raw{f}#27, * languages{f}#21, last_name{f}#22, long_noidx{f}#28, salary{f}#23]] * \_Subquery[] * \_Limit[1000[INTEGER],false,false] * \_Filter[languages{f}#21 > 0[INTEGER] AND MATCH(gender{f}#20,F[KEYWORD]) AND :(first_name{f}#19,first[KEYWORD]) OR * MatchPhrase(last_name{f}#22,last[KEYWORD]) OR KQL(gender:female[KEYWORD])] * \_EsRelation[test][_meta_field{f}#24, emp_no{f}#18, first_name{f}#19, ..] */ public void testPushDownDisjunctiveFullTextFunctionPastUnionAll() { var plan = planSubquery(""" FROM test, (FROM test | WHERE languages > 0 and match(gender , "F")) | WHERE first_name:"first" or match_phrase(last_name, "last") or kql("gender:female") """); Limit limit = as(plan, Limit.class); UnionAll unionAll = as(limit.child(), UnionAll.class); assertEquals(2, unionAll.children().size()); EsqlProject child1 = as(unionAll.children().get(0), EsqlProject.class); Limit childLimit = as(child1.child(), Limit.class); Filter childFilter = as(childLimit.child(), Filter.class); Or or = as(childFilter.condition(), Or.class); Kql kql = as(or.right(), Kql.class); Literal kqlLiteral = as(kql.query(), Literal.class); assertEquals(new BytesRef("gender:female"), kqlLiteral.value()); or = as(or.left(), Or.class); MatchOperator matchOperator = as(or.left(), MatchOperator.class); FieldAttribute first_name = as(matchOperator.field(), FieldAttribute.class); assertEquals("first_name", first_name.name()); Literal right = as(matchOperator.query(), Literal.class); assertEquals(new BytesRef("first"), right.value()); MatchPhrase matchPhrase = as(or.right(), MatchPhrase.class); FieldAttribute last_name = as(matchPhrase.field(), FieldAttribute.class); assertEquals("last_name", last_name.name()); right = as(matchPhrase.query(), Literal.class); assertEquals(new BytesRef("last"), right.value()); EsRelation relation = as(childFilter.child(), EsRelation.class); assertEquals("test", relation.indexPattern()); EsqlProject child2 = as(unionAll.children().get(1), EsqlProject.class); Subquery subquery = as(child2.child(), Subquery.class); childLimit = as(subquery.child(), Limit.class); childFilter = as(childLimit.child(), Filter.class); And and = as(childFilter.condition(), And.class); or = as(and.right(), Or.class); kql = as(or.right(), Kql.class); kqlLiteral = as(kql.query(), Literal.class); assertEquals(new BytesRef("gender:female"), kqlLiteral.value()); or = as(or.left(), Or.class); matchOperator = as(or.left(), MatchOperator.class); first_name = as(matchOperator.field(), FieldAttribute.class); assertEquals("first_name", first_name.name()); right = as(matchOperator.query(), Literal.class); assertEquals(new BytesRef("first"), right.value()); matchPhrase = as(or.right(), MatchPhrase.class); last_name = as(matchPhrase.field(), FieldAttribute.class); assertEquals("last_name", last_name.name()); right = as(matchPhrase.query(), Literal.class); assertEquals(new BytesRef("last"), right.value()); and = as(and.left(), 
And.class);
        Match matchFunction = as(and.right(), Match.class);
        FieldAttribute gender = as(matchFunction.field(), FieldAttribute.class);
        assertEquals("gender", gender.name());
        right = as(matchFunction.query(), Literal.class);
        assertEquals(new BytesRef("F"), right.value());
        GreaterThan greaterThan = as(and.left(), GreaterThan.class);
        FieldAttribute languages = as(greaterThan.left(), FieldAttribute.class);
        assertEquals("languages", languages.name());
        right = as(greaterThan.right(), Literal.class);
        assertEquals(0, right.value());
        relation = as(childFilter.child(), EsRelation.class);
        assertEquals("test", relation.indexPattern());
    }

    /*
     * If the field used in a full text function is not present in one of the indices behind the UnionAll,
     * the function can still be pushed down: the branch whose index lacks the field is replaced by an
     * empty LocalRelation, and the filter is pushed into the remaining branch.
     */
    public void testFullTextFunctionCanBePushedDownPastUnionAll() {
        var plan = planSubquery("""
            FROM test, (FROM languages)
            | WHERE match(language_name, "text")
            """);
        // Limit[1000[INTEGER],false,false]
        Limit limit = as(plan, Limit.class);
        UnionAll unionAll = as(limit.child(), UnionAll.class);
        assertEquals(2, unionAll.children().size());
        // First child: LocalRelation with EMPTY data since filter on language_name can't be applied to test index
        LocalRelation child1 = as(unionAll.children().get(0), LocalRelation.class);
        // Second child: languages subquery with MATCH filter pushed down
        EsqlProject child2 = as(unionAll.children().get(1), EsqlProject.class);
        Eval eval2 = as(child2.child(), Eval.class);
        List<Alias> aliases = eval2.fields();
        assertEquals(11, aliases.size());
        Subquery subquery = as(eval2.child(), Subquery.class);
        Limit childLimit = as(subquery.child(), Limit.class);
        Filter filter = as(childLimit.child(), Filter.class);
        Match match = as(filter.condition(), Match.class);
        FieldAttribute languageName = as(match.field(), FieldAttribute.class);
        assertEquals("language_name", languageName.name());
        Literal queryLiteral = as(match.query(), Literal.class);
        assertEquals(new BytesRef("text"), queryLiteral.value());
        EsRelation relation = as(filter.child(), EsRelation.class);
        assertEquals("languages", relation.indexPattern());
    }
}
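/*
 * Possible follow-up, not part of the original class: the tests above repeat the same
 * "cast the condition, check the field name, check the literal value" sequence many times.
 * A helper along these lines (name hypothetical, built only from the as()/assertEquals()
 * utilities already used throughout this class) could shrink them considerably:
 *
 *     private static void assertGreaterThanFilter(Filter filter, String fieldName, int value) {
 *         GreaterThan greaterThan = as(filter.condition(), GreaterThan.class);
 *         FieldAttribute field = as(greaterThan.left(), FieldAttribute.class);
 *         assertEquals(fieldName, field.name());
 *         Literal literal = as(greaterThan.right(), Literal.class);
 *         assertEquals(value, literal.value());
 *     }
 *
 * e.g. assertGreaterThanFilter(childFilter, "languages", 0) in place of the five-line pattern.
 */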
PushDownFilterAndLimitIntoUnionAllTests
java
assertj__assertj-core
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/legacy/RecursiveComparisonAssert_isEqualTo_ignoringFieldsOfTypesMatchingRegexes_Test.java
{ "start": 1663, "end": 5114 }
class ____ extends WithLegacyIntrospectionStrategyBaseTest { @ParameterizedTest(name = "{2}: actual={0} / expected={1} / ignored types={3}") @MethodSource void should_pass_when_fields_whose_types_match_given_regexes_are_ignored(Object actual, Object expected, List<String> regexes, @SuppressWarnings("unused") String testDescription) { assertThat(actual).usingRecursiveComparison(recursiveComparisonConfiguration) .ignoringFieldsOfTypesMatchingRegexes(arrayOf(regexes)) .isEqualTo(expected); } private static Stream<Arguments> should_pass_when_fields_whose_types_match_given_regexes_are_ignored() { Person person1 = new Person("John"); person1.home.address.number = 1; Person person2 = new Person("Jack"); person2.home.address.number = 1; Person person3 = new Person("John"); person3.dateOfBirth = new Date(123); Human person4 = new Human(); person4.name = "Jack"; person4.dateOfBirth = new Date(456); Person person5 = new Person(); person5.home.address.number = 1; Person person6 = new Person(); person6.home.address.number = 2; return Stream.of(arguments(person1, person2, list("java.*String"), "same data and type, except for String"), arguments(person3, person4, list(".*lang\\.String", "java\\.util\\.Date"), "same data, different type, except for String and Date"), arguments(person5, person6, list("org\\.assertj\\.tests\\.core\\.api\\.recursive\\.data.*"), "same data except for one an assertj internal type"), arguments(person5, person6, list(".*Integer"), "primitive types can only be ignored if specifying their corresponding wrapper types")); } @Test void should_fail_when_actual_differs_from_expected_even_when_some_fields_are_ignored_for_types() { // GIVEN Person actual = new Person("John"); actual.id = OptionalLong.of(123); actual.age = OptionalInt.of(30); actual.home.address.number = 1; actual.neighbour = new Person("Jack"); actual.neighbour.home.address.number = 123; actual.neighbour.neighbour = new Person("James"); actual.neighbour.neighbour.age = OptionalInt.of(40); Person expected = new Person("Jack"); expected.id = OptionalLong.of(456); expected.age = OptionalInt.of(50); expected.home.address.number = 2; expected.neighbour = new Person("Jim"); expected.neighbour.home.address.number = 456; expected.neighbour.neighbour = new Person("James"); expected.neighbour.neighbour.age = OptionalInt.of(60); recursiveComparisonConfiguration.ignoreFieldsOfTypesMatchingRegexes(".*lang\\.String", "org\\.assertj.*data\\.Address", "java\\.util\\.OptionalI.*"); // WHEN/THEN compareRecursivelyFailsWithDifferences(actual, expected, javaTypeDiff("id", actual.id, expected.id)); } static
RecursiveComparisonAssert_isEqualTo_ignoringFieldsOfTypesMatchingRegexes_Test
java
resilience4j__resilience4j
resilience4j-timelimiter/src/main/java/io/github/resilience4j/timelimiter/event/TimeLimiterOnTimeoutEvent.java
{ "start": 671, "end": 1058 }
class ____ extends AbstractTimeLimiterEvent { public TimeLimiterOnTimeoutEvent(String timeLimiterName) { super(timeLimiterName, Type.TIMEOUT); } @Override public String toString() { return String.format("%s: TimeLimiter '%s' recorded a timeout exception.", getCreationTime(), getTimeLimiterName()); } }
TimeLimiterOnTimeoutEvent
java
assertj__assertj-core
assertj-core/src/test/java/org/assertj/core/api/floatarray/FloatArrayAssert_hasSizeGreaterThan_Test.java
{ "start": 803, "end": 1154 }
class ____ extends FloatArrayAssertBaseTest { @Override protected FloatArrayAssert invoke_api_method() { return assertions.hasSizeGreaterThan(6); } @Override protected void verify_internal_effects() { verify(arrays).assertHasSizeGreaterThan(getInfo(assertions), getActual(assertions), 6); } }
FloatArrayAssert_hasSizeGreaterThan_Test
java
apache__camel
core/camel-management/src/main/java/org/apache/camel/management/ManagedLoadTimer.java
{ "start": 1220, "end": 1331 }
class ____ extends TimerListenerManager implements NonManagedService { // empty on purpose }
ManagedLoadTimer
java
apache__flink
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/factories/TestFileFactory.java
{ "start": 5897, "end": 6745 }
class ____ implements DynamicTableSink { private final Path path; private TestFileTableSink(Path path) { this.path = path; } @Override public ChangelogMode getChangelogMode(ChangelogMode requestedMode) { return requestedMode; } @Override public SinkRuntimeProvider getSinkRuntimeProvider(Context context) { final FileSink<RowData> fileSink = FileSink.forRowFormat(path, new RowDataEncoder()).build(); return SinkV2Provider.of(fileSink); } @Override public DynamicTableSink copy() { return new TestFileTableSink(path); } @Override public String asSummaryString() { return "test-file-sink"; } } private static
TestFileTableSink
java
apache__rocketmq
remoting/src/main/java/org/apache/rocketmq/remoting/protocol/header/AddBrokerRequestHeader.java
{ "start": 1380, "end": 1744 }
class ____ implements CommandCustomHeader { @CFNullable private String configPath; @Override public void checkFields() throws RemotingCommandException { } public String getConfigPath() { return configPath; } public void setConfigPath(String configPath) { this.configPath = configPath; } }
AddBrokerRequestHeader