Dataset schema (field name, type, and the value statistics reported for each column):

- language: string, 1 distinct value
- repo: string, 60 distinct values
- path: string, 22 to 294 chars
- class_span: dict
- source: string, 13 to 1.16M chars
- target: string, 1 to 113 chars
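Reading the rows below, each `source` snippet masks one declared type name with `____`, and `target` holds the identifier that fills the blank; `class_span` appears to give the start/end character offsets of the snippet within the original file. A minimal sketch of reconstructing a declaration from one row (the `Sample` record here is hypothetical scaffolding, not part of the dataset; field values are copied from the JdbcException row further down):

```java
// Minimal sketch, assuming "____" in `source` is the placeholder that `target` restores.
public class RestoreExample {

    // Hypothetical holder for one dataset row; not part of the dataset itself.
    record Sample(String language, String repo, String path, String source, String target) {}

    public static void main(String[] args) {
        Sample s = new Sample(
                "java",
                "elastic__elasticsearch",
                "x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcException.java",
                "class ____ extends RuntimeException { /* ... */ }",
                "JdbcException");

        // Re-insert the masked identifier to recover the original declaration.
        String restored = s.source().replace("____", s.target());
        System.out.println(restored); // class JdbcException extends RuntimeException { /* ... */ }
    }
}
```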
language: java
repo: elastic__elasticsearch
path: x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetOverallBucketsAction.java
class_span: { "start": 1199, "end": 3303 }
source:
class ____ extends BaseRestHandler { @Override public List<Route> routes() { return List.of( new Route(GET, BASE_PATH + "anomaly_detectors/{" + ID + "}/results/overall_buckets"), new Route(POST, BASE_PATH + "anomaly_detectors/{" + ID + "}/results/overall_buckets") ); } @Override public String getName() { return "ml_get_overall_buckets_action"; } @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { String jobId = restRequest.param(Job.ID.getPreferredName()); final Request request; if (restRequest.hasContentOrSourceParam()) { XContentParser parser = restRequest.contentOrSourceParamParser(); request = Request.parseRequest(jobId, parser); } else { request = new Request(jobId); request.setTopN(restRequest.paramAsInt(Request.TOP_N.getPreferredName(), request.getTopN())); if (restRequest.hasParam(Request.BUCKET_SPAN.getPreferredName())) { request.setBucketSpan(restRequest.param(Request.BUCKET_SPAN.getPreferredName())); } request.setOverallScore(Double.parseDouble(restRequest.param(Request.OVERALL_SCORE.getPreferredName(), "0.0"))); request.setExcludeInterim(restRequest.paramAsBoolean(Request.EXCLUDE_INTERIM.getPreferredName(), request.isExcludeInterim())); if (restRequest.hasParam(Request.START.getPreferredName())) { request.setStart(restRequest.param(Request.START.getPreferredName())); } if (restRequest.hasParam(Request.END.getPreferredName())) { request.setEnd(restRequest.param(Request.END.getPreferredName())); } request.setAllowNoMatch(restRequest.paramAsBoolean(Request.ALLOW_NO_MATCH.getPreferredName(), request.allowNoMatch())); } return channel -> client.execute(GetOverallBucketsAction.INSTANCE, request, new RestToXContentListener<>(channel)); } }
target: RestGetOverallBucketsAction

language: java
repo: square__retrofit
path: retrofit/java-test/src/test/java/retrofit2/RetrofitTest.java
class_span: { "start": 3311, "end": 3372 }
source:
interface ____ extends TypeParam<String> {}
target: ExtendingTypeParam

language: java
repo: apache__dubbo
path: dubbo-remoting/dubbo-remoting-http12/src/main/java/org/apache/dubbo/remoting/http12/rest/OpenAPI.java
class_span: { "start": 1285, "end": 1421 }
source:
interface ____ { * ... * } * </pre> */ @Target(ElementType.TYPE) @Retention(RetentionPolicy.RUNTIME) @Documented public @
target: UserService

language: java
repo: quarkusio__quarkus
path: independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/spi/ResteasyReactiveExceptionMapper.java
class_span: { "start": 135, "end": 391 }
source:
interface ____<E extends Throwable> extends ExceptionMapper<E> { /** * Convenience method that allows for easy access to the request context */ Response toResponse(E exception, ServerRequestContext context); }
target: ResteasyReactiveExceptionMapper
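As context for the quarkus row above, a hedged sketch of implementing this interface (the mapper class and status code are invented; this assumes the Jakarta REST namespace used by current RESTEasy Reactive):

```java
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.ext.Provider;

import org.jboss.resteasy.reactive.server.spi.ResteasyReactiveExceptionMapper;
import org.jboss.resteasy.reactive.server.spi.ServerRequestContext;

// Hedged sketch: maps IllegalStateException to a 500, using the context-aware
// toResponse overload that the interface above adds on top of ExceptionMapper.
@Provider
public class IllegalStateExceptionMapper
        implements ResteasyReactiveExceptionMapper<IllegalStateException> {

    @Override
    public Response toResponse(IllegalStateException e, ServerRequestContext context) {
        return Response.serverError().entity(e.getMessage()).build();
    }

    // Single-argument variant inherited from jakarta.ws.rs.ext.ExceptionMapper.
    @Override
    public Response toResponse(IllegalStateException e) {
        return toResponse(e, null);
    }
}
```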
language: java
repo: junit-team__junit5
path: jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/TestInstanceFactoryTests.java
class_span: { "start": 20117, "end": 20287 }
source:
class ____ { @Test void testShouldNotBeCalled() { callSequence.add("testShouldNotBeCalled"); } } @TestInstance(PER_CLASS) static
target: NullTestInstanceFactoryTestCase

language: java
repo: elastic__elasticsearch
path: server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
class_span: { "start": 2870, "end": 3296 }
source:
class ____ focus is to resolve multi-syntax target expressions to resources or concrete indices. This resolution is influenced * by IndicesOptions and other flags passed through the method call. Examples of the functionality it provides: * - Resolve expressions to concrete indices * - Resolve expressions to data stream names * - Resolve expressions to resources (meaning indices, data streams and aliases) * Note: This
target: main

language: java
repo: apache__camel
path: components/camel-box/camel-box-component/src/generated/java/org/apache/camel/component/box/BoxEventLogsManagerEndpointConfigurationConfigurer.java
class_span: { "start": 736, "end": 10490 }
source:
class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, ExtendedPropertyConfigurerGetter { private static final Map<String, Object> ALL_OPTIONS; static { Map<String, Object> map = new CaseInsensitiveMap(); map.put("AccessTokenCache", com.box.sdk.IAccessTokenCache.class); map.put("After", java.util.Date.class); map.put("ApiName", org.apache.camel.component.box.internal.BoxApiName.class); map.put("AuthenticationType", java.lang.String.class); map.put("Before", java.util.Date.class); map.put("ClientId", java.lang.String.class); map.put("ClientSecret", java.lang.String.class); map.put("EncryptionAlgorithm", com.box.sdk.EncryptionAlgorithm.class); map.put("EnterpriseId", java.lang.String.class); map.put("HttpParams", java.util.Map.class); map.put("MaxCacheEntries", int.class); map.put("MethodName", java.lang.String.class); map.put("Position", java.lang.String.class); map.put("PrivateKeyFile", java.lang.String.class); map.put("PrivateKeyPassword", java.lang.String.class); map.put("PublicKeyId", java.lang.String.class); map.put("SslContextParameters", org.apache.camel.support.jsse.SSLContextParameters.class); map.put("Types", com.box.sdk.BoxEvent.EventType[].class); map.put("UserId", java.lang.String.class); map.put("UserName", java.lang.String.class); map.put("UserPassword", java.lang.String.class); ALL_OPTIONS = map; } @Override public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) { org.apache.camel.component.box.BoxEventLogsManagerEndpointConfiguration target = (org.apache.camel.component.box.BoxEventLogsManagerEndpointConfiguration) obj; switch (ignoreCase ? name.toLowerCase() : name) { case "accesstokencache": case "accessTokenCache": target.setAccessTokenCache(property(camelContext, com.box.sdk.IAccessTokenCache.class, value)); return true; case "after": target.setAfter(property(camelContext, java.util.Date.class, value)); return true; case "apiname": case "apiName": target.setApiName(property(camelContext, org.apache.camel.component.box.internal.BoxApiName.class, value)); return true; case "authenticationtype": case "authenticationType": target.setAuthenticationType(property(camelContext, java.lang.String.class, value)); return true; case "before": target.setBefore(property(camelContext, java.util.Date.class, value)); return true; case "clientid": case "clientId": target.setClientId(property(camelContext, java.lang.String.class, value)); return true; case "clientsecret": case "clientSecret": target.setClientSecret(property(camelContext, java.lang.String.class, value)); return true; case "encryptionalgorithm": case "encryptionAlgorithm": target.setEncryptionAlgorithm(property(camelContext, com.box.sdk.EncryptionAlgorithm.class, value)); return true; case "enterpriseid": case "enterpriseId": target.setEnterpriseId(property(camelContext, java.lang.String.class, value)); return true; case "httpparams": case "httpParams": target.setHttpParams(property(camelContext, java.util.Map.class, value)); return true; case "maxcacheentries": case "maxCacheEntries": target.setMaxCacheEntries(property(camelContext, int.class, value)); return true; case "methodname": case "methodName": target.setMethodName(property(camelContext, java.lang.String.class, value)); return true; case "position": target.setPosition(property(camelContext, java.lang.String.class, value)); return true; case "privatekeyfile": case "privateKeyFile": target.setPrivateKeyFile(property(camelContext, 
java.lang.String.class, value)); return true; case "privatekeypassword": case "privateKeyPassword": target.setPrivateKeyPassword(property(camelContext, java.lang.String.class, value)); return true; case "publickeyid": case "publicKeyId": target.setPublicKeyId(property(camelContext, java.lang.String.class, value)); return true; case "sslcontextparameters": case "sslContextParameters": target.setSslContextParameters(property(camelContext, org.apache.camel.support.jsse.SSLContextParameters.class, value)); return true; case "types": target.setTypes(property(camelContext, com.box.sdk.BoxEvent.EventType[].class, value)); return true; case "userid": case "userId": target.setUserId(property(camelContext, java.lang.String.class, value)); return true; case "username": case "userName": target.setUserName(property(camelContext, java.lang.String.class, value)); return true; case "userpassword": case "userPassword": target.setUserPassword(property(camelContext, java.lang.String.class, value)); return true; default: return false; } } @Override public Map<String, Object> getAllOptions(Object target) { return ALL_OPTIONS; } @Override public Class<?> getOptionType(String name, boolean ignoreCase) { switch (ignoreCase ? name.toLowerCase() : name) { case "accesstokencache": case "accessTokenCache": return com.box.sdk.IAccessTokenCache.class; case "after": return java.util.Date.class; case "apiname": case "apiName": return org.apache.camel.component.box.internal.BoxApiName.class; case "authenticationtype": case "authenticationType": return java.lang.String.class; case "before": return java.util.Date.class; case "clientid": case "clientId": return java.lang.String.class; case "clientsecret": case "clientSecret": return java.lang.String.class; case "encryptionalgorithm": case "encryptionAlgorithm": return com.box.sdk.EncryptionAlgorithm.class; case "enterpriseid": case "enterpriseId": return java.lang.String.class; case "httpparams": case "httpParams": return java.util.Map.class; case "maxcacheentries": case "maxCacheEntries": return int.class; case "methodname": case "methodName": return java.lang.String.class; case "position": return java.lang.String.class; case "privatekeyfile": case "privateKeyFile": return java.lang.String.class; case "privatekeypassword": case "privateKeyPassword": return java.lang.String.class; case "publickeyid": case "publicKeyId": return java.lang.String.class; case "sslcontextparameters": case "sslContextParameters": return org.apache.camel.support.jsse.SSLContextParameters.class; case "types": return com.box.sdk.BoxEvent.EventType[].class; case "userid": case "userId": return java.lang.String.class; case "username": case "userName": return java.lang.String.class; case "userpassword": case "userPassword": return java.lang.String.class; default: return null; } } @Override public Object getOptionValue(Object obj, String name, boolean ignoreCase) { org.apache.camel.component.box.BoxEventLogsManagerEndpointConfiguration target = (org.apache.camel.component.box.BoxEventLogsManagerEndpointConfiguration) obj; switch (ignoreCase ? 
name.toLowerCase() : name) { case "accesstokencache": case "accessTokenCache": return target.getAccessTokenCache(); case "after": return target.getAfter(); case "apiname": case "apiName": return target.getApiName(); case "authenticationtype": case "authenticationType": return target.getAuthenticationType(); case "before": return target.getBefore(); case "clientid": case "clientId": return target.getClientId(); case "clientsecret": case "clientSecret": return target.getClientSecret(); case "encryptionalgorithm": case "encryptionAlgorithm": return target.getEncryptionAlgorithm(); case "enterpriseid": case "enterpriseId": return target.getEnterpriseId(); case "httpparams": case "httpParams": return target.getHttpParams(); case "maxcacheentries": case "maxCacheEntries": return target.getMaxCacheEntries(); case "methodname": case "methodName": return target.getMethodName(); case "position": return target.getPosition(); case "privatekeyfile": case "privateKeyFile": return target.getPrivateKeyFile(); case "privatekeypassword": case "privateKeyPassword": return target.getPrivateKeyPassword(); case "publickeyid": case "publicKeyId": return target.getPublicKeyId(); case "sslcontextparameters": case "sslContextParameters": return target.getSslContextParameters(); case "types": return target.getTypes(); case "userid": case "userId": return target.getUserId(); case "username": case "userName": return target.getUserName(); case "userpassword": case "userPassword": return target.getUserPassword(); default: return null; } } @Override public Object getCollectionValueType(Object target, String name, boolean ignoreCase) { switch (ignoreCase ? name.toLowerCase() : name) { case "httpparams": case "httpParams": return java.lang.Object.class; default: return null; } } }
target: BoxEventLogsManagerEndpointConfigurationConfigurer

language: java
repo: google__auto
path: value/src/test/java/com/google/auto/value/extension/serializable/serializer/utils/TestStringSerializerFactory.java
class_span: { "start": 2207, "end": 2718 }
source:
class ____ implements Serializer { private final TypeMirror typeMirror; TestStringSerializer(TypeMirror typeMirror) { this.typeMirror = typeMirror; } @Override public TypeMirror proxyFieldType() { return typeMirror; } @Override public CodeBlock toProxy(CodeBlock expression) { return CodeBlock.of("$S", "test"); } @Override public CodeBlock fromProxy(CodeBlock expression) { return CodeBlock.of("$S", "test"); } } }
target: TestStringSerializer

language: java
repo: apache__camel
path: core/camel-core/src/test/java/org/apache/camel/processor/aggregator/SplitParallelProcessingStackOverflowIssueTest.java
class_span: { "start": 1091, "end": 2042 }
source:
class ____ extends ContextTestSupport { @Test public void testStackoverflow() throws Exception { int size = 50000; MockEndpoint result = getMockEndpoint("mock:result"); result.expectedMessageCount(size); StringBuilder sb = new StringBuilder(); for (int i = 0; i < size; i++) { sb.append("Line #").append(i); sb.append("\n"); } template.sendBody("direct:start", sb); MockEndpoint.assertIsSatisfied(60, SECONDS, result); } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { @Override public void configure() { from("direct:start") .split().tokenize("\n").streaming().parallelProcessing() .to("log:result?groupSize=100", "mock:result"); } }; } }
target: SplitParallelProcessingStackOverflowIssueTest

language: java
repo: hibernate__hibernate-orm
path: hibernate-core/src/test/java/org/hibernate/orm/test/fetchmode/toone/ManyToOneWithCircularityTest.java
class_span: { "start": 1021, "end": 2912 }
source:
class ____ { @BeforeAll public void setUp(SessionFactoryScope scope) { scope.inTransaction( session -> { Connector connector = new Connector( 1L, "connector" ); Sub sub = new Sub( 2L, "sub", connector ); Main main = new Main( 3L, "main", connector, sub ); session.persist( sub ); session.persist( main ); session.persist( connector ); } ); } @Test public void testQuery(SessionFactoryScope scope) { scope.inTransaction( session -> { List<Main> result = session.createQuery( "select m from Main m", Main.class ).getResultList(); assertThat( result.size() ).isEqualTo( 1 ); Main main = result.get( 0 ); Connector connector = main.getConnector(); assertThat( Hibernate.isInitialized( connector ) ).isTrue(); assertThat( connector ).isNotNull(); Sub sub = main.getSub(); assertThat( sub ).isNotNull(); assertThat( Hibernate.isInitialized( sub ) ).isTrue(); assertThat( connector.getSub() ).isSameAs( sub ); assertThat( sub.getConnector() ).isSameAs( connector ); } ); } @Test public void testQuery2(SessionFactoryScope scope) { scope.inTransaction( session -> { List<Main> result = session.createQuery( "select m from Main m where m.connector.id = 1", Main.class ).getResultList(); assertThat( result.size() ).isEqualTo( 1 ); Main main = result.get( 0 ); Connector connector = main.getConnector(); assertThat( Hibernate.isInitialized( connector ) ).isTrue(); assertThat( connector ).isNotNull(); Sub sub = main.getSub(); assertThat( sub ).isNotNull(); assertThat( Hibernate.isInitialized( sub ) ).isTrue(); assertThat( connector.getSub() ).isSameAs( sub ); assertThat( sub.getConnector() ).isSameAs( connector ); } ); } @Entity(name = "Main") public static
target: ManyToOneWithCircularityTest

language: java
repo: grpc__grpc-java
path: api/src/main/java/io/grpc/ServiceDescriptor.java
class_span: { "start": 4201, "end": 6783 }
source:
class ____ { private Builder(String name) { setName(name); } private String name; private List<MethodDescriptor<?, ?>> methods = new ArrayList<>(); private Object schemaDescriptor; /** * Sets the name. This should be non-{@code null}. * * @param name The name of the service. * @return this builder. * @since 1.1.0 */ @ExperimentalApi("https://github.com/grpc/grpc-java/issues/2666") public Builder setName(String name) { this.name = checkNotNull(name, "name"); return this; } /** * Adds a method to this service. This should be non-{@code null}. * * @param method the method to add to the descriptor. * @return this builder. * @since 1.1.0 */ public Builder addMethod(MethodDescriptor<?, ?> method) { methods.add(checkNotNull(method, "method")); return this; } /** * Currently not exposed. Bulk adds methods to this builder. * * @param methods the methods to add. * @return this builder. */ private Builder addAllMethods(Collection<MethodDescriptor<?, ?>> methods) { this.methods.addAll(methods); return this; } /** * Sets the schema descriptor for this builder. A schema descriptor is an object that is not * used by gRPC core but includes information related to the service. The type of the object * is specific to the consumer, so both the code calling this and the code calling * {@link ServiceDescriptor#getSchemaDescriptor()} must coordinate. For example, protobuf * generated code sets this value, in order to be consumed by the server reflection service. * * @param schemaDescriptor an object that describes the service structure. Should be immutable. * @return this builder. * @since 1.1.0 */ public Builder setSchemaDescriptor(@Nullable Object schemaDescriptor) { this.schemaDescriptor = schemaDescriptor; return this; } /** * Constructs a new {@link ServiceDescriptor}. {@link #setName} should have been called with a * non-{@code null} value before calling this. * * @return a new ServiceDescriptor * @since 1.1.0 */ public ServiceDescriptor build() { return new ServiceDescriptor(this); } } @Override public String toString() { return MoreObjects.toStringHelper(this) .add("name", name) .add("schemaDescriptor", schemaDescriptor) .add("methods", methods) .omitNullValues() .toString(); } }
target: Builder
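A usage note for the grpc row: this Builder is obtained through `ServiceDescriptor.newBuilder(String)` in the same file, so a hedged sketch of the fluent flow looks like this (service name invented; a real service would also call `addMethod` for each RPC):

```java
import io.grpc.ServiceDescriptor;

// Hedged sketch of driving the Builder shown above.
public class ServiceDescriptorExample {
    public static void main(String[] args) {
        ServiceDescriptor descriptor = ServiceDescriptor.newBuilder("example.Greeter")
                // Optional, consumer-specific object (e.g. used by server reflection).
                .setSchemaDescriptor("greeter.proto")
                .build();
        System.out.println(descriptor);
    }
}
```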
language: java
repo: elastic__elasticsearch
path: x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcException.java
class_span: { "start": 296, "end": 503 }
source:
class ____ extends RuntimeException { JdbcException(String message) { super(message); } JdbcException(Throwable cause, String message) { super(message, cause); } }
target: JdbcException

language: java
repo: google__auto
path: value/src/main/java/com/google/auto/value/processor/BuilderRequiredProperties.java
class_span: { "start": 2691, "end": 5816 }
source:
class ____ { static final BuilderRequiredProperties EMPTY = of(ImmutableSet.of(), ImmutableSet.of()); // Bitmasks are a bit fiddly because we use them in a couple of ways. The first way is where // we are just using the bitmasks to track which primitive properties have been set. Then if // we have three primitive properties we can just check that the bitmask is (1 << 3) - 1, the // all-ones bitmask, to see that they have all been set. The second way is when we are also // handling optional Kotlin parameters. Then the bitmasks are different: we have one bit for every // property, primitive or not, optional or not. To check that the required primitive properties // have been set, we need to check specific bits. For example if properties 1 and 3 are primitive // then we need to check (~set$0 & ((1 << 1) | (1 << 3))) == 0. That tests that bits 1 and 3 are // set, since if either of them is 0 then it will be 1 in ~set$0 and will survive the AND. We can // also isolate the bits representing optional Kotlin parameters similarly, and pass those to the // special Kotlin constructor that handles default parameters. Kotlin uses bitmasks for that too: // they have one bit per parameter, optional or not, but only the bits for optional parameters // matter. We isolate those bits with `&` operations similar to what was described for primitive // properties. We also need the all-ones bitmask to implement a "copy constructor" builder, which // starts out with all properties set. /** All required properties. */ final ImmutableSet<Property> requiredProperties; /** * The bit index for each tracked property. Properties are tracked if they are primitive, or if * this is a Kotlin constructor with default parameters. Non-tracked properties do not appear in * this map. */ final ImmutableMap<Property, Integer> trackedPropertyToIndex; /** * The integer fields that store the bitmask. In the usual case, where there are ≤32 tracked * properties, we can pack the bitmask into one integer field. Its type is the smallest one that * fits the required number of bits, for example {@code byte} if there are ≤8 tracked properties. * * <p>If there are {@literal >32} tracked properties, we will pack them into as few integer fields * as possible. For example if there are 75 tracked properties (this can happen) then we will put * numbers 0 to 31 in an {@code int}, 32 to 63 in a second {@code int}, and 64 to 75 in a {@code * short}. * * <p>When there are {@literal >32} tracked properties, we could potentially pack them better if * we used {@code long}. But sometimes AutoValue code gets translated into JavaScript, which * doesn't handle long values natively. By the time you have that many properties you are probably * not going to notice the difference between 5 ints or 2 longs plus an int. */ final ImmutableList<BitmaskField> bitmaskFields; /** * Represents a field in which we will record which tracked properties from a certain set have * been given a value. */ private static
target: BuilderRequiredProperties
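The bitmask arithmetic that the auto-value row's comment walks through can be checked numerically; a hedged sketch of the two tests it describes (the all-ones check, and the `~set$0 & mask` check for required bits 1 and 3):

```java
// Hedged numeric sketch of the bitmask checks described in the record above.
public class BitmaskExample {
    public static void main(String[] args) {
        // Simple case: three primitive properties, one bit each.
        int set = 0;
        set |= 1 << 0; // property 0 assigned
        set |= 1 << 1; // property 1 assigned
        set |= 1 << 2; // property 2 assigned
        System.out.println(set == (1 << 3) - 1); // true: all-ones, everything set

        // Kotlin-defaults case: only properties 1 and 3 are required primitives.
        int required = (1 << 1) | (1 << 3);
        int set0 = (1 << 0) | (1 << 1) | (1 << 3); // bits actually assigned
        System.out.println((~set0 & required) == 0); // true: bits 1 and 3 are both set
    }
}
```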
language: java
repo: apache__camel
path: components/camel-sjms/src/test/java/org/apache/camel/component/sjms/consumer/AutowiredConnectionFactoryTest.java
class_span: { "start": 1097, "end": 2388 }
source:
class ____ extends JmsTestSupport { private static final String SJMS_QUEUE_NAME = "sjms:queue:in.only.consumer.queue.AutowiredConnectionFactoryTest"; private static final String MOCK_RESULT = "mock:result"; @Override protected CamelContext createCamelContext() throws Exception { // do not automatic add sjms component as it will be manual configured with CF addSjmsComponent = false; CamelContext context = super.createCamelContext(); // lets autowire the connection factory so we move it to the registry context.getRegistry().bind("myCF", connectionFactory); return context; } @Test public void testAutowired() throws Exception { final String expectedBody = "Hello World"; MockEndpoint mock = getMockEndpoint(MOCK_RESULT); mock.expectedMessageCount(1); mock.expectedBodiesReceived(expectedBody); template.sendBody(SJMS_QUEUE_NAME, expectedBody); mock.assertIsSatisfied(); } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { public void configure() { from(SJMS_QUEUE_NAME) .to(MOCK_RESULT); } }; } }
target: AutowiredConnectionFactoryTest

language: java
repo: apache__hadoop
path: hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/launcher/TestServiceInterruptHandling.java
class_span: { "start": 4077, "end": 4346 }
source:
class ____ implements IrqHandler.Interrupted { public IrqHandler.InterruptData interruptData; @Override public void interrupted(IrqHandler.InterruptData data) { LOG.info("Interrupt caught"); this.interruptData = data; } } }
target: InterruptCatcher

language: java
repo: hibernate__hibernate-orm
path: hibernate-core/src/test/java/org/hibernate/orm/test/serialization/entity/BuildRecord.java
class_span: { "start": 238, "end": 538 }
source:
class ____ { @EmbeddedId private BuildRecordId id; private String name; public BuildRecordId getId() { return id; } public void setId(BuildRecordId id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } }
target: BuildRecord

language: java
repo: apache__spark
path: sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableProvider.java
class_span: { "start": 1491, "end": 1594 }
source:
interface ____ to return a {@link Table} for read/write. * </p> * * @since 3.0.0 */ @Evolving public
target: is

language: java
repo: apache__flink
path: flink-connectors/flink-connector-files/src/main/java/org/apache/flink/connector/file/src/util/RecyclableIterator.java
class_span: { "start": 1021, "end": 1162 }
source:
class ____ iterators that accept a recycler. * * @param <E> The type of the records returned by the iterator. */ @Internal public abstract
target: for

language: java
repo: lettuce-io__lettuce-core
path: src/main/java/io/lettuce/core/dynamic/support/ReflectionUtils.java
class_span: { "start": 2134, "end": 7250 }
source:
class ____ introspect * @param name the name of the method * @param paramTypes the parameter types of the method (may be {@code null} to indicate any signature) * @return the Method object, or {@code null} if none found */ public static Method findMethod(Class<?> clazz, String name, Class<?>... paramTypes) { LettuceAssert.notNull(clazz, "Class must not be null"); LettuceAssert.notNull(name, "Method name must not be null"); Class<?> searchType = clazz; while (searchType != null) { Method[] methods = (searchType.isInterface() ? searchType.getMethods() : getDeclaredMethods(searchType)); for (Method method : methods) { if (name.equals(method.getName()) && (paramTypes == null || Arrays.equals(paramTypes, method.getParameterTypes()))) { return method; } } searchType = searchType.getSuperclass(); } return null; } /** * Invoke the specified {@link Method} against the supplied target object with no arguments. The target object can be * {@code null} when invoking a static {@link Method}. * <p> * Thrown exceptions are handled via a call to {@link #handleReflectionException}. * * @param method the method to invoke * @param target the target object to invoke the method on * @return the invocation result, if any * @see #invokeMethod(java.lang.reflect.Method, Object, Object[]) */ public static Object invokeMethod(Method method, Object target) { return invokeMethod(method, target, new Object[0]); } /** * Invoke the specified {@link Method} against the supplied target object with the supplied arguments. The target object can * be {@code null} when invoking a static {@link Method}. * <p> * Thrown exceptions are handled via a call to {@link #handleReflectionException}. * * @param method the method to invoke * @param target the target object to invoke the method on * @param args the invocation arguments (may be {@code null}) * @return the invocation result, if any */ public static Object invokeMethod(Method method, Object target, Object... args) { try { return method.invoke(target, args); } catch (Exception ex) { handleReflectionException(ex); } throw new IllegalStateException("Should never get here"); } /** * Handle the given reflection exception. Should only be called if no checked exception is expected to be thrown by the * target method. * <p> * Throws the underlying RuntimeException or Error in case of an InvocationTargetException with such a root cause. Throws an * IllegalStateException with an appropriate message or UndeclaredThrowableException otherwise. * * @param ex the reflection exception to handle */ public static void handleReflectionException(Exception ex) { if (ex instanceof NoSuchMethodException) { throw new IllegalStateException("Method not found: " + ex.getMessage()); } if (ex instanceof IllegalAccessException) { throw new IllegalStateException("Could not access method: " + ex.getMessage()); } if (ex instanceof InvocationTargetException) { handleInvocationTargetException((InvocationTargetException) ex); } if (ex instanceof RuntimeException) { throw (RuntimeException) ex; } throw new UndeclaredThrowableException(ex); } /** * Handle the given invocation target exception. Should only be called if no checked exception is expected to be thrown by * the target method. * <p> * Throws the underlying RuntimeException or Error in case of such a root cause. Throws an UndeclaredThrowableException * otherwise. 
* * @param ex the invocation target exception to handle */ public static void handleInvocationTargetException(InvocationTargetException ex) { rethrowRuntimeException(ex.getTargetException()); } /** * Rethrow the given {@link Throwable exception}, which is presumably the <em>target exception</em> of an * {@link InvocationTargetException}. Should only be called if no checked exception is expected to be thrown by the target * method. * <p> * Rethrows the underlying exception cast to a {@link RuntimeException} or {@link Error} if appropriate; otherwise, throws * an {@link UndeclaredThrowableException}. * * @param ex the exception to rethrow * @throws RuntimeException the rethrown exception */ public static void rethrowRuntimeException(Throwable ex) { if (ex instanceof RuntimeException) { throw (RuntimeException) ex; } if (ex instanceof Error) { throw (Error) ex; } throw new UndeclaredThrowableException(ex); } /** * Perform the given callback operation on all matching methods of the given
target: to
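The lettuce row documents the familiar findMethod/invokeMethod pair; a hedged, JDK-only sketch of the same hierarchy walk and invocation (the looked-up method is chosen arbitrarily):

```java
import java.lang.reflect.Method;

// Hedged sketch of the findMethod/invokeMethod pattern from the record above,
// using only java.lang.reflect so it runs standalone.
public class ReflectionExample {
    public static void main(String[] args) throws Exception {
        // Walk the class hierarchy by name, as findMethod does.
        Method found = null;
        for (Class<?> c = String.class; c != null && found == null; c = c.getSuperclass()) {
            for (Method m : c.getDeclaredMethods()) {
                if (m.getName().equals("isEmpty") && m.getParameterCount() == 0) {
                    found = m;
                    break;
                }
            }
        }
        // Invoke against a target, as invokeMethod does; the real helper would
        // route any exception through handleReflectionException instead.
        Object result = found.invoke("hello");
        System.out.println(result); // false
    }
}
```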
language: java
repo: elastic__elasticsearch
path: x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java
class_span: { "start": 25522, "end": 25733 }
source:
class ____ extends SpatialPlugin { @Override protected XPackLicenseState getLicenseState() { return new XPackLicenseState(System::currentTimeMillis); } } }
target: TestSpatialPlugin

language: java
repo: apache__hadoop
path: hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ProtocTestMojo.java
class_span: { "start": 1204, "end": 2048 }
source:
class ____ extends AbstractMojo { @Parameter(defaultValue="${project}", readonly=true) private MavenProject project; @Parameter private File[] imports; @Parameter(defaultValue= "${project.build.directory}/generated-test-sources/java") private File output; @Parameter(required=true) private FileSet source; @Parameter(defaultValue="protoc") private String protocCommand; @Parameter(required=true) private String protocVersion; @Parameter(defaultValue = "${project.build.directory}/hadoop-maven-plugins-protoc-checksums.json") private String checksumPath; public void execute() throws MojoExecutionException { final ProtocRunner protoc = new ProtocRunner(project, imports, output, source, protocCommand, protocVersion, checksumPath, this, true); protoc.execute(); } }
target: ProtocTestMojo

language: java
repo: alibaba__nacos
path: common/src/main/java/com/alibaba/nacos/common/remote/client/grpc/GrpcConstants.java
class_span: { "start": 3443, "end": 4169 }
source:
interface ____ { } static { Class clazz = GrpcConstants.class; Field[] declaredFields = clazz.getDeclaredFields(); for (Field declaredField : declaredFields) { declaredField.setAccessible(true); if (declaredField.getType().equals(String.class) && null != declaredField.getAnnotation( GRpcConfigLabel.class)) { try { CONFIG_NAMES.add((String) declaredField.get(null)); } catch (IllegalAccessException ignored) { } } } } public static Set<String> getRpcParams() { return Collections.unmodifiableSet(CONFIG_NAMES); } }
target: GRpcConfigLabel
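The nacos row's static block shows the reflective field-scan idiom behind `getRpcParams()`; a hedged, self-contained sketch of the same pattern (annotation and field names made up):

```java
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Field;
import java.util.HashSet;
import java.util.Set;

// Hedged sketch: collect the values of all static String fields that carry a
// marker annotation, mirroring the GrpcConstants static block above.
public class ConfigLabelScan {

    @Target(ElementType.FIELD)
    @Retention(RetentionPolicy.RUNTIME)
    @interface ConfigLabel {}

    @ConfigLabel static final String KEY_A = "demo.remote.a";
    @ConfigLabel static final String KEY_B = "demo.remote.b";
    static final String NOT_LABELLED = "ignored";

    public static void main(String[] args) throws IllegalAccessException {
        Set<String> names = new HashSet<>();
        for (Field f : ConfigLabelScan.class.getDeclaredFields()) {
            f.setAccessible(true);
            if (f.getType() == String.class && f.getAnnotation(ConfigLabel.class) != null) {
                names.add((String) f.get(null)); // static field: null receiver
            }
        }
        System.out.println(names); // [demo.remote.a, demo.remote.b] in some order
    }
}
```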
language: java
repo: quarkusio__quarkus
path: independent-projects/tools/analytics-common/src/main/java/io/quarkus/analytics/dto/config/AnalyticsRemoteConfig.java
class_span: { "start": 156, "end": 927 }
source:
interface ____ { /** * @return true if the analytics is enabled * @return */ boolean isActive(); /** * List of anonymous UUID representing the users who will not send analytics. * The data from particular UUIDs might contain issues and generation will be disabled at the source. * * @return */ List<String> getDenyAnonymousIds(); /** * List of quarkus versions that will not send analytics. * The data from particular versions might contain issues and generation will be disabled at the source. * * @return */ List<String> getDenyQuarkusVersions(); /** * Configuration refresh interval * * @return */ Duration getRefreshInterval(); }
target: AnalyticsRemoteConfig

language: java
repo: apache__flink
path: flink-runtime/src/main/java/org/apache/flink/runtime/io/compression/BlockCompressor.java
class_span: { "start": 1054, "end": 2539 }
source:
interface ____ { /** Get the max compressed size for a given original size. */ int getMaxCompressedSize(int srcSize); /** * Compress source data read from ({@link ByteBuffer#position()} + {@code srcOff}), and write * the compressed data to dst. * * @param src Uncompressed data to read from * @param srcOff The start offset of uncompressed data * @param srcLen The length of data which want to be compressed * @param dst The target to write compressed data * @param dstOff The start offset to write the compressed data * @return Length of compressed data * @throws BufferCompressionException if exception thrown when compressing */ int compress(ByteBuffer src, int srcOff, int srcLen, ByteBuffer dst, int dstOff) throws BufferCompressionException; /** * Compress data read from src, and write the compressed data to dst. * * @param src Uncompressed data to read from * @param srcOff The start offset of uncompressed data * @param srcLen The length of data which want to be compressed * @param dst The target to write compressed data * @param dstOff The start offset to write the compressed data * @return Length of compressed data * @throws BufferCompressionException if exception thrown when compressing */ int compress(byte[] src, int srcOff, int srcLen, byte[] dst, int dstOff) throws BufferCompressionException; }
target: BlockCompressor
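The flink row's javadoc pins down the compressor contract (source offsets in, compressed length out); a hedged pass-through implementation that only serves to make the offset semantics concrete (it performs no real compression, and the ByteBuffer indexing follows my reading of the javadoc):

```java
import java.nio.ByteBuffer;

import org.apache.flink.runtime.io.compression.BlockCompressor;
import org.apache.flink.runtime.io.compression.BufferCompressionException;

// Hedged sketch: a copy-through "compressor" honouring the interface contract.
public class CopyThroughCompressor implements BlockCompressor {

    @Override
    public int getMaxCompressedSize(int srcSize) {
        return srcSize; // this trivial scheme never expands the data
    }

    @Override
    public int compress(ByteBuffer src, int srcOff, int srcLen, ByteBuffer dst, int dstOff)
            throws BufferCompressionException {
        // Per the javadoc, reading starts at src.position() + srcOff.
        for (int i = 0; i < srcLen; i++) {
            dst.put(dst.position() + dstOff + i, src.get(src.position() + srcOff + i));
        }
        return srcLen; // length of the "compressed" data
    }

    @Override
    public int compress(byte[] src, int srcOff, int srcLen, byte[] dst, int dstOff)
            throws BufferCompressionException {
        System.arraycopy(src, srcOff, dst, dstOff, srcLen);
        return srcLen;
    }
}
```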
language: java
repo: elastic__elasticsearch
path: server/src/main/java/org/elasticsearch/script/Metadata.java
class_span: { "start": 1377, "end": 1410 }
source:
interface ____ setters. */ public
target: or

language: java
repo: hibernate__hibernate-orm
path: hibernate-core/src/main/java/org/hibernate/ObjectDeletedException.java
class_span: { "start": 235, "end": 692 }
source:
class ____ extends UnresolvableObjectException { /** * Constructs an {@code ObjectDeletedException} using the given information. * * @param message A message explaining the exception condition * @param identifier The identifier of the entity * @param entityName The name of the entity */ public ObjectDeletedException(String message, Object identifier, String entityName) { super( message, identifier, entityName ); } }
target: ObjectDeletedException

language: java
repo: elastic__elasticsearch
path: x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneUnusedIndexMode.java
class_span: { "start": 791, "end": 1317 }
source:
class ____ extends OptimizerRules.OptimizerRule<EsRelation> { public PruneUnusedIndexMode() { super(OptimizerRules.TransformDirection.UP); } @Override protected LogicalPlan rule(EsRelation r) { if (r.indexMode() == IndexMode.TIME_SERIES) { if (Expressions.anyMatch(r.output(), a -> MetadataAttribute.TSID_FIELD.equals(((Attribute) a).name())) == false) { return r.withIndexMode(IndexMode.STANDARD); } } return r; } }
target: PruneUnusedIndexMode

language: java
repo: hibernate__hibernate-orm
path: hibernate-core/src/main/java/org/hibernate/internal/OptimisticLockHelper.java
class_span: { "start": 733, "end": 4323 }
source:
class ____ { private OptimisticLockHelper() { //utility class, not to be constructed } public static void forceVersionIncrement(Object object, EntityEntry entry, SharedSessionContractImplementor session) { final var persister = entry.getPersister(); final Object previousVersion = entry.getVersion(); SoftLock lock = null; final Object cacheKey; if ( persister.canWriteToCache() ) { final var cache = persister.getCacheAccessStrategy(); cacheKey = cache.generateCacheKey( entry.getId(), persister, session.getFactory(), session.getTenantIdentifier() ); lock = cache.lockItem( session, cacheKey, previousVersion ); } else { cacheKey = null; } final Object nextVersion = persister.forceVersionIncrement( entry.getId(), previousVersion, session ); entry.forceLocked( object, nextVersion ); if ( persister.canWriteToCache() ) { final Object cacheEntry = updateCacheItem( object, previousVersion, nextVersion, cacheKey, entry, persister, session ); session.getTransactionCompletionCallbacks() .registerCallback( new CacheCleanupProcess( cacheKey, persister, previousVersion, nextVersion, lock, cacheEntry ) ); } } private static Object updateCacheItem( Object entity, Object previousVersion, Object nextVersion, Object cacheKey, EntityEntry entry, EntityPersister persister, SharedSessionContractImplementor session) { if ( isCacheInvalidationRequired( persister, session ) || entry.getStatus() != Status.MANAGED ) { persister.getCacheAccessStrategy().remove( session, cacheKey ); } else if ( session.getCacheMode().isPutEnabled() ) { //TODO: inefficient if that cache is just going to ignore the updated state! final Object cacheEntry = buildStructuredCacheEntry( entity, nextVersion, entry.getLoadedState(), persister, session ); final boolean put = updateCache( persister, cacheEntry, previousVersion, nextVersion, cacheKey, session ); final var statistics = session.getFactory().getStatistics(); if ( put && statistics.isStatisticsEnabled() ) { statistics.entityCachePut( getRootEntityRole( persister ), persister.getCacheAccessStrategy().getRegion().getName() ); } return cacheEntry; } return null; } private static boolean updateCache( EntityPersister persister, Object cacheEntry, Object previousVersion, Object nextVersion, Object cacheKey, SharedSessionContractImplementor session) { final var eventMonitor = session.getEventMonitor(); final var cachePutEvent = eventMonitor.beginCachePutEvent(); final var cacheAccessStrategy = persister.getCacheAccessStrategy(); final var eventListenerManager = session.getEventListenerManager(); boolean update = false; try { eventListenerManager.cachePutStart(); update = cacheAccessStrategy.update( session, cacheKey, cacheEntry, nextVersion, previousVersion ); return update; } finally { eventMonitor.completeCachePutEvent( cachePutEvent, session, cacheAccessStrategy, persister, update, EventMonitor.CacheActionDescription.ENTITY_UPDATE ); eventListenerManager.cachePutEnd(); } } private static boolean isCacheInvalidationRequired( EntityPersister persister, SharedSessionContractImplementor session) { return persister.isCacheInvalidationRequired() // the cache has to be invalidated when CacheMode is GET or IGNORE || !session.getCacheMode().isPutEnabled(); } private static
target: OptimisticLockHelper

language: java
repo: spring-projects__spring-boot
path: module/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/endpoint/annotation/DiscoveredOperationsFactoryTests.java
class_span: { "start": 5840, "end": 5996 }
source:
class ____ { @ReadOperation String read() { return "read"; } @WriteOperation String write() { return "write"; } } static
target: ExampleMultiple

language: java
repo: apache__hadoop
path: hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
class_span: { "start": 2684, "end": 31512 }
source:
class ____ implements INodeAttributes, Diff.Element<byte[]> { public static final Logger LOG = LoggerFactory.getLogger(INode.class); /** parent is either an {@link INodeDirectory} or an {@link INodeReference}.*/ private INode parent = null; INode(INode parent) { this.parent = parent; } /** Get inode id */ public abstract long getId(); /** * Check whether this is the root inode. */ final boolean isRoot() { return getLocalNameBytes().length == 0; } /** Get the {@link PermissionStatus} */ public abstract PermissionStatus getPermissionStatus(int snapshotId); /** The same as getPermissionStatus(null). */ final PermissionStatus getPermissionStatus() { return getPermissionStatus(Snapshot.CURRENT_STATE_ID); } /** * @param snapshotId * if it is not {@link Snapshot#CURRENT_STATE_ID}, get the result * from the given snapshot; otherwise, get the result from the * current inode. * @return user name */ abstract String getUserName(int snapshotId); /** The same as getUserName(Snapshot.CURRENT_STATE_ID). */ @Override public final String getUserName() { return getUserName(Snapshot.CURRENT_STATE_ID); } /** Set user */ abstract void setUser(String user); /** Set user */ final INode setUser(String user, int latestSnapshotId) { recordModification(latestSnapshotId); setUser(user); return this; } /** * @param snapshotId * if it is not {@link Snapshot#CURRENT_STATE_ID}, get the result * from the given snapshot; otherwise, get the result from the * current inode. * @return group name */ abstract String getGroupName(int snapshotId); /** The same as getGroupName(Snapshot.CURRENT_STATE_ID). */ @Override public final String getGroupName() { return getGroupName(Snapshot.CURRENT_STATE_ID); } /** Set group */ abstract void setGroup(String group); /** Set group */ final INode setGroup(String group, int latestSnapshotId) { recordModification(latestSnapshotId); setGroup(group); return this; } /** * @param snapshotId * if it is not {@link Snapshot#CURRENT_STATE_ID}, get the result * from the given snapshot; otherwise, get the result from the * current inode. * @return permission. */ abstract FsPermission getFsPermission(int snapshotId); /** The same as getFsPermission(Snapshot.CURRENT_STATE_ID). */ @Override public final FsPermission getFsPermission() { return getFsPermission(Snapshot.CURRENT_STATE_ID); } /** Set the {@link FsPermission} of this {@link INode} */ abstract void setPermission(FsPermission permission); /** Set the {@link FsPermission} of this {@link INode} */ INode setPermission(FsPermission permission, int latestSnapshotId) { recordModification(latestSnapshotId); setPermission(permission); return this; } abstract AclFeature getAclFeature(int snapshotId); @Override public final AclFeature getAclFeature() { return getAclFeature(Snapshot.CURRENT_STATE_ID); } abstract void addAclFeature(AclFeature aclFeature); final INode addAclFeature(AclFeature aclFeature, int latestSnapshotId) { recordModification(latestSnapshotId); addAclFeature(aclFeature); return this; } abstract void removeAclFeature(); final INode removeAclFeature(int latestSnapshotId) { recordModification(latestSnapshotId); removeAclFeature(); return this; } /** * @param snapshotId * if it is not {@link Snapshot#CURRENT_STATE_ID}, get the result * from the given snapshot; otherwise, get the result from the * current inode. 
* @return XAttrFeature */ abstract XAttrFeature getXAttrFeature(int snapshotId); @Override public final XAttrFeature getXAttrFeature() { return getXAttrFeature(Snapshot.CURRENT_STATE_ID); } /** * Set <code>XAttrFeature</code> */ abstract void addXAttrFeature(XAttrFeature xAttrFeature); final INode addXAttrFeature(XAttrFeature xAttrFeature, int latestSnapshotId) { recordModification(latestSnapshotId); addXAttrFeature(xAttrFeature); return this; } /** * Remove <code>XAttrFeature</code> */ abstract void removeXAttrFeature(); final INode removeXAttrFeature(int lastestSnapshotId) { recordModification(lastestSnapshotId); removeXAttrFeature(); return this; } /** * @return if the given snapshot id is {@link Snapshot#CURRENT_STATE_ID}, * return this; otherwise return the corresponding snapshot inode. */ public INodeAttributes getSnapshotINode(final int snapshotId) { return this; } /** Is this inode in the current state? */ public boolean isInCurrentState() { if (isRoot()) { return true; } final INodeDirectory parentDir = getParent(); if (parentDir == null) { return false; // this inode is only referenced in snapshots } if (!parentDir.isInCurrentState()) { return false; } final INode child = parentDir.getChild(getLocalNameBytes(), Snapshot.CURRENT_STATE_ID); if (this == child) { return true; } return child != null && child.isReference() && this.equals(child.asReference().getReferredINode()); } /** Is this inode in the latest snapshot? */ public final boolean isInLatestSnapshot(final int latestSnapshotId) { if (latestSnapshotId == Snapshot.CURRENT_STATE_ID || latestSnapshotId == Snapshot.NO_SNAPSHOT_ID) { return false; } // if parent is a reference node, parent must be a renamed node. We can // stop the check at the reference node. if (parent != null && parent.isReference()) { return true; } final INodeDirectory parentDir = getParent(); if (parentDir == null) { // root return true; } if (!parentDir.isInLatestSnapshot(latestSnapshotId)) { return false; } final INode child = parentDir.getChild(getLocalNameBytes(), latestSnapshotId); if (this == child) { return true; } return child != null && child.isReference() && this == child.asReference().getReferredINode(); } /** @return true if the given inode is an ancestor directory of this inode. */ public final boolean isAncestorDirectory(final INodeDirectory dir) { for(INodeDirectory p = getParent(); p != null; p = p.getParent()) { if (p == dir) { return true; } } return false; } /** * When {@link #recordModification} is called on a referred node, * this method tells which snapshot the modification should be * associated with: the snapshot that belongs to the SRC tree of the rename * operation, or the snapshot belonging to the DST tree. * * @param latestInDst * id of the latest snapshot in the DST tree above the reference node * @return True: the modification should be recorded in the snapshot that * belongs to the SRC tree. False: the modification should be * recorded in the snapshot that belongs to the DST tree. */ public final boolean shouldRecordInSrcSnapshot(final int latestInDst) { Preconditions.checkState(!isReference()); if (latestInDst == Snapshot.CURRENT_STATE_ID) { return true; } INodeReference withCount = getParentReference(); if (withCount != null) { int dstSnapshotId = withCount.getParentReference().getDstSnapshotId(); if (dstSnapshotId != Snapshot.CURRENT_STATE_ID && dstSnapshotId >= latestInDst) { return true; } } return false; } /** * This inode is being modified. 
The previous version of the inode needs to * be recorded in the latest snapshot. * * @param latestSnapshotId The id of the latest snapshot that has been taken. * Note that it is {@link Snapshot#CURRENT_STATE_ID} * if no snapshots have been taken. */ abstract void recordModification(final int latestSnapshotId); /** Check whether it's a reference. */ public boolean isReference() { return false; } /** Cast this inode to an {@link INodeReference}. */ public INodeReference asReference() { throw new IllegalStateException("Current inode is not a reference: " + this.toDetailString()); } /** * Check whether it's a file. */ public boolean isFile() { return false; } /** * Check if this inode itself has a storage policy set. */ public boolean isSetStoragePolicy() { if (isSymlink()) { return false; } return getLocalStoragePolicyID() != HdfsConstants.BLOCK_STORAGE_POLICY_ID_UNSPECIFIED; } /** Cast this inode to an {@link INodeFile}. */ public INodeFile asFile() { throw new IllegalStateException("Current inode is not a file: " + this.toDetailString()); } /** * Check whether it's a directory */ public boolean isDirectory() { return false; } /** Cast this inode to an {@link INodeDirectory}. */ public INodeDirectory asDirectory() { throw new IllegalStateException("Current inode is not a directory: " + this.toDetailString()); } /** * Check whether it's a symlink */ public boolean isSymlink() { return false; } /** Cast this inode to an {@link INodeSymlink}. */ public INodeSymlink asSymlink() { throw new IllegalStateException("Current inode is not a symlink: " + this.toDetailString()); } /** * Clean the subtree under this inode and collect the blocks from the descents * for further block deletion/update. The current inode can either resides in * the current tree or be stored as a snapshot copy. * * <pre> * In general, we have the following rules. * 1. When deleting a file/directory in the current tree, we have different * actions according to the type of the node to delete. * * 1.1 The current inode (this) is an {@link INodeFile}. * 1.1.1 If {@code prior} is null, there is no snapshot taken on ancestors * before. Thus we simply destroy (i.e., to delete completely, no need to save * snapshot copy) the current INode and collect its blocks for further * cleansing. * 1.1.2 Else do nothing since the current INode will be stored as a snapshot * copy. * * 1.2 The current inode is an {@link INodeDirectory}. * 1.2.1 If {@code prior} is null, there is no snapshot taken on ancestors * before. Similarly, we destroy the whole subtree and collect blocks. * 1.2.2 Else do nothing with the current INode. Recursively clean its * children. * * 1.3 The current inode is a file with snapshot. * Call recordModification(..) to capture the current states. * Mark the INode as deleted. * * 1.4 The current inode is an {@link INodeDirectory} with snapshot feature. * Call recordModification(..) to capture the current states. * Destroy files/directories created after the latest snapshot * (i.e., the inodes stored in the created list of the latest snapshot). * Recursively clean remaining children. * * 2. When deleting a snapshot. * 2.1 To clean {@link INodeFile}: do nothing. * 2.2 To clean {@link INodeDirectory}: recursively clean its children. * 2.3 To clean INodeFile with snapshot: delete the corresponding snapshot in * its diff list. * 2.4 To clean {@link INodeDirectory} with snapshot: delete the corresponding * snapshot in its diff list. Recursively clean its children. 
* </pre> * * @param reclaimContext * Record blocks and inodes that need to be reclaimed. * @param snapshotId * The id of the snapshot to delete. * {@link Snapshot#CURRENT_STATE_ID} means to delete the current * file/directory. * @param priorSnapshotId * The id of the latest snapshot before the to-be-deleted snapshot. * When deleting a current inode, this parameter captures the latest * snapshot. */ public abstract void cleanSubtree(ReclaimContext reclaimContext, final int snapshotId, int priorSnapshotId); /** * Destroy self and clear everything! If the INode is a file, this method * collects its blocks for further block deletion. If the INode is a * directory, the method goes down the subtree and collects blocks from the * descents, and clears its parent/children references as well. The method * also clears the diff list if the INode contains snapshot diff list. * * @param reclaimContext * Record blocks and inodes that need to be reclaimed. */ public abstract void destroyAndCollectBlocks(ReclaimContext reclaimContext); /** Compute {@link ContentSummary}. Blocking call */ public final ContentSummary computeContentSummary( BlockStoragePolicySuite bsps) throws AccessControlException { return computeAndConvertContentSummary(Snapshot.CURRENT_STATE_ID, new ContentSummaryComputationContext(bsps)); } /** * Compute {@link ContentSummary}. */ public final ContentSummary computeAndConvertContentSummary(int snapshotId, ContentSummaryComputationContext summary) throws AccessControlException { computeContentSummary(snapshotId, summary); final ContentCounts counts = summary.getCounts(); final ContentCounts snapshotCounts = summary.getSnapshotCounts(); final QuotaCounts q = getQuotaCounts(); return new ContentSummary.Builder(). length(counts.getLength()). fileCount(counts.getFileCount() + counts.getSymlinkCount()). directoryCount(counts.getDirectoryCount()). quota(q.getNameSpace()). spaceConsumed(counts.getStoragespace()). spaceQuota(q.getStorageSpace()). typeConsumed(counts.getTypeSpaces()). typeQuota(q.getTypeSpaces().asArray()). snapshotLength(snapshotCounts.getLength()). snapshotFileCount(snapshotCounts.getFileCount()). snapshotDirectoryCount(snapshotCounts.getDirectoryCount()). snapshotSpaceConsumed(snapshotCounts.getStoragespace()). erasureCodingPolicy(summary.getErasureCodingPolicyName(this)). build(); } /** * Count subtree content summary with a {@link ContentCounts}. * * @param snapshotId Specify the time range for the calculation. If this * parameter equals to {@link Snapshot#CURRENT_STATE_ID}, * the result covers both the current states and all the * snapshots. Otherwise the result only covers all the * files/directories contained in the specific snapshot. * @param summary the context object holding counts for the subtree. * @return The same objects as summary. */ public abstract ContentSummaryComputationContext computeContentSummary( int snapshotId, ContentSummaryComputationContext summary) throws AccessControlException; /** * Check and add namespace/storagespace/storagetype consumed to itself and the ancestors. */ public void addSpaceConsumed(QuotaCounts counts) { if (parent != null) { parent.addSpaceConsumed(counts); } } /** * Get the quota set for this inode * @return the quota counts. The count is -1 if it is not set. */ public QuotaCounts getQuotaCounts() { return new QuotaCounts.Builder(). nameSpace(HdfsConstants.QUOTA_RESET). storageSpace(HdfsConstants.QUOTA_RESET). typeSpaces(HdfsConstants.QUOTA_RESET). 
build(); } public final boolean isQuotaSet() { final QuotaCounts qc = getQuotaCounts(); return qc.anyNsSsCountGreaterOrEqual(0) || qc.anyTypeSpaceCountGreaterOrEqual(0); } /** * Count subtree {@link Quota#NAMESPACE} and {@link Quota#STORAGESPACE} usages. * Entry point for FSDirectory where blockStoragePolicyId is given its initial * value. */ public final QuotaCounts computeQuotaUsage(BlockStoragePolicySuite bsps) { final byte storagePolicyId = isSymlink() ? HdfsConstants.BLOCK_STORAGE_POLICY_ID_UNSPECIFIED : getStoragePolicyID(); return computeQuotaUsage(bsps, storagePolicyId, true, Snapshot.CURRENT_STATE_ID); } /** * Count subtree {@link Quota#NAMESPACE} and {@link Quota#STORAGESPACE} usages. * * With the existence of {@link INodeReference}, the same inode and its * subtree may be referred by multiple {@link WithName} nodes and a * {@link DstReference} node. To avoid circles while quota usage computation, * we have the following rules: * * <pre> * 1. For a {@link DstReference} node, since the node must be in the current * tree (or has been deleted as the end point of a series of rename * operations), we compute the quota usage of the referred node (and its * subtree) in the regular manner, i.e., including every inode in the current * tree and in snapshot copies, as well as the size of diff list. * * 2. For a {@link WithName} node, since the node must be in a snapshot, we * only count the quota usage for those nodes that still existed at the * creation time of the snapshot associated with the {@link WithName} node. * We do not count in the size of the diff list. * </pre> * * @param bsps Block storage policy suite to calculate intended storage type usage * @param blockStoragePolicyId block storage policy id of the current INode * @param useCache Whether to use cached quota usage. Note that * {@link WithName} node never uses cache for its subtree. * @param lastSnapshotId {@link Snapshot#CURRENT_STATE_ID} indicates the * computation is in the current tree. Otherwise the id * indicates the computation range for a * {@link WithName} node. * @return The subtree quota counts. */ public abstract QuotaCounts computeQuotaUsage(BlockStoragePolicySuite bsps, byte blockStoragePolicyId, boolean useCache, int lastSnapshotId); public final QuotaCounts computeQuotaUsage(BlockStoragePolicySuite bsps, boolean useCache) { final byte storagePolicyId = isSymlink() ? HdfsConstants.BLOCK_STORAGE_POLICY_ID_UNSPECIFIED : getStoragePolicyID(); return computeQuotaUsage(bsps, storagePolicyId, useCache, Snapshot.CURRENT_STATE_ID); } /** * @return null if the local name is null; otherwise, return the local name. */ public final String getLocalName() { final byte[] name = getLocalNameBytes(); return name == null? null: DFSUtil.bytes2String(name); } @Override public final byte[] getKey() { return getLocalNameBytes(); } /** * Set local file name */ public abstract void setLocalName(byte[] name); public String getFullPathName() { // Get the full path name of this inode. if (isRoot()) { return Path.SEPARATOR; } // compute size of needed bytes for the path int idx = 0; for (INode inode = this; inode != null; inode = inode.getParent()) { // add component + delimiter (if not tail component) idx += inode.getLocalNameBytes().length + (inode != this ? 
1 : 0); } byte[] path = new byte[idx]; for (INode inode = this; inode != null; inode = inode.getParent()) { if (inode != this) { path[--idx] = Path.SEPARATOR_CHAR; } byte[] name = inode.getLocalNameBytes(); idx -= name.length; System.arraycopy(name, 0, path, idx, name.length); } return DFSUtil.bytes2String(path); } public boolean isDeleted() { INode pInode = this; while (pInode != null && !pInode.isRoot()) { pInode = pInode.getParent(); } if (pInode == null) { return true; } else { return !pInode.isRoot(); } } public byte[][] getPathComponents() { int n = 0; for (INode inode = this; inode != null; inode = inode.getParent()) { n++; } byte[][] components = new byte[n][]; for (INode inode = this; inode != null; inode = inode.getParent()) { components[--n] = inode.getLocalNameBytes(); } return components; } @Override public String toString() { return getLocalName(); } @VisibleForTesting public final String getObjectString() { return getClass().getSimpleName() + "@" + Integer.toHexString(super.hashCode()); } /** @return a string description of the parent. */ @VisibleForTesting public final String getParentString() { final INodeReference parentRef = getParentReference(); if (parentRef != null) { return "parentRef=" + parentRef.getLocalName() + "->"; } else { final INodeDirectory parentDir = getParent(); if (parentDir != null) { return "parentDir=" + parentDir.getLocalName() + "/"; } else { return "parent=null"; } } } @VisibleForTesting public String getFullPathAndObjectString() { return getFullPathName() + "(" + getId() + ", " + getObjectString() + ")"; } @VisibleForTesting public String toDetailString() { return toString() + "(" + getId() + ", " + getObjectString() + ", " + getParentString() + ")"; } /** @return the parent directory */ public final INodeDirectory getParent() { return parent == null? null : parent.isReference()? getParentReference().getParent(): parent.asDirectory(); } /** * @return the parent as a reference if this is a referred inode; * otherwise, return null. */ public INodeReference getParentReference() { return parent == null || !parent.isReference()? null: (INodeReference)parent; } /** * @return true if this is a reference and the reference count is 1; * otherwise, return false. */ public boolean isLastReference() { final INodeReference ref = getParentReference(); if (!(ref instanceof WithCount)) { return false; } return ((WithCount)ref).getReferenceCount() == 1; } /** Set parent directory */ public final void setParent(INodeDirectory parent) { this.parent = parent; } /** Set container. */ public final void setParentReference(INodeReference parent) { this.parent = parent; } /** Clear references to other objects. */ public void clear() { setParent(null); } /** * @param snapshotId * if it is not {@link Snapshot#CURRENT_STATE_ID}, get the result * from the given snapshot; otherwise, get the result from the * current inode. * @return modification time. */ abstract long getModificationTime(int snapshotId); /** The same as getModificationTime(Snapshot.CURRENT_STATE_ID). */ @Override public final long getModificationTime() { return getModificationTime(Snapshot.CURRENT_STATE_ID); } /** Update modification time if it is larger than the current value. */ public abstract INode updateModificationTime(long mtime, int latestSnapshotId); /** Set the last modification time of inode. */ public abstract void setModificationTime(long modificationTime); /** Set the last modification time of inode. 
*/ public final INode setModificationTime(long modificationTime, int latestSnapshotId) { recordModification(latestSnapshotId); setModificationTime(modificationTime); return this; } /** * @param snapshotId * if it is not {@link Snapshot#CURRENT_STATE_ID}, get the result * from the given snapshot; otherwise, get the result from the * current inode. * @return access time */ abstract long getAccessTime(int snapshotId); /** The same as getAccessTime(Snapshot.CURRENT_STATE_ID). */ @Override public final long getAccessTime() { return getAccessTime(Snapshot.CURRENT_STATE_ID); } /** * Set last access time of inode. */ public abstract void setAccessTime(long accessTime); /** * Set last access time of inode. */ public final INode setAccessTime(long accessTime, int latestSnapshotId, boolean skipCaptureAccessTimeOnlyChangeInSnapshot) { if (!skipCaptureAccessTimeOnlyChangeInSnapshot) { recordModification(latestSnapshotId); } setAccessTime(accessTime); return this; } /** * @return the latest block storage policy id of the INode. Specifically, * if a storage policy is directly specified on the INode then return the ID * of that policy. Otherwise follow the latest parental path and return the * ID of the first specified storage policy. */ public abstract byte getStoragePolicyID(); /** * @return the storage policy directly specified on the INode. Return * {@link HdfsConstants#BLOCK_STORAGE_POLICY_ID_UNSPECIFIED} if no policy has * been specified. */ public abstract byte getLocalStoragePolicyID(); /** * Get the storage policy ID while computing quota usage * @param parentStoragePolicyId the storage policy ID of the parent directory * @return the storage policy ID of this INode. Note that for an * {@link INodeSymlink} we return {@link HdfsConstants#BLOCK_STORAGE_POLICY_ID_UNSPECIFIED} * instead of throwing Exception */ public byte getStoragePolicyIDForQuota(byte parentStoragePolicyId) { byte localId = isSymlink() ? HdfsConstants.BLOCK_STORAGE_POLICY_ID_UNSPECIFIED : getLocalStoragePolicyID(); return localId != HdfsConstants.BLOCK_STORAGE_POLICY_ID_UNSPECIFIED ? localId : parentStoragePolicyId; } /** * Breaks {@code path} into components. * @return array of byte arrays each of which represents * a single path component. */ @VisibleForTesting public static byte[][] getPathComponents(String path) { checkAbsolutePath(path); return DFSUtil.getPathComponents(path); } /** * Splits an absolute {@code path} into an array of path components. * @throws AssertionError if the given path is invalid. * @return array of path components. */ public static String[] getPathNames(String path) { checkAbsolutePath(path); return StringUtils.split(path, Path.SEPARATOR_CHAR); } /** * Verifies if the path informed is a valid absolute path. * @param path the absolute path to validate. * @return true if the path is valid. 
*/ static boolean isValidAbsolutePath(final String path){ return path != null && path.startsWith(Path.SEPARATOR); } static void checkAbsolutePath(final String path) { if (!isValidAbsolutePath(path)) { throw new AssertionError("Absolute path required, but got '" + path + "'"); } } @Override public final int compareTo(byte[] bytes) { return DFSUtilClient.compareBytes(getLocalNameBytes(), bytes); } @Override public final boolean equals(Object that) { if (this == that) { return true; } if (!(that instanceof INode)) { return false; } return getId() == ((INode) that).getId(); } @Override public final int hashCode() { long id = getId(); return (int)(id^(id>>>32)); } @VisibleForTesting public final StringBuilder dumpParentINodes() { final StringBuilder b = parent == null? new StringBuilder() : parent.dumpParentINodes().append("\n "); return b.append(toDetailString()); } /** * Dump the subtree starting from this inode. * @return a text representation of the tree. */ @VisibleForTesting public final StringBuffer dumpTreeRecursively() { final StringWriter out = new StringWriter(); dumpTreeRecursively(new PrintWriter(out, true), new StringBuilder(), Snapshot.CURRENT_STATE_ID); return out.getBuffer(); } @VisibleForTesting public final void dumpTreeRecursively(PrintStream out) { out.println(dumpTreeRecursively().toString()); } /** * Dump tree recursively. * @param prefix The prefix string that each line should print. */ @VisibleForTesting public void dumpTreeRecursively(PrintWriter out, StringBuilder prefix, int snapshotId) { dumpINode(out, prefix, snapshotId); } public void dumpINode(PrintWriter out, StringBuilder prefix, int snapshotId) { out.print(prefix); out.print(" "); final String name = getLocalName(); out.print(name != null && name.isEmpty()? "/": name); out.print(", isInCurrentState? "); out.print(isInCurrentState()); out.print(" ("); out.print(getObjectString()); out.print("), "); out.print(getParentString()); out.print(", " + getPermissionStatus(snapshotId)); } /** * Information used to record quota usage delta. This data structure is * usually passed along with an operation like {@link #cleanSubtree}. Note * that after the operation the delta counts should be decremented from the * ancestral directories' quota usage. */ public static
INode
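The getFullPathName implementation in the INode source above builds the path in two passes: one pass to measure the total byte length, then a backwards fill of a single byte array from the leaf toward the root, so no intermediate strings are allocated. A minimal standalone sketch of the same two-pass technique, shown over a plain list of components rather than the INode parent chain (PathJoinSketch and joinPath are illustrative names, not Hadoop API):

import java.nio.charset.StandardCharsets;
import java.util.List;

public class PathJoinSketch {
    /** Joins components into "/a/b/c" with a single allocation, mirroring getFullPathName. */
    static String joinPath(List<String> components) {
        // Pass 1: measure the exact buffer size (a '/' separator plus each component).
        int size = 0;
        for (String c : components) {
            size += 1 + c.getBytes(StandardCharsets.UTF_8).length;
        }
        byte[] path = new byte[size];
        // Pass 2: fill the buffer backwards, leaf component first, like the INode loop.
        int idx = size;
        for (int i = components.size() - 1; i >= 0; i--) {
            byte[] name = components.get(i).getBytes(StandardCharsets.UTF_8);
            idx -= name.length;
            System.arraycopy(name, 0, path, idx, name.length);
            path[--idx] = '/';
        }
        return new String(path, StandardCharsets.UTF_8);
    }

    public static void main(String[] args) {
        System.out.println(joinPath(List.of("user", "hadoop", "file.txt"))); // /user/hadoop/file.txt
    }
}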
java
elastic__elasticsearch
test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java
{ "start": 35553, "end": 36817 }
class ____ implements StringSortScript.Factory { private final MockDeterministicScript script; MockStringSortScriptFactory(MockDeterministicScript script) { this.script = script; } @Override public boolean isResultDeterministic() { return script.isResultDeterministic(); } @Override public StringSortScript.LeafFactory newFactory(Map<String, Object> parameters) { return docReader -> new StringSortScript(parameters, docReader) { @Override public String execute() { Map<String, Object> vars = new HashMap<>(parameters); vars.put("params", parameters); vars.put("doc", getDoc()); try { vars.put("_score", get_score()); } catch (Exception ignore) { // nothing to do: if get_score throws we don't set the _score, likely the scorer is null, // which is ok if _score was not requested e.g. top_hits. } return String.valueOf(script.apply(vars)); } }; } }
MockStringSortScriptFactory
java
spring-projects__spring-boot
core/spring-boot-testcontainers/src/dockerTest/java/org/springframework/boot/testcontainers/lifecycle/ResetStartablesExtension.java
{ "start": 1209, "end": 2111 }
class ____ implements BeforeEachCallback, AfterEachCallback { @Override public void afterEach(ExtensionContext context) throws Exception { reset(); } @Override public void beforeEach(ExtensionContext context) throws Exception { reset(); } private void reset() { try { Object executor = ReflectionTestUtils.getField(Startables.class, "EXECUTOR"); assertThat(executor).isNotNull(); Object threadFactory = ReflectionTestUtils.getField(executor, "threadFactory"); assertThat(threadFactory).isNotNull(); AtomicLong counter = (AtomicLong) ReflectionTestUtils.getField(threadFactory, "COUNTER"); assertThat(counter).isNotNull(); counter.set(0); } catch (InaccessibleObjectException ex) { throw new IllegalStateException( "Unable to reset field. Please run with '--add-opens=java.base/java.util.concurrent=ALL-UNNAMED'", ex); } } }
ResetStartablesExtension
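ResetStartablesExtension above resets a private static counter inside Testcontainers via Spring's ReflectionTestUtils. The same reset can be written with plain java.lang.reflect; a hedged sketch against a stand-in holder class (Counters is invented for illustration; the real targets are the Startables internals named in the source):

import java.lang.reflect.Field;
import java.util.concurrent.atomic.AtomicLong;

public class StaticCounterResetSketch {
    // Stand-in for a library-internal class holding a private static counter.
    static class Counters {
        private static final AtomicLong COUNTER = new AtomicLong(41);
    }

    static void resetCounter() throws ReflectiveOperationException {
        Field f = Counters.class.getDeclaredField("COUNTER");
        // On encapsulated JDK internals this can throw InaccessibleObjectException,
        // which is why the extension above wraps it and suggests --add-opens.
        f.setAccessible(true);
        ((AtomicLong) f.get(null)).set(0); // static field, so the instance argument is null
    }

    public static void main(String[] args) throws ReflectiveOperationException {
        resetCounter();
        System.out.println(Counters.COUNTER.get()); // 0
    }
}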
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/CsiAdaptorPlugin.java
{ "start": 1290, "end": 2079 }
interface ____ extends CsiAdaptorProtocol { /** * A csi-adaptor implementation can init its state within this function. * Configuration is available so the implementation can retrieve some * customized configuration from yarn-site.xml. * @param driverName the name of the csi-driver. * @param conf configuration. * @throws YarnException exceptions from yarn servers. */ void init(String driverName, Configuration conf) throws YarnException; /** * Returns the driver name of the csi-driver this adaptor works with. * The name should be consistent on all the places being used, ideally * it should come from the value when init is done. * @return the name of the csi-driver that this adaptor works with. */ String getDriverName(); }
CsiAdaptorPlugin
java
apache__camel
dsl/camel-jbang/camel-jbang-core/src/main/java/org/apache/camel/dsl/jbang/core/common/ResourceDoesNotExist.java
{ "start": 938, "end": 1127 }
class ____ extends CamelException { public ResourceDoesNotExist(Resource resource) { super("The resource does not exist at " + resource.getLocation()); } }
ResourceDoesNotExist
java
apache__commons-lang
src/main/java/org/apache/commons/lang3/RandomStringUtils.java
{ "start": 41660, "end": 43162 }
class ____ all visible ASCII characters and spaces (i.e. anything except control characters). * </p> * * @param count the length of random string to create. * @return the random string. * @throws IllegalArgumentException if {@code count} &lt; 0. * @since 3.5 * @since 3.16.0 */ public String nextPrint(final int count) { return next(count, 32, 126, false, false); } /** * Creates a random string whose length is between the inclusive minimum and the exclusive maximum. * * <p> * Characters will be chosen from the set of \p{Print} characters. * </p> * * @param minLengthInclusive the inclusive minimum length of the string to generate. * @param maxLengthExclusive the exclusive maximum length of the string to generate. * @return the random string. * @since 3.16.0 */ public String nextPrint(final int minLengthInclusive, final int maxLengthExclusive) { return nextPrint(randomUtils().randomInt(minLengthInclusive, maxLengthExclusive)); } /** * Gets the Random. * * @return the Random. */ private Random random() { return randomUtils().random(); } /** * Gets the RandomUtils. * * @return the RandomUtils. */ private RandomUtils randomUtils() { return random.get(); } @Override public String toString() { return "RandomStringUtils [random=" + random() + "]"; } }
includes
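The nextPrint fragment above draws characters uniformly from code points 32 to 126, the printable ASCII range, by delegating to next(count, 32, 126, false, false). A minimal self-contained sketch of that idea, not the Commons Lang implementation (which also handles letter/digit filtering and surrogate pairs):

import java.util.Random;

public class RandomPrintSketch {
    /** Returns a random string of the given length drawn from ASCII 32..126 inclusive. */
    static String randomPrint(Random random, int count) {
        if (count < 0) {
            throw new IllegalArgumentException("count must be non-negative: " + count);
        }
        StringBuilder sb = new StringBuilder(count);
        for (int i = 0; i < count; i++) {
            sb.append((char) (32 + random.nextInt(126 - 32 + 1))); // uniform over [32, 126]
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(randomPrint(new Random(42), 16));
    }
}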
java
junit-team__junit5
junit-jupiter-engine/src/main/java/org/junit/jupiter/engine/Constants.java
{ "start": 14363, "end": 18583 }
class ____ of the * {@link ParallelExecutionConfigurationStrategy} to be used for the * {@code custom} configuration strategy: {@value} * * @since 5.3 */ @API(status = STABLE, since = "5.10") public static final String PARALLEL_CONFIG_CUSTOM_CLASS_PROPERTY_NAME = JupiterConfiguration.PARALLEL_CONFIG_PREFIX + CONFIG_CUSTOM_CLASS_PROPERTY_NAME; /** * Property name used to set the default timeout for all testable and * lifecycle methods: {@value}. * * @see Timeout#DEFAULT_TIMEOUT_PROPERTY_NAME */ @API(status = STABLE, since = "5.10") public static final String DEFAULT_TIMEOUT_PROPERTY_NAME = Timeout.DEFAULT_TIMEOUT_PROPERTY_NAME; /** * Property name used to set the default timeout for all testable methods: {@value}. * * @see Timeout#DEFAULT_TESTABLE_METHOD_TIMEOUT_PROPERTY_NAME */ @API(status = STABLE, since = "5.10") public static final String DEFAULT_TESTABLE_METHOD_TIMEOUT_PROPERTY_NAME = Timeout.DEFAULT_TESTABLE_METHOD_TIMEOUT_PROPERTY_NAME; /** * Property name used to set the default timeout for all * {@link Test @Test} methods: {@value}. * * @see Timeout#DEFAULT_TEST_METHOD_TIMEOUT_PROPERTY_NAME */ @API(status = STABLE, since = "5.10") public static final String DEFAULT_TEST_METHOD_TIMEOUT_PROPERTY_NAME = Timeout.DEFAULT_TEST_METHOD_TIMEOUT_PROPERTY_NAME; /** * Property name used to set the default timeout for all * {@link TestTemplate @TestTemplate} methods: {@value}. * * @see Timeout#DEFAULT_TEST_TEMPLATE_METHOD_TIMEOUT_PROPERTY_NAME */ @API(status = STABLE, since = "5.10") public static final String DEFAULT_TEST_TEMPLATE_METHOD_TIMEOUT_PROPERTY_NAME = Timeout.DEFAULT_TEST_TEMPLATE_METHOD_TIMEOUT_PROPERTY_NAME; /** * Property name used to set the default timeout for all * {@link TestFactory @TestFactory} methods: {@value}. * * @see Timeout#DEFAULT_TEST_FACTORY_METHOD_TIMEOUT_PROPERTY_NAME */ @API(status = STABLE, since = "5.10") public static final String DEFAULT_TEST_FACTORY_METHOD_TIMEOUT_PROPERTY_NAME = Timeout.DEFAULT_TEST_FACTORY_METHOD_TIMEOUT_PROPERTY_NAME; /** * Property name used to set the default timeout for all lifecycle methods: {@value}. * * @see Timeout#DEFAULT_LIFECYCLE_METHOD_TIMEOUT_PROPERTY_NAME */ @API(status = STABLE, since = "5.10") public static final String DEFAULT_LIFECYCLE_METHOD_TIMEOUT_PROPERTY_NAME = Timeout.DEFAULT_LIFECYCLE_METHOD_TIMEOUT_PROPERTY_NAME; /** * Property name used to set the default timeout for all * {@link BeforeAll @BeforeAll} methods: {@value}. * * @see Timeout#DEFAULT_BEFORE_ALL_METHOD_TIMEOUT_PROPERTY_NAME */ @API(status = STABLE, since = "5.10") public static final String DEFAULT_BEFORE_ALL_METHOD_TIMEOUT_PROPERTY_NAME = Timeout.DEFAULT_BEFORE_ALL_METHOD_TIMEOUT_PROPERTY_NAME; /** * Property name used to set the default timeout for all * {@link BeforeEach @BeforeEach} methods: {@value}. * * @see Timeout#DEFAULT_BEFORE_EACH_METHOD_TIMEOUT_PROPERTY_NAME */ @API(status = STABLE, since = "5.10") public static final String DEFAULT_BEFORE_EACH_METHOD_TIMEOUT_PROPERTY_NAME = Timeout.DEFAULT_BEFORE_EACH_METHOD_TIMEOUT_PROPERTY_NAME; /** * Property name used to set the default timeout for all * {@link AfterEach @AfterEach} methods: {@value}. * * @see Timeout#DEFAULT_AFTER_EACH_METHOD_TIMEOUT_PROPERTY_NAME */ @API(status = STABLE, since = "5.10") public static final String DEFAULT_AFTER_EACH_METHOD_TIMEOUT_PROPERTY_NAME = Timeout.DEFAULT_AFTER_EACH_METHOD_TIMEOUT_PROPERTY_NAME; /** * Property name used to set the default timeout for all * {@link AfterAll @AfterAll} methods: {@value}. 
* * @see Timeout#DEFAULT_AFTER_ALL_METHOD_TIMEOUT_PROPERTY_NAME */ @API(status = STABLE, since = "5.10") public static final String DEFAULT_AFTER_ALL_METHOD_TIMEOUT_PROPERTY_NAME = Timeout.DEFAULT_AFTER_ALL_METHOD_TIMEOUT_PROPERTY_NAME; /** * Property name used to configure whether timeouts are applied to tests: {@value}. * * @see Timeout#TIMEOUT_MODE_PROPERTY_NAME */ @API(status = STABLE, since = "5.10") public static final String TIMEOUT_MODE_PROPERTY_NAME = Timeout.TIMEOUT_MODE_PROPERTY_NAME; /** * Property name used to set the default method orderer
name
java
apache__hadoop
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/serde/TestKVSerializer.java
{ "start": 1730, "end": 6036 }
class ____ { private int inputArraySize = 1000; // 1000 bytes Writable elements int bufferSize = 100; // bytes private KV<BytesWritable, BytesWritable>[] inputArray; final ByteArrayOutputStream result = new ByteArrayOutputStream(); private SizedWritable key; private SizedWritable value; private KVSerializer serializer; @BeforeEach public void setUp() throws IOException { this.inputArray = TestInput.getMapInputs(inputArraySize); this.key = new SizedWritable(BytesWritable.class); this.value = new SizedWritable(BytesWritable.class); this.serializer = new KVSerializer(BytesWritable.class, BytesWritable.class); key.reset(inputArray[4].key); value.reset(inputArray[4].value); serializer.updateLength(key, value); } @Test public void testUpdateLength() throws IOException { Mockito.mock(DataOutputStream.class); int kvLength = 0; for (int i = 0; i < inputArraySize; i++) { key.reset(inputArray[i].key); value.reset(inputArray[i].value); serializer.updateLength(key, value); // verify whether the size increase assertTrue(key.length + value.length > kvLength); kvLength = key.length + value.length; } } @Test public void testSerializeKV() throws IOException { final DataOutputStream dataOut = Mockito.mock(DataOutputStream.class); Mockito.when(dataOut.hasUnFlushedData()).thenReturn(true); Mockito.when(dataOut.shortOfSpace(key.length + value.length + Constants.SIZEOF_KV_LENGTH)).thenReturn(true); final int written = serializer.serializeKV(dataOut, key, value); // flush once, write 4 int, and 2 byte array Mockito.verify(dataOut, Mockito.times(1)).flush(); Mockito.verify(dataOut, Mockito.times(4)).writeInt(anyInt()); Mockito.verify(dataOut, Mockito.times(2)).write(any(byte[].class), anyInt(), anyInt()); assertEquals(written, key.length + value.length + Constants.SIZEOF_KV_LENGTH); } @Test public void testSerializeNoFlush() throws IOException { final DataOutputStream dataOut = Mockito.mock(DataOutputStream.class); // suppose there are enough space Mockito.when(dataOut.hasUnFlushedData()).thenReturn(true); Mockito.when(dataOut.shortOfSpace(anyInt())).thenReturn(false); final int written = serializer.serializeKV(dataOut, key, value); // flush 0, write 4 int, and 2 byte array Mockito.verify(dataOut, Mockito.times(0)).flush(); Mockito.verify(dataOut, Mockito.times(4)).writeInt(anyInt()); Mockito.verify(dataOut, Mockito.times(2)).write(any(byte[].class), anyInt(), anyInt()); assertEquals(written, key.length + value.length + Constants.SIZEOF_KV_LENGTH); } @Test public void testSerializePartitionKV() throws IOException { final DataOutputStream dataOut = Mockito.mock(DataOutputStream.class); Mockito.when(dataOut.hasUnFlushedData()).thenReturn(true); Mockito.when( dataOut .shortOfSpace(key.length + value.length + Constants.SIZEOF_KV_LENGTH + Constants.SIZEOF_PARTITION_LENGTH)) .thenReturn(true); final int written = serializer.serializePartitionKV(dataOut, 100, key, value); // flush once, write 4 int, and 2 byte array Mockito.verify(dataOut, Mockito.times(1)).flush(); Mockito.verify(dataOut, Mockito.times(5)).writeInt(anyInt()); Mockito.verify(dataOut, Mockito.times(2)).write(any(byte[].class), anyInt(), anyInt()); assertEquals(written, key.length + value.length + Constants.SIZEOF_KV_LENGTH + Constants.SIZEOF_PARTITION_LENGTH); } @Test public void testDeserializerNoData() throws IOException { final DataInputStream in = Mockito.mock(DataInputStream.class); Mockito.when(in.hasUnReadData()).thenReturn(false); assertEquals(0, serializer.deserializeKV(in, key, value)); } @Test public void testDeserializer() throws IOException { 
final DataInputStream in = Mockito.mock(DataInputStream.class); Mockito.when(in.hasUnReadData()).thenReturn(true); assertTrue(serializer.deserializeKV(in, key, value) > 0); Mockito.verify(in, Mockito.times(4)).readInt(); Mockito.verify(in, Mockito.times(2)).readFully(any(byte[].class), anyInt(), anyInt()); } }
TestKVSerializer
java
apache__camel
core/camel-support/src/main/java/org/apache/camel/support/LRUCacheFactory.java
{ "start": 1195, "end": 13764 }
class ____ { /** * Factory key */ public static final String FACTORY = "lru-cache-factory"; private static final Logger LOG = LoggerFactory.getLogger(LRUCacheFactory.class); private static final Lock LOCK = new ReentrantLock(); private static volatile LRUCacheFactory instance; /** * Initializes and creates the cache factory if not explicit set. */ public static void init() { if (instance == null) { instance = createLRUCacheFactory(); } } /** * Use this to set a specific LRUCacheFactory instance, such as before starting Camel, that then avoids doing auto * discovery of the cache factory via classpath. */ public static void setLRUCacheFactory(LRUCacheFactory cacheFactory) { instance = cacheFactory; } /** * Gets (and creates if needed) the LRUCacheFactory to use. */ public static LRUCacheFactory getInstance() { if (instance == null) { LOCK.lock(); try { if (instance == null) { instance = createLRUCacheFactory(); } } finally { LOCK.unlock(); } } return instance; } private static LRUCacheFactory createLRUCacheFactory() { LOG.trace("createLRUCacheFactory"); try { ClassLoader classLoader = LRUCacheFactory.class.getClassLoader(); URL url = classLoader.getResource("META-INF/services/org/apache/camel/" + FACTORY); if (url != null) { Properties props = new Properties(); try (InputStream is = url.openStream()) { props.load(is); } String clazzName = props.getProperty("class"); if (clazzName != null) { LOG.trace("Loading class: {}", clazzName); Class<?> clazz = classLoader.loadClass(clazzName); LOG.trace("Creating LRUCacheFactory instance from class: {}", clazzName); Object factory = clazz.getDeclaredConstructor().newInstance(); LOG.trace("Created LRUCacheFactory instance: {}", factory); LOG.info("Detected and using LRUCacheFactory: {}", factory); return (LRUCacheFactory) factory; } } } catch (Exception t) { LOG.warn("Error creating LRUCacheFactory. Will use DefaultLRUCacheFactory.", t); } // use default LOG.debug("Creating DefaultLRUCacheFactory"); return new DefaultLRUCacheFactory(); } /** * Constructs an empty <tt>LRUCache</tt> instance with the specified maximumCacheSize, and will stop on eviction. * * @param maximumCacheSize the max capacity. * @throws IllegalArgumentException if the initial capacity is negative */ public static <K, V> Map<K, V> newLRUCache(int maximumCacheSize) { return getInstance().createLRUCache(maximumCacheSize); } /** * Constructs an empty <tt>LRUCache</tt> instance with the specified maximumCacheSize, and will stop on eviction. * * @param maximumCacheSize the max capacity. * @throws IllegalArgumentException if the initial capacity is negative */ public static <K, V> Map<K, V> newLRUCache(int maximumCacheSize, Consumer<V> onEvict) { return getInstance().createLRUCache(maximumCacheSize, onEvict); } /** * Constructs an empty <tt>LRUCache</tt> instance with the specified initial capacity, maximumCacheSize, and will * stop on eviction. * * @param initialCapacity the initial capacity. * @param maximumCacheSize the max capacity. * @throws IllegalArgumentException if the initial capacity is negative */ public static <K, V> Map<K, V> newLRUCache(int initialCapacity, int maximumCacheSize) { return getInstance().createLRUCache(initialCapacity, maximumCacheSize); } /** * Constructs an empty <tt>LRUCache</tt> instance with the specified initial capacity, maximumCacheSize,load factor * and ordering mode. * * @param initialCapacity the initial capacity. * @param maximumCacheSize the max capacity. * @param stopOnEviction whether to stop service on eviction. 
* @throws IllegalArgumentException if the initial capacity is negative */ public static <K, V> Map<K, V> newLRUCache(int initialCapacity, int maximumCacheSize, boolean stopOnEviction) { return getInstance().createLRUCache(initialCapacity, maximumCacheSize, stopOnEviction); } /** * Constructs an empty <tt>LRUSoftCache</tt> instance with the specified maximumCacheSize, and will stop on * eviction. * * @param maximumCacheSize the max capacity. * @throws IllegalArgumentException if the initial capacity is negative */ public static <K, V> Map<K, V> newLRUSoftCache(int maximumCacheSize) { return getInstance().createLRUSoftCache(maximumCacheSize); } /** * Constructs an empty <tt>LRUSoftCache</tt> instance with the specified maximumCacheSize, and will stop on * eviction. * * @param initialCapacity the initial capacity. * @param maximumCacheSize the max capacity. * @throws IllegalArgumentException if the initial capacity is negative */ public static <K, V> Map<K, V> newLRUSoftCache(int initialCapacity, int maximumCacheSize) { return getInstance().createLRUSoftCache(initialCapacity, maximumCacheSize); } /** * Constructs an empty <tt>LRUSoftCache</tt> instance with the specified maximumCacheSize, and will stop on * eviction. * * @param initialCapacity the initial capacity. * @param maximumCacheSize the max capacity. * @param stopOnEviction whether to stop service on eviction. * @throws IllegalArgumentException if the initial capacity is negative */ public static <K, V> Map<K, V> newLRUSoftCache(int initialCapacity, int maximumCacheSize, boolean stopOnEviction) { return getInstance().createLRUSoftCache(initialCapacity, maximumCacheSize, stopOnEviction); } /** * Constructs an empty <tt>LRUWeakCache</tt> instance with the specified maximumCacheSize, and will stop on * eviction. * * @param maximumCacheSize the max capacity. * @throws IllegalArgumentException if the initial capacity is negative */ public static <K, V> Map<K, V> newLRUWeakCache(int maximumCacheSize) { return getInstance().createLRUWeakCache(maximumCacheSize); } /** * Constructs an empty <tt>LRUWeakCache</tt> instance with the specified maximumCacheSize, and will stop on * eviction. * * @param initialCapacity the initial capacity. * @param maximumCacheSize the max capacity. * @throws IllegalArgumentException if the initial capacity is negative */ public static <K, V> Map<K, V> newLRUWeakCache(int initialCapacity, int maximumCacheSize) { return getInstance().createLRUWeakCache(initialCapacity, maximumCacheSize); } /** * Constructs an empty <tt>LRUWeakCache</tt> instance with the specified maximumCacheSize, and will stop on * eviction. * * @param initialCapacity the initial capacity. * @param maximumCacheSize the max capacity. * @param stopOnEviction whether to stop service on eviction. * @throws IllegalArgumentException if the initial capacity is negative */ public static <K, V> Map<K, V> newLRUWeakCache(int initialCapacity, int maximumCacheSize, boolean stopOnEviction) { return getInstance().createLRUWeakCache(initialCapacity, maximumCacheSize, stopOnEviction); } /** * Constructs an empty <tt>LRUCache</tt> instance with the specified maximumCacheSize, and will stop on eviction. * * @param maximumCacheSize the max capacity. * @throws IllegalArgumentException if the initial capacity is negative */ public abstract <K, V> Map<K, V> createLRUCache(int maximumCacheSize); /** * Constructs an empty <tt>LRUCache</tt> instance with the specified maximumCacheSize, and will stop on eviction. * * @param maximumCacheSize the max capacity. 
* @throws IllegalArgumentException if the initial capacity is negative */ public abstract <K, V> Map<K, V> createLRUCache(int maximumCacheSize, Consumer<V> onEvict); /** * Constructs an empty <tt>LRUCache</tt> instance with the specified initial capacity, maximumCacheSize, and will * stop on eviction. * * @param initialCapacity the initial capacity. * @param maximumCacheSize the max capacity. * @throws IllegalArgumentException if the initial capacity is negative */ public abstract <K, V> Map<K, V> createLRUCache(int initialCapacity, int maximumCacheSize); /** * Constructs an empty <tt>LRUCache</tt> instance with the specified initial capacity, maximumCacheSize,load factor * and ordering mode. * * @param initialCapacity the initial capacity. * @param maximumCacheSize the max capacity. * @param stopOnEviction whether to stop service on eviction. * @throws IllegalArgumentException if the initial capacity is negative */ public abstract <K, V> Map<K, V> createLRUCache(int initialCapacity, int maximumCacheSize, boolean stopOnEviction); /** * Constructs an empty <tt>LRUSoftCache</tt> instance with the specified maximumCacheSize, and will stop on * eviction. * * @param maximumCacheSize the max capacity. * @throws IllegalArgumentException if the initial capacity is negative */ public abstract <K, V> Map<K, V> createLRUSoftCache(int maximumCacheSize); /** * Constructs an empty <tt>LRUSoftCache</tt> instance with the specified maximumCacheSize, and will stop on * eviction. * * @param initialCapacity the initial capacity. * @param maximumCacheSize the max capacity. * @throws IllegalArgumentException if the initial capacity is negative */ public abstract <K, V> Map<K, V> createLRUSoftCache(int initialCapacity, int maximumCacheSize); /** * Constructs an empty <tt>LRUSoftCache</tt> instance with the specified maximumCacheSize, and will stop on * eviction. * * @param initialCapacity the initial capacity. * @param maximumCacheSize the max capacity. * @param stopOnEviction whether to stop service on eviction. * @throws IllegalArgumentException if the initial capacity is negative */ public abstract <K, V> Map<K, V> createLRUSoftCache(int initialCapacity, int maximumCacheSize, boolean stopOnEviction); /** * Constructs an empty <tt>LRUWeakCache</tt> instance with the specified maximumCacheSize, and will stop on * eviction. * * @param maximumCacheSize the max capacity. * @throws IllegalArgumentException if the initial capacity is negative */ @Deprecated(since = "4.2.0") public abstract <K, V> Map<K, V> createLRUWeakCache(int maximumCacheSize); /** * Constructs an empty <tt>LRUWeakCache</tt> instance with the specified maximumCacheSize, and will stop on * eviction. * * @param initialCapacity the initial capacity. * @param maximumCacheSize the max capacity. * @throws IllegalArgumentException if the initial capacity is negative */ @Deprecated(since = "4.2.0") public abstract <K, V> Map<K, V> createLRUWeakCache(int initialCapacity, int maximumCacheSize); /** * Constructs an empty <tt>LRUWeakCache</tt> instance with the specified maximumCacheSize, and will stop on * eviction. * * @param initialCapacity the initial capacity. * @param maximumCacheSize the max capacity. * @param stopOnEviction whether to stop service on eviction. * @throws IllegalArgumentException if the initial capacity is negative */ @Deprecated(since = "4.2.0") public abstract <K, V> Map<K, V> createLRUWeakCache(int initialCapacity, int maximumCacheSize, boolean stopOnEviction); }
LRUCacheFactory
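createLRUCacheFactory above hand-rolls provider discovery: it reads a properties file under META-INF/services, loads the class named by its "class" key, and falls back to DefaultLRUCacheFactory on any failure. For the common case of that pattern the JDK's java.util.ServiceLoader is the idiomatic tool; a hedged sketch follows (CacheFactory is an illustrative SPI, not a Camel type):

import java.util.ServiceLoader;

public class ServiceDiscoverySketch {
    // Illustrative SPI; a provider would be listed on the classpath in
    // META-INF/services/ServiceDiscoverySketch$CacheFactory.
    public interface CacheFactory {
        String name();
    }

    static CacheFactory loadFactory() {
        // First provider wins, else fall back to a default, mirroring the
        // "discover, else DefaultLRUCacheFactory" flow above.
        for (CacheFactory factory : ServiceLoader.load(CacheFactory.class)) {
            return factory;
        }
        return () -> "default";
    }

    public static void main(String[] args) {
        System.out.println(loadFactory().name()); // "default" unless a provider is present
    }
}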
java
apache__camel
components/camel-aws/camel-aws2-transcribe/src/generated/java/org/apache/camel/component/aws2/transcribe/Transcribe2EndpointUriFactory.java
{ "start": 525, "end": 3159 }
class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory { private static final String BASE = ":label"; private static final Set<String> PROPERTY_NAMES; private static final Set<String> SECRET_PROPERTY_NAMES; private static final Map<String, String> MULTI_VALUE_PREFIXES; static { Set<String> props = new HashSet<>(22); props.add("accessKey"); props.add("label"); props.add("lazyStartProducer"); props.add("operation"); props.add("overrideEndpoint"); props.add("pojoRequest"); props.add("profileCredentialsName"); props.add("protocol"); props.add("proxyHost"); props.add("proxyPassword"); props.add("proxyPort"); props.add("proxyProtocol"); props.add("proxyUsername"); props.add("region"); props.add("secretKey"); props.add("sessionToken"); props.add("transcribeClient"); props.add("trustAllCertificates"); props.add("uriEndpointOverride"); props.add("useDefaultCredentialsProvider"); props.add("useProfileCredentialsProvider"); props.add("useSessionCredentials"); PROPERTY_NAMES = Collections.unmodifiableSet(props); Set<String> secretProps = new HashSet<>(5); secretProps.add("accessKey"); secretProps.add("proxyPassword"); secretProps.add("proxyUsername"); secretProps.add("secretKey"); secretProps.add("sessionToken"); SECRET_PROPERTY_NAMES = Collections.unmodifiableSet(secretProps); MULTI_VALUE_PREFIXES = Collections.emptyMap(); } @Override public boolean isEnabled(String scheme) { return "aws2-transcribe".equals(scheme); } @Override public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException { String syntax = scheme + BASE; String uri = syntax; Map<String, Object> copy = new HashMap<>(properties); uri = buildPathParameter(syntax, uri, "label", null, true, copy); uri = buildQueryParameters(uri, copy, encode); return uri; } @Override public Set<String> propertyNames() { return PROPERTY_NAMES; } @Override public Set<String> secretPropertyNames() { return SECRET_PROPERTY_NAMES; } @Override public Map<String, String> multiValuePrefixes() { return MULTI_VALUE_PREFIXES; } @Override public boolean isLenientProperties() { return false; } }
Transcribe2EndpointUriFactory
java
apache__camel
components/camel-spring-parent/camel-spring-xml/src/main/java/org/apache/camel/spring/xml/SSLContextParametersFactoryBean.java
{ "start": 1610, "end": 3813 }
class ____ extends AbstractSSLContextParametersFactoryBean implements FactoryBean<SSLContextParameters>, ApplicationContextAware { private KeyManagersParametersFactoryBean keyManagers; private TrustManagersParametersFactoryBean trustManagers; private SecureRandomParametersFactoryBean secureRandom; private SSLContextClientParametersFactoryBean clientParameters; private SSLContextServerParametersFactoryBean serverParameters; @XmlTransient private ApplicationContext applicationContext; @Override public KeyManagersParametersFactoryBean getKeyManagers() { return keyManagers; } public void setKeyManagers(KeyManagersParametersFactoryBean keyManagers) { this.keyManagers = keyManagers; } @Override public TrustManagersParametersFactoryBean getTrustManagers() { return trustManagers; } public void setTrustManagers(TrustManagersParametersFactoryBean trustManagers) { this.trustManagers = trustManagers; } @Override public SecureRandomParametersFactoryBean getSecureRandom() { return secureRandom; } public void setSecureRandom(SecureRandomParametersFactoryBean secureRandom) { this.secureRandom = secureRandom; } @Override public SSLContextClientParametersFactoryBean getClientParameters() { return clientParameters; } public void setClientParameters(SSLContextClientParametersFactoryBean clientParameters) { this.clientParameters = clientParameters; } @Override public SSLContextServerParametersFactoryBean getServerParameters() { return serverParameters; } public void setServerParameters(SSLContextServerParametersFactoryBean serverParameters) { this.serverParameters = serverParameters; } @Override protected CamelContext getCamelContextWithId(String camelContextId) { return CamelContextResolverHelper.getCamelContextWithId(applicationContext, camelContextId); } @Override public void setApplicationContext(ApplicationContext applicationContext) { this.applicationContext = applicationContext; } }
SSLContextParametersFactoryBean
java
apache__flink
flink-python/src/test/java/org/apache/flink/table/runtime/operators/python/scalar/PythonScalarFunctionOperatorTestBase.java
{ "start": 12062, "end": 12512 }
class ____ implements PythonFunction { private static final long serialVersionUID = 1L; public static final PythonFunction INSTANCE = new DummyPythonFunction(); @Override public byte[] getSerializedPythonFunction() { return new byte[0]; } @Override public PythonEnv getPythonEnv() { return new PythonEnv(PythonEnv.ExecType.PROCESS); } } }
DummyPythonFunction
java
apache__flink
flink-libraries/flink-cep/src/test/java/org/apache/flink/cep/utils/NFATestUtilities.java
{ "start": 1179, "end": 2083 }
class ____ { @Deprecated public static List<List<Event>> feedNFA(List<StreamRecord<Event>> inputEvents, NFA<Event> nfa) throws Exception { NFATestHarness nfaTestHarness = NFATestHarness.forNFA(nfa).build(); return nfaTestHarness.feedRecords(inputEvents); } public static void comparePatterns(List<List<Event>> actual, List<List<Event>> expected) { Assert.assertEquals(expected.size(), actual.size()); for (List<Event> p : actual) { Collections.sort(p, new EventComparator()); } for (List<Event> p : expected) { Collections.sort(p, new EventComparator()); } Collections.sort(actual, new ListEventComparator()); Collections.sort(expected, new ListEventComparator()); Assert.assertArrayEquals(expected.toArray(), actual.toArray()); } private static
NFATestUtilities
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/GuardedByBinderTest.java
{ "start": 8503, "end": 8960 }
class ____ { Other Other = null; } """))) .isEqualTo("(CLASS_LITERAL threadsafety.Other)"); } @Test public void simpleFieldName() { assertThat( bind( "Test", "Other", forSourceLines( "threadsafety/Test.java", """ package threadsafety;
Test
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/list/ListMappingTest.java
{ "start": 3858, "end": 4718 }
class ____ { private Integer id; private Order order; private String product; private int quantity; private String discountCode; @Id public Integer getId() { return id; } public void setId(Integer id) { this.id = id; } @ManyToOne( optional = false ) @JoinColumn( name = "order_id" ) public Order getOrder() { return order; } public void setOrder(Order order) { this.order = order; } public String getProduct() { return product; } public void setProduct(String product) { this.product = product; } public int getQuantity() { return quantity; } public void setQuantity(int quantity) { this.quantity = quantity; } public String getDiscountCode() { return discountCode; } public void setDiscountCode(String discountCode) { this.discountCode = discountCode; } } }
LineItem
java
quarkusio__quarkus
extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/http2/Http2DisabledTest.java
{ "start": 1131, "end": 3595 }
class ____ { protected static final String PING_DATA = "12345678"; @TestHTTPResource(value = "/ping", tls = true) URL sslUrl; @TestHTTPResource(value = "/ping") URL plainUrl; @RegisterExtension static final QuarkusUnitTest config = new QuarkusUnitTest() .withApplicationRoot(jar -> jar .addClasses(MyBean.class) .addAsResource(new File("target/certs/ssl-test-keystore.jks"), "server-keystore.jks")) .overrideConfigKey("quarkus.http.ssl.certificate.key-store-file", "server-keystore.jks") .overrideConfigKey("quarkus.http.ssl.certificate.key-store-password", "secret") .overrideConfigKey("quarkus.http.http2", "false"); @Test void testHttp2EnabledSsl() throws ExecutionException, InterruptedException { WebClientOptions options = new WebClientOptions() .setUseAlpn(true) .setProtocolVersion(HttpVersion.HTTP_2) .setSsl(true) .setTrustOptions(new JksOptions().setPath("target/certs/ssl-test-truststore.jks").setPassword("secret")); WebClient client = WebClient.create(VertxCoreRecorder.getVertx().get(), options); int port = sslUrl.getPort(); runTest(client, port); } @Test void testHttp2EnabledPlain() throws ExecutionException, InterruptedException { WebClientOptions options = new WebClientOptions() .setProtocolVersion(HttpVersion.HTTP_2) .setHttp2ClearTextUpgrade(true); WebClient client = WebClient.create(VertxCoreRecorder.getVertx().get(), options); runTest(client, plainUrl.getPort()); } private void runTest(WebClient client, int port) throws InterruptedException, ExecutionException { CompletableFuture<HttpResponse<Buffer>> result = new CompletableFuture<>(); client .get(port, "localhost", "/ping") .send(ar -> { if (ar.succeeded()) { result.complete(ar.result()); } else { result.completeExceptionally(ar.cause()); } }); HttpResponse<Buffer> response = result.get(); Assertions.assertNotEquals(HttpVersion.HTTP_2, response.version()); Assertions.assertEquals(PING_DATA, response.bodyAsString()); } @ApplicationScoped static
Http2DisabledTest
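runTest above adapts Vert.x's callback-style send(...) into a CompletableFuture so the test can block on result.get(). The adapter shape is generic; the sketch below applies it to a simulated async operation (the background thread stands in for the real client, purely for illustration):

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.function.BiConsumer;
import java.util.function.Consumer;

public class CallbackToFutureSketch {
    /** Adapts a (value, error) callback API into a CompletableFuture. */
    static <T> CompletableFuture<T> adapt(Consumer<BiConsumer<T, Throwable>> asyncOp) {
        CompletableFuture<T> result = new CompletableFuture<>();
        asyncOp.accept((value, error) -> {
            if (error != null) {
                result.completeExceptionally(error); // mirrors ar.cause() above
            } else {
                result.complete(value);              // mirrors ar.result() above
            }
        });
        return result;
    }

    public static void main(String[] args) throws InterruptedException, ExecutionException {
        CompletableFuture<String> f = adapt(callback ->
                new Thread(() -> callback.accept("pong", null)).start());
        System.out.println(f.get()); // blocks, just like result.get() in the test
    }
}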
java
micronaut-projects__micronaut-core
inject/src/main/java/io/micronaut/context/annotation/Mixin.java
{ "start": 1379, "end": 2838 }
class ____ it's not accessible. In that case the value should be Object.class. * * @return The target of the mixin */ String target() default ""; /** * Filters which annotations are included. The predicate will use {@link String#startsWith(String)} to verify if the annotation name should be included. * * @return The full annotation name or a package to check if the annotation should be included. */ @AliasFor(annotation = Filter.class, member = "includeAnnotations") String[] includeAnnotations() default {}; /** * Opposite of {@link #includeAnnotations()}. Filters which annotations to exclude. * * @return The full annotation name or a package to check if the annotation should not be excluded. */ @AliasFor(annotation = Filter.class, member = "excludeAnnotations") String[] excludeAnnotations() default {}; /** * Remove the annotation from the target element. The predicate will use {@link String#startsWith(String)} to verify if the annotation name should be removed. * * @return The full annotation name or a package to check if the annotation should not be removed. */ @AliasFor(annotation = Filter.class, member = "removeAnnotations") String[] removeAnnotations() default {}; @Experimental @Target({ElementType.TYPE, ElementType.METHOD, ElementType.FIELD, ElementType.PARAMETER}) @Retention(RetentionPolicy.SOURCE) @
if
java
apache__hadoop
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FtpFs.java
{ "start": 1570, "end": 2427 }
class ____ extends DelegateToFileSystem { /** * This constructor has the signature needed by * {@link AbstractFileSystem#createFileSystem(URI, Configuration)}. * * @param theUri which must be that of localFs * @param conf * @throws IOException * @throws URISyntaxException */ FtpFs(final URI theUri, final Configuration conf) throws IOException, URISyntaxException { super(theUri, new FTPFileSystem(), conf, FsConstants.FTP_SCHEME, true); } @Override public int getUriDefaultPort() { return FTP.DEFAULT_PORT; } @Override @Deprecated public FsServerDefaults getServerDefaults() throws IOException { return FtpConfigKeys.getServerDefaults(); } @Override public FsServerDefaults getServerDefaults(final Path f) throws IOException { return FtpConfigKeys.getServerDefaults(); } }
FtpFs
java
spring-projects__spring-framework
spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/JsonViewResponseBodyAdvice.java
{ "start": 2092, "end": 3341 }
class ____ extends AbstractMappingJacksonResponseBodyAdvice { @Override public boolean supports(MethodParameter returnType, Class<? extends HttpMessageConverter<?>> converterType) { return super.supports(returnType, converterType) && returnType.hasMethodAnnotation(JsonView.class); } @Override protected void beforeBodyWriteInternal(MappingJacksonValue bodyContainer, MediaType contentType, MethodParameter returnType, ServerHttpRequest request, ServerHttpResponse response) { bodyContainer.setSerializationView(getJsonView(returnType)); } @Override public @Nullable Map<String, Object> determineWriteHints(@Nullable Object body, MethodParameter returnType, MediaType selectedContentType, Class<? extends HttpMessageConverter<?>> selectedConverterType) { return Collections.singletonMap(JsonView.class.getName(), getJsonView(returnType)); } private static Class<?> getJsonView(MethodParameter returnType) { JsonView ann = returnType.getMethodAnnotation(JsonView.class); Assert.state(ann != null, "No JsonView annotation"); Class<?>[] classes = ann.value(); if (classes.length != 1) { throw new IllegalArgumentException( "@JsonView only supported for response body advice with exactly 1
JsonViewResponseBodyAdvice
java
spring-projects__spring-boot
core/spring-boot/src/main/java/org/springframework/boot/SpringApplication.java
{ "start": 38911, "end": 39074 }
class ____ {@code null} */ public @Nullable Class<?> getMainApplicationClass() { return this.mainApplicationClass; } /** * Set a specific main application
or
java
apache__hadoop
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
{ "start": 10366, "end": 10512 }
interface ____ this daemon.\r\n"; /** * Initial and max size of response buffer */ static int INITIAL_RESP_BUF_SIZE = 10240; static
on
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/querycache/StructuredQueryCacheTest.java
{ "start": 1884, "end": 3061 }
class ____ { @AfterEach public void tearDown(SessionFactoryScope scope){ scope.getSessionFactory().getSchemaManager().truncate(); } @Test @JiraKey( value = "HHH-12107" ) public void testEmbeddedIdInOneToMany(SessionFactoryScope scope) { OneToManyWithEmbeddedIdKey key = new OneToManyWithEmbeddedIdKey( 1234 ); final OneToManyWithEmbeddedId o = new OneToManyWithEmbeddedId( key ); o.setItems( new HashSet<>() ); o.getItems().add( new OneToManyWithEmbeddedIdChild( 1 ) ); scope.inTransaction( session -> session.persist( o ) ); scope.inTransaction( session -> { OneToManyWithEmbeddedId _entity = session.find( OneToManyWithEmbeddedId.class, key ); assertTrue( session.getSessionFactory().getCache().containsEntity( OneToManyWithEmbeddedId.class, key ) ); assertNotNull( _entity ); }); scope.inTransaction( session -> { OneToManyWithEmbeddedId _entity = session.find( OneToManyWithEmbeddedId.class, key ); assertTrue( session.getSessionFactory().getCache().containsEntity( OneToManyWithEmbeddedId.class, key ) ); assertNotNull( _entity ); }); } @Entity(name = "OneToManyWithEmbeddedId") public static
StructuredQueryCacheTest
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/PersistentLongFile.java
{ "start": 1395, "end": 3216 }
class ____ { private static final Logger LOG = LoggerFactory.getLogger( PersistentLongFile.class); private final File file; private final long defaultVal; private long value; private boolean loaded = false; public PersistentLongFile(File file, long defaultVal) { this.file = file; this.defaultVal = defaultVal; } public long get() throws IOException { if (!loaded) { value = readFile(file, defaultVal); loaded = true; } return value; } public void set(long newVal) throws IOException { if (value != newVal || !loaded) { writeFile(file, newVal); } value = newVal; loaded = true; } /** * Atomically write the given value to the given file, including fsyncing. * * @param file destination file * @param val value to write * @throws IOException if the file cannot be written */ public static void writeFile(File file, long val) throws IOException { AtomicFileOutputStream fos = new AtomicFileOutputStream(file); try { fos.write(String.valueOf(val).getBytes(StandardCharsets.UTF_8)); fos.write('\n'); fos.close(); fos = null; } finally { if (fos != null) { fos.abort(); } } } public static long readFile(File file, long defaultVal) throws IOException { long val = defaultVal; if (file.exists()) { BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream( file), StandardCharsets.UTF_8)); try { val = Long.parseLong(br.readLine()); br.close(); br = null; } catch (NumberFormatException e) { throw new IOException(e); } finally { IOUtils.cleanupWithLogger(LOG, br); } } return val; } }
PersistentLongFile
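writeFile above delegates atomicity to AtomicFileOutputStream, aborting on failure so readers never see a torn value. With java.nio the same write-temp-then-rename pattern can be sketched as follows; note this version omits the fsync the HDFS class performs, so it is weaker against power loss (all names illustrative):

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.AtomicMoveNotSupportedException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

public class AtomicWriteSketch {
    static void writeLongAtomically(Path target, long val) throws IOException {
        // Write to a sibling temp file first so readers never observe a partial value.
        Path tmp = target.resolveSibling(target.getFileName() + ".tmp");
        Files.writeString(tmp, val + "\n", StandardCharsets.UTF_8);
        try {
            Files.move(tmp, target, StandardCopyOption.ATOMIC_MOVE,
                    StandardCopyOption.REPLACE_EXISTING);
        } catch (AtomicMoveNotSupportedException e) {
            // Some file systems cannot move atomically; degrade to a plain replace.
            Files.move(tmp, target, StandardCopyOption.REPLACE_EXISTING);
        }
    }

    public static void main(String[] args) throws IOException {
        Path p = Files.createTempDirectory("seen-txid").resolve("seen_txid");
        writeLongAtomically(p, 42L);
        System.out.println(Files.readString(p, StandardCharsets.UTF_8).trim()); // 42
    }
}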
java
quarkusio__quarkus
extensions/kotlin/deployment/src/main/java/io/quarkus/kotlin/deployment/KotlinCompilationProvider.java
{ "start": 5144, "end": 6108 }
class ____ implements MessageCollector { private final List<String> errors = new ArrayList<>(); @Override public void clear() { } @Override public boolean hasErrors() { return !errors.isEmpty(); } public List<String> getErrors() { return errors; } @Override public void report(CompilerMessageSeverity severity, String s, CompilerMessageSourceLocation location) { if (severity.isError()) { if ((location != null) && (location.getLineContent() != null)) { errors.add(String.format("%s%n%s:%d:%d%nReason: %s", location.getLineContent(), location.getPath(), location.getLine(), location.getColumn(), s)); } else { errors.add(s); } } } } }
SimpleKotlinCompilerMessageCollector
java
FasterXML__jackson-databind
src/test/java/tools/jackson/databind/jsontype/Generic1128Test.java
{ "start": 735, "end": 982 }
class ____ extends HObj<DevBase> { public String tag; // for some reason, setter is needed to expose this... public void setTag(String t) { tag = t; } //public String getTag() { return tag; } } static
DevBase
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/ThrowsUncheckedExceptionTest.java
{ "start": 4902, "end": 5179 }
interface ____ { void f() throws IOException, IOError, RuntimeException; } """) .addOutputLines( "out/Test.java", """ import java.io.IOError; import java.io.IOException;
Test
java
apache__dubbo
dubbo-plugin/dubbo-native/src/main/java/org/apache/dubbo/aot/generate/NativeClassSourceWriter.java
{ "start": 1250, "end": 1290 }
class ____ generated. */ public
dynamically
java
apache__hadoop
hadoop-tools/hadoop-gcp/src/main/java/org/apache/hadoop/fs/gs/GoogleCloudStorageExceptions.java
{ "start": 1481, "end": 3293 }
class ____ { private GoogleCloudStorageExceptions() {} /** Creates FileNotFoundException with suitable message for a GCS bucket or object. */ static FileNotFoundException createFileNotFoundException( String bucketName, String objectName, @Nullable IOException cause) { checkArgument(!isNullOrEmpty(bucketName), "bucketName must not be null or empty"); FileNotFoundException fileNotFoundException = new FileNotFoundException( String.format( "Item not found: '%s'. Note, it is possible that the live version" + " is still available but the requested generation is deleted.", StringPaths.fromComponents(bucketName, nullToEmpty(objectName)))); if (cause != null) { fileNotFoundException.initCause(cause); } return fileNotFoundException; } static FileNotFoundException createFileNotFoundException( StorageResourceId resourceId, @Nullable IOException cause) { return createFileNotFoundException( resourceId.getBucketName(), resourceId.getObjectName(), cause); } public static IOException createCompositeException(Collection<IOException> innerExceptions) { Preconditions.checkArgument( innerExceptions != null && !innerExceptions.isEmpty(), "innerExceptions (%s) must be not null and contain at least one element", innerExceptions); Iterator<IOException> innerExceptionIterator = innerExceptions.iterator(); if (innerExceptions.size() == 1) { return innerExceptionIterator.next(); } IOException combined = new IOException("Multiple IOExceptions."); while (innerExceptionIterator.hasNext()) { combined.addSuppressed(innerExceptionIterator.next()); } return combined; } }
GoogleCloudStorageExceptions
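createCompositeException above returns a single failure unchanged and folds multiple failures into one IOException via addSuppressed, so every underlying stack trace stays visible. A compact, runnable sketch of the same folding:

import java.io.IOException;
import java.util.List;

public class CompositeExceptionSketch {
    static IOException combine(List<IOException> failures) {
        if (failures.size() == 1) {
            return failures.get(0); // a single failure is surfaced unchanged
        }
        IOException combined = new IOException("Multiple IOExceptions.");
        for (IOException e : failures) {
            combined.addSuppressed(e); // each cause stays visible in the stack trace
        }
        return combined;
    }

    public static void main(String[] args) {
        IOException e = combine(List.of(new IOException("bucket A"), new IOException("bucket B")));
        System.out.println(e.getSuppressed().length); // 2
    }
}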
java
google__gson
proto/src/main/java/com/google/gson/protobuf/ProtoTypeAdapter.java
{ "start": 2721, "end": 2882 }
class ____ implements JsonSerializer<Message>, JsonDeserializer<Message> { /** Determines how enum <u>values</u> should be serialized. */ public
ProtoTypeAdapter
java
apache__camel
core/camel-core/src/test/java/org/apache/camel/processor/MulticastUnitOfWorkTest.java
{ "start": 2771, "end": 3432 }
class ____ implements Processor { private final String id; private MyUOWProcessor(String id) { this.id = id; } @Override public void process(Exchange exchange) { exchange.getUnitOfWork().addSynchronization(new Synchronization() { public void onComplete(Exchange exchange) { sync = "onComplete" + id; lastOne = sync; } public void onFailure(Exchange exchange) { sync = "onFailure" + id; lastOne = sync; } }); } } }
MyUOWProcessor
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/idclass/NestedIdClassTest.java
{ "start": 1239, "end": 2331 }
class ____ { @Test @JiraKey("HHH-14340") @FailureExpected(reason = "duplicate column in the generated SQL") public void testIdClass(SessionFactoryScope scope) { scope.inTransaction( session -> { Asset asset = new Asset(); asset.setId(1L); asset.setTenantId(2L); AssetTypeAttribute assetTypeAttribute = new AssetTypeAttribute(); assetTypeAttribute.setId(3L); assetTypeAttribute.setName("TestAttribute"); AssetAssetTypeAttribute assetAssetTypeAttribute = new AssetAssetTypeAttribute(); assetAssetTypeAttribute.setAssetTypeAttributeId(assetTypeAttribute.getId()); assetAssetTypeAttribute.setAsset(asset); asset.setAssetAssetTypeAttributes(new HashSet<>()); asset.getAssetAssetTypeAttributes().add(assetAssetTypeAttribute); session.persist(asset); for (AssetAssetTypeAttribute assetAssetTypeAttribute1 : asset.getAssetAssetTypeAttributes()) { session.persist(assetAssetTypeAttribute1); } } ); } @Entity(name = "Asset") @Table(name = "asset") @IdClass(AssetId.class) public static
NestedIdClassTest
java
apache__dubbo
dubbo-spring-boot-project/dubbo-spring-boot-actuator/src/main/java/org/apache/dubbo/spring/boot/actuate/health/DubboHealthIndicatorProperties.java
{ "start": 2457, "end": 3212 }
class ____ { /** * The defaults names of {@link StatusChecker} * <p> * The defaults : "memory", "load" */ private Set<String> defaults = new LinkedHashSet<>(Arrays.asList("memory", "load")); /** * The extra names of {@link StatusChecker} */ private Set<String> extras = new LinkedHashSet<>(); public Set<String> getDefaults() { return defaults; } public void setDefaults(Set<String> defaults) { this.defaults = defaults; } public Set<String> getExtras() { return extras; } public void setExtras(Set<String> extras) { this.extras = extras; } } }
Status
java
spring-projects__spring-security
saml2/saml2-service-provider/src/main/java/org/springframework/security/saml2/provider/service/authentication/Saml2PostAuthenticationRequest.java
{ "start": 1327, "end": 2367 }
class ____ extends AbstractSaml2AuthenticationRequest { @Serial private static final long serialVersionUID = -6412064305715642123L; Saml2PostAuthenticationRequest(String samlRequest, String relayState, String authenticationRequestUri, String relyingPartyRegistrationId, String id) { super(samlRequest, relayState, authenticationRequestUri, relyingPartyRegistrationId, id); } /** * @return {@link Saml2MessageBinding#POST} */ @Override public Saml2MessageBinding getBinding() { return Saml2MessageBinding.POST; } /** * Constructs a {@link Builder} from a {@link RelyingPartyRegistration} object. * @param registration a relying party registration * @return a modifiable builder object * @since 5.7 */ public static Builder withRelyingPartyRegistration(RelyingPartyRegistration registration) { String location = registration.getAssertingPartyMetadata().getSingleSignOnServiceLocation(); return new Builder(registration).authenticationRequestUri(location); } /** * Builder
Saml2PostAuthenticationRequest
java
spring-projects__spring-boot
module/spring-boot-security-oauth2-authorization-server/src/test/java/org/springframework/boot/security/oauth2/server/authorization/autoconfigure/servlet/OAuth2AuthorizationServerAutoConfigurationTests.java
{ "start": 10322, "end": 10544 }
class ____ { @Bean AuthorizationServerSettings authorizationServerSettings() { return AuthorizationServerSettings.builder().issuer("https://example.com").build(); } } }
TestAuthorizationServerSettingsConfiguration
java
spring-projects__spring-framework
spring-core/src/main/java/org/springframework/core/DecoratingClassLoader.java
{ "start": 827, "end": 1132 }
class ____ decorating ClassLoaders such as {@link OverridingClassLoader} * and {@link org.springframework.instrument.classloading.ShadowingClassLoader}, * providing common handling of excluded packages and classes. * * @author Juergen Hoeller * @author Rod Johnson * @since 2.5.2 */ public abstract
for
java
alibaba__druid
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/create/MySqlCreateTable_showColumns_test_3.java
{ "start": 870, "end": 1968 }
class ____ extends MysqlTest { public void test_0() throws Exception { SchemaRepository repository = new SchemaRepository(JdbcConstants.MYSQL); String sql = "create table yushitai_test.card_record ( id bigint auto_increment) auto_increment=256 " + "alter table yushitai_test.card_record add index index_name(name) ;" + "alter table yushitai_test.card_record add index index_name(name) ;" + "alter table yushitai_test.card_record add Constraint pk_id PRIMARY KEY (id);" + "alter table yushitai_test.card_record add Constraint pk_id PRIMARY KEY (id);"; repository.console(sql); repository.setDefaultSchema("yushitai_test"); SchemaObject table = repository.findTable("card_record"); assertEquals("CREATE TABLE card_record (\n" + "\tid bigint AUTO_INCREMENT,\n" + "\tINDEX index_name(name),\n" + "\tPRIMARY KEY (id)\n" + ") AUTO_INCREMENT = 256", table.getStatement().toString()); } }
MySqlCreateTable_showColumns_test_3
java
google__guava
android/guava-tests/test/com/google/common/collect/ComparisonChainTest.java
{ "start": 986, "end": 1125 }
class ____ extends TestCase { private static final DontCompareMe DONT_COMPARE_ME = new DontCompareMe(); private static
ComparisonChainTest
java
elastic__elasticsearch
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/notifications/DataFrameAnalyticsAuditMessageTests.java
{ "start": 455, "end": 1228 }
class ____ extends AuditMessageTests<DataFrameAnalyticsAuditMessage> { @Override public String getJobType() { return "data_frame_analytics"; } @Override protected DataFrameAnalyticsAuditMessage doParseInstance(XContentParser parser) { return DataFrameAnalyticsAuditMessage.PARSER.apply(parser, null); } @Override protected DataFrameAnalyticsAuditMessage createTestInstance() { return new DataFrameAnalyticsAuditMessage( randomBoolean() ? null : randomAlphaOfLength(10), randomAlphaOfLengthBetween(1, 20), randomFrom(Level.values()), new Date(), randomBoolean() ? null : randomAlphaOfLengthBetween(1, 20) ); } }
DataFrameAnalyticsAuditMessageTests
java
FasterXML__jackson-databind
src/test/java/tools/jackson/databind/deser/bean/BeanDeserializerModifier4216Test.java
{ "start": 646, "end": 2254 }
class ____ { public Byte[] objArr; public byte[] primArr; } @Test public void testModifierCalledTwice() throws Exception { // Given : Configure and construct AtomicInteger counter = new AtomicInteger(0); ObjectMapper objectMapper = jsonMapperBuilder() .addModules(getSimpleModuleWithCounter(counter)) .build(); // Given : Set-up data WrapperBean4216 test = new WrapperBean4216(); test.primArr = new byte[]{(byte) 0x11}; test.objArr = new Byte[]{(byte) 0x11}; String sample = objectMapper.writeValueAsString(test); // When objectMapper.readValue(sample, WrapperBean4216.class); // Then : modifyArrayDeserializer should be called twice assertEquals(2, counter.get()); } private static SimpleModule getSimpleModuleWithCounter(AtomicInteger counter) { SimpleModule module = new SimpleModule(); module.setDeserializerModifier( new ValueDeserializerModifier() { private static final long serialVersionUID = 1L; @Override public ValueDeserializer<?> modifyArrayDeserializer(DeserializationConfig config, ArrayType valueType, BeanDescription.Supplier beanDescRef, ValueDeserializer<?> deserializer) { // Count invocations counter.incrementAndGet(); return deserializer; } }); return module; } }
WrapperBean4216
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-mawo/hadoop-yarn-applications-mawo-core/src/main/java/org/apache/hadoop/applications/mawo/server/common/TaskId.java
{ "start": 1147, "end": 3507 }
class ____ implements Writable { /** * MaWo TaskIds prefix. */ static final String TASK_ID_PREFIX = "mawo_task_"; /** * MaWo Job ID. */ private JobId jobId = new JobId(); /** * Mawo TaskId. */ private long taskId; /** * TaskId constructor. */ public TaskId() { } /** * TaskId constructor with jobId and taskId. * @param localjobId : Job identifier * @param id : Task identifier */ public TaskId(final JobId localjobId, final int id) { this.jobId = localjobId; this.taskId = id; } /** * Getter method for jobId. * @return JobID: Job identifier */ public final int getJobId() { return jobId.getID(); } /** * Getter method for TaskID. * @return TaskId: Task identifier */ public final long getId() { return taskId; } /** * Print method for TaskId. * @return : Full TaskId which is TaskId_prefix + jobId + _ + TaskId */ public final String toString() { return TASK_ID_PREFIX + jobId.getID() + "_" + taskId; } @Override /** * Hashcode method for TaskId. */ public final int hashCode() { final int prime = 31; final int bits = 32; int result = 1; int jobHash = 0; if (jobId == null) { jobHash = 0; } else { jobHash = jobId.hashCode(); } result = prime * result + jobHash; result = prime * result + (int) (taskId ^ (taskId >>> bits)); return result; } @Override /** * Equal method override for TaskId. */ public final boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } TaskId other = (TaskId) obj; if (jobId == null) { if (other.jobId != null) { return false; } } else if (!jobId.equals(other.jobId)) { return false; } if (taskId != other.taskId) { return false; } return true; } /** {@inheritDoc} */ public final void write(final DataOutput out) throws IOException { jobId.write(out); WritableUtils.writeVLong(out, taskId); } /** {@inheritDoc} */ public final void readFields(final DataInput in) throws IOException { jobId = new JobId(); jobId.readFields(in); this.taskId = WritableUtils.readVLong(in); } }
TaskId
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/sql/Template.java
{ "start": 1501, "end": 15786 }
class ____ { private static final Set<String> KEYWORDS = Set.of( "and", "or", "not", "like", "escape", "is", "in", "between", "null", "select", "distinct", "from", "join", "inner", "outer", "left", "right", "on", "where", "having", "group", "order", "by", "desc", "asc", "limit", "any", "some", "exists", "all", "union", "minus", "except", "intersect", "partition"); private static final Set<String> BEFORE_TABLE_KEYWORDS = Set.of("from", "join"); private static final Set<String> FUNCTION_KEYWORDS = Set.of("as", "leading", "trailing", "from", "case", "when", "then", "else", "end"); private static final Set<String> FUNCTION_WITH_FROM_KEYWORDS = Set.of("extract", "trim"); private static final Set<String> SOFT_KEYWORDS = Set.of("date", "time"); private static final Set<String> LITERAL_PREFIXES = Set.of("n", "x", "varbyte", "bx", "bytea", "date", "time", "timestamp", "zone"); private static final Set<String> FETCH_BIGRAMS = Set.of("first", "next"); private static final Set<String> CURRENT_BIGRAMS = Set.of("date", "time", "timestamp"); private static final String PUNCTUATION = "=><!+-*/()',|&`"; public static final String TEMPLATE = "{@}"; private Template() {} public static String renderTransformerReadFragment( String fragment, String... columnNames) { // NOTE : would need access to SessionFactoryImplementor to make this configurable for ( String columnName : columnNames ) { fragment = fragment.replace( columnName, TEMPLATE + '.' + columnName ); } return fragment; } /** * Takes the SQL fragment provided in the mapping attribute and interpolates the default * {@linkplain #TEMPLATE placeholder value}, which is {@value #TEMPLATE}, using it to * qualify every unqualified column name. * <p> * Handles subselects, quoted identifiers, quoted strings, expressions, SQL functions, * named parameters, literals. * * @param sql The SQL string into which to interpolate the placeholder value * @param dialect The dialect to apply * @return The rendered SQL fragment */ public static String renderWhereStringTemplate( String sql, Dialect dialect, TypeConfiguration typeConfiguration) { return renderWhereStringTemplate( sql, TEMPLATE, dialect, typeConfiguration ); } /** * Takes the SQL fragment provided in the mapping attribute and interpolates the given * alias, using it to qualify every unqualified column name. * <p> * Handles subselects, quoted identifiers, quoted strings, expressions, SQL functions, * named parameters, literals. * * @param sql The SQL string into which to interpolate the alias value * @param alias The alias to be interpolated into the SQL * @param dialect The dialect to apply * @return The rendered SQL fragment */ public static String renderWhereStringTemplate( String sql, String alias, Dialect dialect, TypeConfiguration typeConfiguration) { // IMPL NOTE: The basic process here is to tokenize the incoming string and to iterate over each token // in turn. As we process each token, we set a series of flags used to indicate the type of context in // which the tokens occur. Depending on the state of those flags, we decide whether we need to qualify // identifier references. // WARNING TO MAINTAINERS: This is a simple scanner-based state machine. Please don't attempt to turn it into // a parser for SQL, no matter how "special" your case is. What I mean by this is: don't write code which // attempts to recognize the grammar of SQL, not even little bits of SQL. Previous "enhancements" to this // function did not respect this concept and resulted in code which was fragile and unmaintainable. If // lookahead is truly necessary, use the lookahead() function provided below. final String symbols = PUNCTUATION + WHITESPACE + dialect.openQuote() + dialect.closeQuote(); final var tokens = new StringTokenizer( sql, symbols, true ); final var result = new StringBuilder(); boolean quoted = false; boolean quotedIdentifier = false; boolean beforeTable = false; boolean inFromClause = false; boolean afterFromTable = false; boolean afterCastAs = false; boolean afterFetch = false; boolean afterCurrent = false; int inExtractOrTrim = -1; int inCast = -1; int nestingLevel = 0; boolean hasMore = tokens.hasMoreTokens(); String nextToken = hasMore ? tokens.nextToken() : null; String token = null; String previousToken; while ( hasMore ) { previousToken = token; token = nextToken; String lcToken = token.toLowerCase(Locale.ROOT); hasMore = tokens.hasMoreTokens(); nextToken = hasMore ? tokens.nextToken() : null; boolean isQuoteCharacter = false; if ( !quotedIdentifier && "'".equals(token) ) { quoted = !quoted; isQuoteCharacter = true; } if ( !quoted ) { final boolean isOpenQuote; if ( "`".equals(token) ) { isOpenQuote = !quotedIdentifier; token = lcToken = isOpenQuote ? Character.toString( dialect.openQuote() ) : Character.toString( dialect.closeQuote() ); quotedIdentifier = isOpenQuote; isQuoteCharacter = true; } else if ( !quotedIdentifier && dialect.openQuote()==token.charAt(0) ) { isOpenQuote = true; quotedIdentifier = true; isQuoteCharacter = true; } else if ( quotedIdentifier && dialect.closeQuote()==token.charAt(0) ) { quotedIdentifier = false; isQuoteCharacter = true; isOpenQuote = false; } else { isOpenQuote = false; } if ( isOpenQuote && !inFromClause // don't want to append alias to tokens inside the FROM clause && !endsWithDot( previousToken ) ) { result.append( alias ).append( '.' ); } } final boolean isWhitespace = token.isBlank(); // handle bigrams here final boolean wasAfterFetch = afterFetch; afterFetch = afterFetch && isWhitespace; final boolean wasAfterCurrent = afterCurrent; afterCurrent = afterCurrent && isWhitespace; final String processedToken; final boolean isQuoted = quoted || quotedIdentifier || isQuoteCharacter; if ( isQuoted || isWhitespace ) { processedToken = token; } else if ( beforeTable ) { processedToken = token; beforeTable = false; afterFromTable = true; } else if ( afterFromTable ) { afterFromTable = "as".equals(lcToken); processedToken = token; } else if ( "(".equals(lcToken) ) { nestingLevel ++; processedToken = token; } else if ( ")".equals(lcToken) ) { nestingLevel --; if ( nestingLevel == inExtractOrTrim ) { inExtractOrTrim = -1; } if ( nestingLevel == inCast ) { inCast = -1; afterCastAs = false; } processedToken = token; } else if ( ",".equals(lcToken) ) { if ( inFromClause ) { beforeTable = true; } processedToken = token; } else if ( lcToken.length()==1 && symbols.contains(lcToken) ) { processedToken = token; } else if ( BEFORE_TABLE_KEYWORDS.contains(lcToken) ) { if ( inExtractOrTrim == -1 ) { beforeTable = true; inFromClause = true; } processedToken = token; } else if ( inFromClause || afterCastAs ) { // Don't want to append alias to: // 1. tokens inside the FROM clause // 2. type names after 'CAST(expression AS' processedToken = token; } else if ( isNamedParameter(token) ) { processedToken = token; } else if ( "as".equals( lcToken ) ) { processedToken = token; afterCastAs = inCast>-1; } else if ( isFetch( dialect, lcToken ) ) { processedToken = token; afterFetch = true; } else if ( wasAfterFetch && FETCH_BIGRAMS.contains( lcToken ) ) { processedToken = token; } else if ( isCurrent( lcToken, nextToken, sql, symbols, tokens ) ) { processedToken = token; afterCurrent = true; } else if ( isBoolean( lcToken ) ) { processedToken = dialect.toBooleanValueString( parseBoolean( token ) ); } else if ( isFunctionCall( nextToken, sql, symbols, tokens ) ) { if ( FUNCTION_WITH_FROM_KEYWORDS.contains( lcToken ) ) { inExtractOrTrim = nestingLevel; } if ( "cast".equals( lcToken ) ) { inCast = nestingLevel; } processedToken = token; } else if ( isAliasableIdentifier( token, lcToken, nextToken, sql, symbols, tokens, wasAfterCurrent, dialect, typeConfiguration ) ) { processedToken = alias + '.' + dialect.quote(token); } else { processedToken = token; } result.append( processedToken ); //Yuck: if ( inFromClause && KEYWORDS.contains( lcToken ) // "as" is not in KEYWORDS && !BEFORE_TABLE_KEYWORDS.contains( lcToken ) ) { inFromClause = false; } } return result.toString(); } private static boolean isAliasableIdentifier( String token, String lcToken, String nextToken, String sql, String symbols, StringTokenizer tokens, boolean wasAfterCurrent, Dialect dialect, TypeConfiguration typeConfiguration) { return isUnqualifiedIdentifier( token ) && !isKeyword( lcToken, wasAfterCurrent, dialect, typeConfiguration ) && !isLiteral( lcToken, nextToken, sql, symbols, tokens ); } private static boolean isFunctionCall( String nextToken, String sql, String symbols, StringTokenizer tokens) { if ( nextToken == null ) { return false; } else { return nextToken.isBlank() ? lookPastBlankTokens( sql, symbols, tokens, 1, "("::equals ) : "(".equals( nextToken ); } } private static boolean isCurrent( String lcToken, String nextToken, String sql, String symbols, StringTokenizer tokens) { return "current".equals( lcToken ) && nextToken.isBlank() && lookPastBlankTokens( sql, symbols, tokens, 1, CURRENT_BIGRAMS::contains ); } private static boolean isFetch(Dialect dialect, String lcToken) { return "fetch".equals( lcToken ) && dialect.getKeywords().contains( "fetch" ); } private static boolean endsWithDot(String token) { return token != null && token.endsWith( "." ); } private static boolean isLiteral( String lcToken, String next, String sqlWhereString, String symbols, StringTokenizer tokens) { if ( next == null ) { return false; } else if ( LITERAL_PREFIXES.contains( lcToken ) ) { if ( next.isBlank() ) { // we need to look ahead in the token stream // to find the first non-blank token return lookPastBlankTokens( sqlWhereString, symbols, tokens, 1, nextToken -> "'".equals(nextToken) || lcToken.equals("time") && "with".equals(nextToken) || lcToken.equals("timestamp") && "with".equals(nextToken) || lcToken.equals("time") && "zone".equals(nextToken) ); } else { return "'".equals(next); } } else { return false; } } private static boolean lookPastBlankTokens( String sqlWhereString, String symbols, StringTokenizer tokens, @SuppressWarnings("SameParameterValue") int skip, Function<String, Boolean> check) { final var lookahead = lookahead( sqlWhereString, symbols, tokens, skip ); if ( lookahead.hasMoreTokens() ) { String nextToken; do { nextToken = lookahead.nextToken().toLowerCase(Locale.ROOT); } while ( nextToken.isBlank() && lookahead.hasMoreTokens() ); return check.apply( nextToken ); } else { return false; } } /** * Clone the given token stream, returning a token stream which begins * from the next token. * * @param sql the full SQL we are scanning * @param symbols the delimiter symbols * @param tokens the current token stream * @param skip the number of tokens to skip * @return a cloned token stream */ private static StringTokenizer lookahead(String sql, String symbols, StringTokenizer tokens, int skip) { final var lookahead = new StringTokenizer( sql, symbols, true ); while ( lookahead.countTokens() > tokens.countTokens() + skip ) { lookahead.nextToken(); } return lookahead; } public static List<String> collectColumnNames(String sql, Dialect dialect, TypeConfiguration typeConfiguration) { return collectColumnNames( renderWhereStringTemplate( sql, dialect, typeConfiguration ) ); } public static List<String> collectColumnNames(String template) { final List<String> names = new ArrayList<>(); int begin = 0; int match; while ( ( match = template.indexOf(TEMPLATE, begin) ) >= 0 ) { final int start = match + TEMPLATE.length() + 1; for ( int loc = start;; loc++ ) { if ( loc == template.length() - 1 ) { names.add( template.substring( start ) ); begin = template.length(); break; } else { final char ch = template.charAt( loc ); if ( PUNCTUATION.indexOf(ch) >= 0 || WHITESPACE.indexOf(ch) >= 0 ) { names.add( template.substring( start, loc ) ); begin = loc; break; } } } } return names; } private static boolean isNamedParameter(String token) { return token.charAt(0) == ':'; } private static boolean isKeyword( String lcToken, boolean afterCurrent, Dialect dialect, TypeConfiguration typeConfiguration) { if ( SOFT_KEYWORDS.contains( lcToken ) ) { // these can be column names on some databases // but treat 'current date', 'current time' bigrams as keywords return afterCurrent; } else { return KEYWORDS.contains( lcToken ) || isType( lcToken, typeConfiguration ) || dialect.getKeywords().contains( lcToken ) || FUNCTION_KEYWORDS.contains( lcToken ); } } private static boolean isType(String lcToken, TypeConfiguration typeConfiguration) { return typeConfiguration.getDdlTypeRegistry().isTypeNameRegistered( lcToken ); } private static boolean isUnqualifiedIdentifier(String token) { final char initialChar = token.charAt( 0 ); return initialChar == '`' // allow any identifier quoted with backtick || isLetter( initialChar ) // only recognizes identifiers beginning with a letter && token.indexOf( '.' ) < 0; // don't qualify already-qualified identifiers } private static boolean isBoolean(String lcToken) { return switch ( lcToken ) { case "true", "false" -> true; default -> false; }; } }
Template
java
spring-projects__spring-framework
spring-jdbc/src/main/java/org/springframework/jdbc/datasource/DataSourceUtils.java
{ "start": 1256, "end": 2197 }
class ____ provides static methods for obtaining JDBC {@code Connection}s * from a {@link javax.sql.DataSource}. Includes special support for Spring-managed * transactional {@code Connection}s, for example, managed by {@link DataSourceTransactionManager} * or {@link org.springframework.transaction.jta.JtaTransactionManager}. * * <p>Used internally by Spring's {@link org.springframework.jdbc.core.JdbcTemplate}, * Spring's JDBC operation objects and the JDBC {@link DataSourceTransactionManager}. * Can also be used directly in application code. * * @author Rod Johnson * @author Juergen Hoeller * @see #getConnection * @see #releaseConnection * @see org.springframework.jdbc.core.JdbcTemplate * @see org.springframework.jdbc.support.JdbcTransactionManager * @see org.springframework.transaction.jta.JtaTransactionManager * @see org.springframework.transaction.support.TransactionSynchronizationManager */ public abstract
that
java
quarkusio__quarkus
extensions/kubernetes/spi/src/main/java/io/quarkus/kubernetes/spi/KubernetesInitContainerBuildItem.java
{ "start": 2056, "end": 6598 }
enum ____ the classpath List<String> command, List<String> arguments, Map<String, String> envVars, boolean sharedEnvironment, boolean sharedFilesystem) { this.name = name; this.target = target; this.image = image; this.imagePullPolicy = imagePullPolicy; this.command = command; this.arguments = arguments; this.envVars = envVars; this.sharedEnvironment = sharedEnvironment; this.sharedFilesystem = sharedFilesystem; } public String getName() { return name; } public KubernetesInitContainerBuildItem withName(String name) { return new KubernetesInitContainerBuildItem(name, target, image, imagePullPolicy, command, arguments, envVars, sharedEnvironment, sharedFilesystem); } public String getTarget() { return target; } public KubernetesInitContainerBuildItem withTarget(String target) { return new KubernetesInitContainerBuildItem(name, target, image, imagePullPolicy, command, arguments, envVars, sharedEnvironment, sharedFilesystem); } public String getImage() { return image; } @SuppressWarnings("unused") public KubernetesInitContainerBuildItem withImage(String image) { return new KubernetesInitContainerBuildItem(name, target, image, imagePullPolicy, command, arguments, envVars, sharedEnvironment, sharedFilesystem); } public String getImagePullPolicy() { return imagePullPolicy; } public KubernetesInitContainerBuildItem withImagePullPolicy(String imagePullPolicy) { return new KubernetesInitContainerBuildItem(name, target, image, imagePullPolicy, command, arguments, envVars, sharedEnvironment, sharedFilesystem); } public List<String> getCommand() { return command; } public KubernetesInitContainerBuildItem withCommand(List<String> command) { return new KubernetesInitContainerBuildItem(name, target, image, imagePullPolicy, command, arguments, envVars, sharedEnvironment, sharedFilesystem); } public List<String> getArguments() { return arguments; } public KubernetesInitContainerBuildItem withArguments(List<String> arguments) { return new KubernetesInitContainerBuildItem(name, target, image, imagePullPolicy, command, arguments, envVars, sharedEnvironment, sharedFilesystem); } public Map<String, String> getEnvVars() { return envVars; } @SuppressWarnings("unused") public KubernetesInitContainerBuildItem withEnvVars(Map<String, String> envVars) { return new KubernetesInitContainerBuildItem(name, target, image, imagePullPolicy, command, arguments, envVars, sharedEnvironment, sharedFilesystem); } /** * Flag for tasks that require access to the environment variables of the application. * Often tasks need to access resources, configured via environment variables. This * flag expresses that the task should be executed using the same environment variables as the application. * * @return true when the task is meant to share environment variables with the application. */ public boolean isSharedEnvironment() { return sharedEnvironment; } @SuppressWarnings("unused") public KubernetesInitContainerBuildItem withSharedEnvironment(boolean sharedEnvironment) { return new KubernetesInitContainerBuildItem(name, target, image, imagePullPolicy, command, arguments, envVars, sharedEnvironment, sharedFilesystem); } /** * Flag for tasks that need to share filesystem with the application. * Often tasks need to access resources, configured via filesystem (e.g. local config files, kubernetes service binding * etc). * In other cases, tasks may need to produce files needed by the application. * This flag expresses that the task should share filesystem with the application. * * @return true when the task is meant to share filesystem. */ public boolean isSharedFilesystem() { return sharedFilesystem; } @SuppressWarnings("unused") public KubernetesInitContainerBuildItem withSharedFilesystem(boolean sharedFilesystem) { return new KubernetesInitContainerBuildItem(name, target, image, imagePullPolicy, command, arguments, envVars, sharedEnvironment, sharedFilesystem); } }
in
java
elastic__elasticsearch
x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/optimizer/Optimizer.java
{ "start": 15600, "end": 19675 }
class ____ { private final Expression condition; private final KeyedFilter keyedFilter; private final int keyPosition; Constraint(Expression condition, KeyedFilter filter, int keyPosition) { this.condition = condition; this.keyedFilter = filter; this.keyPosition = keyPosition; } Expression constraintFor(KeyedFilter keyed) { if (keyed == keyedFilter) { return null; } Expression localKey = keyed.keys().get(keyPosition); Expression key = keyedFilter.keys().get(keyPosition); Expression newCond = condition.transformDown(e -> key.semanticEquals(e) ? localKey : e); return newCond; } @Override public String toString() { return condition.toString(); } } @Override protected LogicalPlan rule(AbstractJoin join) { List<Constraint> constraints = new ArrayList<>(); // collect constraints for each filter join.queries().forEach(k -> k.forEachDown(Filter.class, f -> constraints.addAll(detectKeyConstraints(f.condition(), k)))); if (constraints.isEmpty() == false) { List<KeyedFilter> queries = join.queries().stream().map(k -> addConstraint(k, constraints)).collect(toList()); if (join instanceof Join j) { join = j.with(queries, j.until(), j.direction()); } else if (join instanceof Sample sample) { join = sample.with(queries); } } return join; } private static List<Constraint> detectKeyConstraints(Expression condition, KeyedFilter filter) { List<Constraint> constraints = new ArrayList<>(); List<? extends NamedExpression> keys = filter.keys(); List<Expression> and = Predicates.splitAnd(condition); for (Expression exp : and) { // if the expression only involves filter keys, it's simple enough (e.g. there are no conjunctions), and at least one key // matches, save the expression along with the key and its ordinal so it can be replaced if (exp.anyMatch(Or.class::isInstance)) { continue; } // expressions that involve attributes other than the keys have to be discarded if (exp.anyMatch(x -> x instanceof Attribute && keys.stream().noneMatch(k -> x.semanticEquals(k)))) { continue; } exp.anyMatch(e -> { for (int i = 0; i < keys.size(); i++) { Expression key = keys.get(i); if (e.semanticEquals(key)) { constraints.add(new Constraint(exp, filter, i)); return true; } } return false; }); } return constraints; } // adapt constraint to the given filter by replacing the keys accordingly in the expressions private static KeyedFilter addConstraint(KeyedFilter k, List<Constraint> constraints) { Expression constraint = Predicates.combineAnd( constraints.stream().map(c -> c.constraintFor(k)).filter(Objects::nonNull).collect(toList()) ); return constraint != null ? new KeyedFilter( k.source(), new Filter(k.source(), k.child(), constraint), k.keys(), k.timestamp(), k.tiebreaker(), k.isMissingEventFilter() ) : k; } } /** * Align the implicit order with the limit (head means ASC or tail means DESC). */ static final
Constraint
java
google__auto
value/src/main/java/com/google/auto/value/AutoValue.java
{ "start": 1588, "end": 1698 }
interface ____ { /** * Specifies that AutoValue should generate an implementation of the annotated
AutoValue
java
assertj__assertj-core
assertj-core/src/test/java/org/assertj/core/api/float_/FloatAssert_isCloseToPercentage_Float_Test.java
{ "start": 893, "end": 1360 }
class ____ extends FloatAssertBaseTest { private final Percentage percentage = withPercentage(5.0f); private final Float value = 10.0f; @Override protected FloatAssert invoke_api_method() { return assertions.isCloseTo(value, percentage); } @Override protected void verify_internal_effects() { verify(floats).assertIsCloseToPercentage(getInfo(assertions), getActual(assertions), value, percentage); } }
FloatAssert_isCloseToPercentage_Float_Test
java
apache__flink
flink-runtime/src/test/java/org/apache/flink/streaming/api/operators/MailboxWatermarkProcessorTest.java
{ "start": 3929, "end": 5025 }
class ____ implements InternalTimeServiceManager<Object> { @Override public <N> InternalTimerService<N> getInternalTimerService( String name, TypeSerializer<Object> keySerializer, TypeSerializer<N> namespaceSerializer, Triggerable<Object, N> triggerable) { throw new UnsupportedOperationException(); } @Override public void advanceWatermark(Watermark watermark) throws Exception { throw new UnsupportedOperationException(); } @Override public boolean tryAdvanceWatermark( Watermark watermark, ShouldStopAdvancingFn shouldStopAdvancingFn) throws Exception { return !shouldStopAdvancingFn.test(); } @Override public void snapshotToRawKeyedState( KeyedStateCheckpointOutputStream stateCheckpointOutputStream, String operatorName) throws Exception { throw new UnsupportedOperationException(); } } }
NoOpInternalTimeServiceManager
java
apache__flink
flink-core/src/test/java/org/apache/flink/testutils/ClassLoaderUtils.java
{ "start": 12090, "end": 13011 }
class ____ extends java.lang.Exception {}"); } private static <T> ObjectAndClassLoader<T> createObjectFromNewClassLoader( String testClassName, Class<T> testClass, String source) { final Path classDirPath = new File(System.getProperty("java.io.tmpdir"), UUID.randomUUID().toString()) .toPath(); URLClassLoader classLoader = null; try { Files.createDirectories(classDirPath); classLoader = compileAndLoadJava(classDirPath.toFile(), testClassName, source); final Class<?> clazz = classLoader.loadClass(testClassName); final T object = clazz.asSubclass(testClass).getDeclaredConstructor().newInstance(); return new ObjectAndClassLoader<>(object, classLoader); } catch (Exception e) { throw new RuntimeException("Cannot create test
TestExceptionForSerialization
java
apache__kafka
server-common/src/main/java/org/apache/kafka/server/share/persister/PartitionAllData.java
{ "start": 994, "end": 1037 }
interface ____ {@link Persister}. */ public
to
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/entitygraph/ast/CriteriaEntityGraphTest.java
{ "start": 19234, "end": 19276 }
class ____ { @Id String name; } }
Country
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
{ "start": 37829, "end": 38714 }
class ____ implements FileSystemAccess.FileSystemExecutor<Void> { private Path path; private List<AclEntry> aclEntries; /** * Creates a set-acl executor. * * @param path path to set the acl. * @param aclSpec acl to set. */ public FSSetAcl(String path, String aclSpec) { this.path = new Path(path); this.aclEntries = AclEntry.parseAclSpec(aclSpec, true); } /** * Executes the filesystem operation. * * @param fs filesystem instance to use. * * @return void. * * @throws IOException thrown if an IO error occurred. */ @Override public Void execute(FileSystem fs) throws IOException { fs.setAcl(path, aclEntries); return null; } } /** * Executor that removes all acls from a file in a FileSystem */ @InterfaceAudience.Private public static
FSSetAcl
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/RedundantOverrideTest.java
{ "start": 2753, "end": 3052 }
class ____ { public boolean frob(Object o) { return false; } } """) .doTest(); } @Test public void considersParameterOrder() { testHelper .addSourceLines( "A.java", """
Bar
java
apache__hadoop
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DU.java
{ "start": 2074, "end": 3286 }
class ____ extends Shell { void startRefresh() throws IOException { run(); } @Override public String toString() { return "du -sk " + getDirPath() + "\n" + used.get() + "\t" + getDirPath(); } @Override protected String[] getExecString() { return new String[]{"du", "-sk", getDirPath()}; } @Override protected void parseExecResult(BufferedReader lines) throws IOException { String line = lines.readLine(); if (line == null) { throw new IOException("Expecting a line not the end of stream"); } String[] tokens = line.split("\t"); if (tokens.length == 0) { throw new IOException("Illegal du output"); } setUsed(Long.parseLong(tokens[0]) * 1024); } } public static void main(String[] args) throws Exception { String path = "."; if (args.length > 0) { path = args[0]; } GetSpaceUsed du = new GetSpaceUsed.Builder().setPath(new File(path)) .setConf(new Configuration()) .build(); String duResult = du.toString(); System.out.println(duResult); } }
DUShell
java
apache__dubbo
dubbo-plugin/dubbo-native/src/main/java/org/apache/dubbo/aot/generate/ResourceConfigWriter.java
{ "start": 1367, "end": 4310 }
class ____ { public static final ResourceConfigWriter INSTANCE = new ResourceConfigWriter(); public void write(BasicJsonWriter writer, ResourceConfigMetadataRepository repository) { Map<String, Object> attributes = new LinkedHashMap<>(); addIfNotEmpty(attributes, "resources", toAttributes(repository.getIncludes(), repository.getExcludes())); handleResourceBundles(attributes, repository.getResourceBundles()); writer.writeObject(attributes); } private Map<String, Object> toAttributes( List<ResourcePatternDescriber> includes, List<ResourcePatternDescriber> excludes) { Map<String, Object> attributes = new LinkedHashMap<>(); addIfNotEmpty( attributes, "includes", includes.stream().distinct().map(this::toAttributes).collect(Collectors.toList())); addIfNotEmpty( attributes, "excludes", excludes.stream().distinct().map(this::toAttributes).collect(Collectors.toList())); return attributes; } private void handleResourceBundles( Map<String, Object> attributes, Set<ResourceBundleDescriber> resourceBundleDescribers) { addIfNotEmpty( attributes, "bundles", resourceBundleDescribers.stream().map(this::toAttributes).collect(Collectors.toList())); } private Map<String, Object> toAttributes(ResourceBundleDescriber describer) { Map<String, Object> attributes = new LinkedHashMap<>(); handleCondition(attributes, describer); attributes.put("name", describer.getName()); return attributes; } private Map<String, Object> toAttributes(ResourcePatternDescriber describer) { Map<String, Object> attributes = new LinkedHashMap<>(); handleCondition(attributes, describer); attributes.put("pattern", describer.toRegex().toString()); return attributes; } private void addIfNotEmpty(Map<String, Object> attributes, String name, Object value) { if (value instanceof Collection<?>) { if (!((Collection<?>) value).isEmpty()) { attributes.put(name, value); } } else if (value instanceof Map<?, ?>) { if (!((Map<?, ?>) value).isEmpty()) { attributes.put(name, value); } } else if (value != null) { attributes.put(name, value); } } private void handleCondition(Map<String, Object> attributes, ConditionalDescriber conditionalDescriber) { if (conditionalDescriber.getReachableType() != null) { Map<String, Object> conditionAttributes = new LinkedHashMap<>(); conditionAttributes.put("typeReachable", conditionalDescriber.getReachableType()); attributes.put("condition", conditionAttributes); } } }
ResourceConfigWriter
java
apache__kafka
trogdor/src/test/java/org/apache/kafka/trogdor/workload/TimeIntervalTransactionsGeneratorTest.java
{ "start": 997, "end": 1990 }
class ____ { @Test public void testCommitsTransactionAfterIntervalPasses() { MockTime time = new MockTime(); TimeIntervalTransactionsGenerator generator = new TimeIntervalTransactionsGenerator(100, time); assertEquals(100, generator.transactionIntervalMs()); assertEquals(TransactionGenerator.TransactionAction.BEGIN_TRANSACTION, generator.nextAction()); assertEquals(TransactionGenerator.TransactionAction.NO_OP, generator.nextAction()); time.sleep(50); assertEquals(TransactionGenerator.TransactionAction.NO_OP, generator.nextAction()); time.sleep(49); assertEquals(TransactionGenerator.TransactionAction.NO_OP, generator.nextAction()); time.sleep(1); assertEquals(TransactionGenerator.TransactionAction.COMMIT_TRANSACTION, generator.nextAction()); assertEquals(TransactionGenerator.TransactionAction.BEGIN_TRANSACTION, generator.nextAction()); } }
TimeIntervalTransactionsGeneratorTest
java
apache__kafka
tools/src/main/java/org/apache/kafka/tools/filter/PartitionFilter.java
{ "start": 1466, "end": 1812 }
class ____ implements PartitionFilter { private final int partition; public UniquePartitionFilter(int partition) { this.partition = partition; } @Override public boolean isPartitionAllowed(int partition) { return partition == this.partition; } }
UniquePartitionFilter
java
quarkusio__quarkus
extensions/smallrye-reactive-messaging-pulsar/runtime/src/main/java/io/quarkus/pulsar/schema/BufferSchema.java
{ "start": 334, "end": 1041 }
class ____ extends AbstractSchema<Buffer> { public static final BufferSchema INSTANCE = new BufferSchema(); private static final SchemaInfo SCHEMA_INFO = SchemaInfoImpl.builder() .name("Buffer") .type(SchemaType.BYTES) .schema(new byte[0]).build(); @Override public Buffer decode(ByteBuf byteBuf) { if (byteBuf == null) return null; return Buffer.buffer(byteBuf); } @Override public byte[] encode(Buffer message) { if (message == null) return null; return message.getBytes(); } @Override public SchemaInfo getSchemaInfo() { return SCHEMA_INFO; } }
BufferSchema
java
spring-projects__spring-framework
spring-core/src/main/java/org/springframework/cglib/core/KeyFactory.java
{ "start": 4818, "end": 6414 }
class ____ since the key object still holds a strong reference to the Object and class. * It is recommended to have a pre-processing method that would strip Objects and represent Classes as Strings */ @Deprecated public static final Customizer OBJECT_BY_CLASS = (e, type) -> e.invoke_virtual(Constants.TYPE_OBJECT, GET_CLASS); protected KeyFactory() { } public static KeyFactory create(Class keyInterface) { return create(keyInterface, null); } public static KeyFactory create(Class keyInterface, Customizer customizer) { return create(keyInterface.getClassLoader(), keyInterface, customizer); } public static KeyFactory create(Class keyInterface, KeyFactoryCustomizer first, List<KeyFactoryCustomizer> next) { return create(keyInterface.getClassLoader(), keyInterface, first, next); } public static KeyFactory create(ClassLoader loader, Class keyInterface, Customizer customizer) { return create(loader, keyInterface, customizer, Collections.<KeyFactoryCustomizer>emptyList()); } public static KeyFactory create(ClassLoader loader, Class keyInterface, KeyFactoryCustomizer customizer, List<KeyFactoryCustomizer> next) { Generator gen = new Generator(); gen.setInterface(keyInterface); // SPRING PATCH BEGIN gen.setContextClass(keyInterface); // SPRING PATCH END if (customizer != null) { gen.addCustomizer(customizer); } if (next != null && !next.isEmpty()) { for (KeyFactoryCustomizer keyFactoryCustomizer : next) { gen.addCustomizer(keyFactoryCustomizer); } } gen.setClassLoader(loader); return gen.create(); } public static
leak
java
spring-projects__spring-framework
spring-core/src/main/java/org/springframework/core/io/support/PathMatchingResourcePatternResolver.java
{ "start": 19565, "end": 23348 }
class ____ traversal in the same resource layout, * as well as matching the outcome of module path searches. * @param url a URL as returned from the configured ClassLoader * @return the corresponding Resource object * @see java.lang.ClassLoader#getResources * @see #doFindAllClassPathResources * @see #doFindPathMatchingFileResources */ @SuppressWarnings("deprecation") // on JDK 20 (deprecated URL constructor) protected Resource convertClassLoaderURL(URL url) { if (ResourceUtils.URL_PROTOCOL_FILE.equals(url.getProtocol())) { try { // URI decoding for special characters such as spaces. return new FileSystemResource(ResourceUtils.toURI(url).getSchemeSpecificPart()); } catch (URISyntaxException ex) { // Fallback for URLs that are not valid URIs (should hardly ever happen). return new FileSystemResource(url.getFile()); } } else { UrlResource resource = null; String urlString = url.toString(); String cleanedPath = StringUtils.cleanPath(urlString); if (!cleanedPath.equals(urlString)) { // Prefer cleaned URL, aligned with UrlResource#createRelative(String) try { // Retain original URL instance, potentially including custom URLStreamHandler. resource = new UrlResource(new URL(url, cleanedPath)); } catch (MalformedURLException ex) { // Fallback to regular URL construction below... } } // Retain original URL instance, potentially including custom URLStreamHandler. if (resource == null) { resource = new UrlResource(url); } if (this.useCaches != null) { resource.setUseCaches(this.useCaches); } return resource; } } /** * Search all {@link URLClassLoader} URLs for jar file references and add each to the * given set of resources in the form of a pointer to the root of the jar file content. * @param classLoader the ClassLoader to search (including its ancestors) * @param result the set of resources to add jar roots to * @since 4.1.1 */ protected void addAllClassLoaderJarRoots(@Nullable ClassLoader classLoader, Set<Resource> result) { if (classLoader instanceof URLClassLoader urlClassLoader) { try { for (URL url : urlClassLoader.getURLs()) { try { UrlResource jarResource = (ResourceUtils.URL_PROTOCOL_JAR.equals(url.getProtocol()) ? new UrlResource(url) : new UrlResource(ResourceUtils.JAR_URL_PREFIX + url + ResourceUtils.JAR_URL_SEPARATOR)); if (this.useCaches != null) { jarResource.setUseCaches(this.useCaches); } if (jarResource.exists()) { result.add(jarResource); } } catch (MalformedURLException ex) { if (logger.isDebugEnabled()) { logger.debug("Cannot search for matching files underneath [" + url + "] because it cannot be converted to a valid 'jar:' URL: " + ex.getMessage()); } } } } catch (Exception ex) { if (logger.isDebugEnabled()) { logger.debug("Cannot introspect jar files since ClassLoader [" + classLoader + "] does not support 'getURLs()': " + ex); } } } if (classLoader == ClassLoader.getSystemClassLoader()) { // JAR "Class-Path" manifest header evaluation... addClassPathManifestEntries(result); } if (classLoader != null) { try { // Hierarchy traversal... addAllClassLoaderJarRoots(classLoader.getParent(), result); } catch (Exception ex) { if (logger.isDebugEnabled()) { logger.debug("Cannot introspect jar files in parent ClassLoader since [" + classLoader + "] does not support 'getParent()': " + ex); } } } } /** * Determine jar file references from {@code Class-Path} manifest entries (which * are added to the {@code java.class.path} JVM system property by the system *
path
java
google__dagger
dagger-spi/main/java/dagger/spi/model/ComponentPath.java
{ "start": 1052, "end": 3333 }
class ____ { /** Returns a new {@link ComponentPath} from {@code components}. */ public static ComponentPath create(Iterable<DaggerTypeElement> components) { return new AutoValue_ComponentPath(ImmutableList.copyOf(components)); } /** * Returns the component types, starting from the {@linkplain #rootComponent() root * component} and ending with the {@linkplain #currentComponent() current component}. */ public abstract ImmutableList<DaggerTypeElement> components(); /** * Returns the root {@code Component}- or {@code ProductionComponent}-annotated type. */ public final DaggerTypeElement rootComponent() { return components().get(0); } /** Returns the component at the end of the path. */ @Memoized public DaggerTypeElement currentComponent() { return getLast(components()); } /** * Returns the parent of the {@linkplain #currentComponent() current component}. * * @throws IllegalStateException if the current graph is the {@linkplain #atRoot() root component} */ public final DaggerTypeElement parentComponent() { checkState(!atRoot()); return components().reverse().get(1); } /** * Returns this path's parent path. * * @throws IllegalStateException if the current graph is the {@linkplain #atRoot() root component} */ // TODO(ronshapiro): consider memoizing this public final ComponentPath parent() { checkState(!atRoot()); return create(components().subList(0, components().size() - 1)); } /** Returns the path from the root component to the {@code child} of the current component. */ public final ComponentPath childPath(DaggerTypeElement child) { return create( ImmutableList.<DaggerTypeElement>builder().addAll(components()).add(child).build()); } /** * Returns {@code true} if the {@linkplain #currentComponent() current component} is the * {@linkplain #rootComponent() root component}. */ public final boolean atRoot() { return components().size() == 1; } @Override public final String toString() { return components().stream().map(Key::qualifiedName).collect(joining(" → ")); } @Memoized @Override public abstract int hashCode(); @Override public abstract boolean equals(Object obj); }
ComponentPath
java
apache__camel
components/camel-irc/src/main/java/org/apache/camel/component/irc/IrcBinding.java
{ "start": 851, "end": 1134 }
class ____ { public Object extractBodyFromIrc(IrcMessage message) { String type = message.getMessageType(); String text = message.getMessage(); if (text != null) { return text; } else { return type; } } }
IrcBinding
java
netty__netty
codec-http2/src/main/java/io/netty/handler/codec/http2/Http2RemoteFlowController.java
{ "start": 850, "end": 4202 }
interface ____ extends Http2FlowController { /** * Get the {@link ChannelHandlerContext} for which to apply flow control on. * <p> * This is intended for use by {@link FlowControlled} implementations only. Use with caution. * @return The {@link ChannelHandlerContext} for which to apply flow control on. */ ChannelHandlerContext channelHandlerContext(); /** * Queues a payload for transmission to the remote endpoint. There is no guarantee as to when the data * will be written or how it will be assigned to frames. * <p> * Writes do not actually occur until {@link #writePendingBytes()} is called. * * @param stream the subject stream. Must not be the connection stream object. * @param payload payload to write subject to flow-control accounting and ordering rules. */ void addFlowControlled(Http2Stream stream, FlowControlled payload); /** * Determine if {@code stream} has any {@link FlowControlled} frames currently queued. * @param stream the stream to check if it has flow controlled frames. * @return {@code true} if {@code stream} has any {@link FlowControlled} frames currently queued. */ boolean hasFlowControlled(Http2Stream stream); /** * Write all data pending in the flow controller up to the flow-control limits. * * @throws Http2Exception throws if a protocol-related error occurred. */ void writePendingBytes() throws Http2Exception; /** * Set the active listener on the flow-controller. * * @param listener to notify when a write occurs, can be {@code null}. */ void listener(Listener listener); /** * Determine if the {@code stream} has bytes remaining for use in the flow control window. * <p> * Note that this method respects channel writability. The channel must be writable for this method to * return {@code true}. * * @param stream The stream to test. * @return {@code true} if the {@code stream} has bytes remaining for use in the flow control window and the * channel is writable, {@code false} otherwise. */ boolean isWritable(Http2Stream stream); /** * Notification that the writability of {@link #channelHandlerContext()} has changed. * @throws Http2Exception If any writes occur as a result of this call and encounter errors. */ void channelWritabilityChanged() throws Http2Exception; /** * Explicitly update the dependency tree. This method is called independently of stream state changes. * @param childStreamId The stream identifier associated with the child stream. * @param parentStreamId The stream identifier associated with the parent stream. May be {@code 0}, * to make {@code childStreamId} an immediate child of the connection. * @param weight The weight which is used relative to other child streams for {@code parentStreamId}. This value * must be between 1 and 256 (inclusive). * @param exclusive If {@code childStreamId} should be the exclusive dependency of {@code parentStreamId}. */ void updateDependencyTree(int childStreamId, int parentStreamId, short weight, boolean exclusive); /** * Implementations of this
Http2RemoteFlowController
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/collection/spi/PersistentList.java
{ "start": 12354, "end": 12591 }
class ____ extends AbstractListValueDelayedOperation { public Add(int index, E addedValue) { super( index, addedValue, null ); } @Override public void operate() { list.add( getIndex(), getAddedInstance() ); } } final
Add
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/android/testdata/stubs/android/R.java
{ "start": 665, "end": 797 }
class ____ { public static final int yes = 0; public static final int no = 1; public static final int copy = 2; } }
string
java
spring-projects__spring-security
config/src/main/java/org/springframework/security/config/annotation/web/configurers/PermitAllSupport.java
{ "start": 1131, "end": 2005 }
class ____ { private PermitAllSupport() { } static void permitAll(HttpSecurityBuilder<? extends HttpSecurityBuilder<?>> http, String... urls) { for (String url : urls) { if (url != null) { permitAll(http, new ExactUrlRequestMatcher(url)); } } } @SuppressWarnings("unchecked") static void permitAll(HttpSecurityBuilder<? extends HttpSecurityBuilder<?>> http, RequestMatcher... requestMatchers) { AuthorizeHttpRequestsConfigurer<?> httpConfigurer = http.getConfigurer(AuthorizeHttpRequestsConfigurer.class); Assert.state(httpConfigurer != null, "permitAll only works with HttpSecurity.authorizeHttpRequests(). Please define one."); for (RequestMatcher matcher : requestMatchers) { if (matcher != null) { httpConfigurer.addFirst(matcher, SingleResultAuthorizationManager.permitAll()); } } } private static final
PermitAllSupport
java
apache__camel
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/SpringBatchComponentBuilderFactory.java
{ "start": 1895, "end": 5157 }
interface ____ extends ComponentBuilder<SpringBatchComponent> { /** * Explicitly specifies a JobLauncher to be used. * * The option is a: * &lt;code&gt;org.springframework.batch.core.launch.JobLauncher&lt;/code&gt; type. * * Group: producer * * @param jobLauncher the value to set * @return the dsl builder */ default SpringBatchComponentBuilder jobLauncher(org.springframework.batch.core.launch.JobLauncher jobLauncher) { doSetProperty("jobLauncher", jobLauncher); return this; } /** * Explicitly specifies a JobRegistry to be used. * * The option is a: * &lt;code&gt;org.springframework.batch.core.configuration.JobRegistry&lt;/code&gt; type. * * Group: producer * * @param jobRegistry the value to set * @return the dsl builder */ default SpringBatchComponentBuilder jobRegistry(org.springframework.batch.core.configuration.JobRegistry jobRegistry) { doSetProperty("jobRegistry", jobRegistry); return this; } /** * Whether the producer should be started lazy (on the first message). * By starting lazy you can use this to allow CamelContext and routes to * startup in situations where a producer may otherwise fail during * starting and cause the route to fail being started. By deferring this * startup to be lazy then the startup failure can be handled during * routing messages via Camel's routing error handlers. Beware that when * the first message is processed then creating and starting the * producer may take a little time and prolong the total processing time * of the processing. * * The option is a: &lt;code&gt;boolean&lt;/code&gt; type. * * Default: false * Group: producer * * @param lazyStartProducer the value to set * @return the dsl builder */ default SpringBatchComponentBuilder lazyStartProducer(boolean lazyStartProducer) { doSetProperty("lazyStartProducer", lazyStartProducer); return this; } /** * Whether autowiring is enabled. This is used for automatic autowiring * options (the option must be marked as autowired) by looking up in the * registry to find if there is a single instance of matching type, * which then gets configured on the component. This can be used for * automatic configuring JDBC data sources, JMS connection factories, * AWS Clients, etc. * * The option is a: &lt;code&gt;boolean&lt;/code&gt; type. * * Default: true * Group: advanced * * @param autowiredEnabled the value to set * @return the dsl builder */ default SpringBatchComponentBuilder autowiredEnabled(boolean autowiredEnabled) { doSetProperty("autowiredEnabled", autowiredEnabled); return this; } }
SpringBatchComponentBuilder
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/test/java/org/apache/hadoop/yarn/util/TestUnitsConversionUtil.java
{ "start": 1097, "end": 4998 }
class ____ { @Test void testUnitsConversion() { int value = 5; String fromUnit = ""; long test = value; assertEquals(value * 1000L * 1000L * 1000L * 1000L, UnitsConversionUtil.convert(fromUnit, "p", test), "pico test failed"); assertEquals(value * 1000L * 1000L * 1000L, UnitsConversionUtil.convert(fromUnit, "n", test), "nano test failed"); assertEquals(value * 1000L * 1000L, UnitsConversionUtil.convert(fromUnit, "u", test), "micro test failed"); assertEquals(value * 1000L, UnitsConversionUtil.convert(fromUnit, "m", test), "milli test failed"); test = value * 1000L * 1000L * 1000L * 1000L * 1000L; fromUnit = ""; assertEquals(test / 1000L, UnitsConversionUtil.convert(fromUnit, "k", test), "kilo test failed"); assertEquals(test / (1000L * 1000L), UnitsConversionUtil.convert(fromUnit, "M", test), "mega test failed"); assertEquals(test / (1000L * 1000L * 1000L), UnitsConversionUtil.convert(fromUnit, "G", test), "giga test failed"); assertEquals(test / (1000L * 1000L * 1000L * 1000L), UnitsConversionUtil.convert(fromUnit, "T", test), "tera test failed"); assertEquals(test / (1000L * 1000L * 1000L * 1000L * 1000L), UnitsConversionUtil.convert(fromUnit, "P", test), "peta test failed"); assertEquals(value * 1000L, UnitsConversionUtil.convert("n", "p", value), "nano to pico test failed"); assertEquals(value, UnitsConversionUtil.convert("M", "G", value * 1000L), "mega to giga test failed"); assertEquals(value, UnitsConversionUtil.convert("Mi", "Gi", value * 1024L), "Mi to Gi test failed"); assertEquals(value * 1024, UnitsConversionUtil.convert("Mi", "Ki", value), "Mi to Ki test failed"); assertEquals(5 * 1024, UnitsConversionUtil.convert("Ki", "", 5), "Ki to base units test failed"); assertEquals(1073741, UnitsConversionUtil.convert("Mi", "k", 1024), "Mi to k test failed"); assertEquals(953, UnitsConversionUtil.convert("M", "Mi", 1000), "M to Mi test failed"); } @Test void testOverflow() { long test = 5 * 1000L * 1000L * 1000L * 1000L * 1000L; try { UnitsConversionUtil.convert("P", "p", test); fail("this operation should result in an overflow"); } catch (IllegalArgumentException ie) { // do nothing } try { UnitsConversionUtil.convert("m", "p", Long.MAX_VALUE - 1); fail("this operation should result in an overflow"); } catch (IllegalArgumentException ie) { // do nothing } } @Test void testCompare() { String unitA = "P"; long valueA = 1; String unitB = "p"; long valueB = 2; assertEquals(1, UnitsConversionUtil.compare(unitA, valueA, unitB, valueB)); assertEquals(-1, UnitsConversionUtil.compare(unitB, valueB, unitA, valueA)); assertEquals(0, UnitsConversionUtil.compare(unitA, valueA, unitA, valueA)); assertEquals(-1, UnitsConversionUtil.compare(unitA, valueA, unitA, valueB)); assertEquals(1, UnitsConversionUtil.compare(unitA, valueB, unitA, valueA)); unitB = "T"; assertEquals(1, UnitsConversionUtil.compare(unitA, valueA, unitB, valueB)); assertEquals(-1, UnitsConversionUtil.compare(unitB, valueB, unitA, valueA)); assertEquals(0, UnitsConversionUtil.compare(unitA, valueA, unitB, 1000L)); unitA = "p"; unitB = "n"; assertEquals(-1, UnitsConversionUtil.compare(unitA, valueA, unitB, valueB)); assertEquals(1, UnitsConversionUtil.compare(unitB, valueB, unitA, valueA)); assertEquals(0, UnitsConversionUtil.compare(unitA, 1000L, unitB, valueA)); } }
TestUnitsConversionUtil
java
elastic__elasticsearch
x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/GeoLineAggregatorTests.java
{ "start": 25360, "end": 27787 }
class ____ extends TypeSafeMatcher<long[]> { private final int checkCount; private final long[] expectedLine; private final ArrayList<String> failures = new ArrayList<>(); private TestGeoLineLongArrayMatcher(int checkCount, long[] expectedLine) { this.checkCount = checkCount; this.expectedLine = expectedLine; } @Override public boolean matchesSafely(long[] actualLine) { failures.clear(); if (checkCount == expectedLine.length && actualLine.length != expectedLine.length) { failures.add("Expected length " + expectedLine.length + " but got " + actualLine.length); } for (int i = 0; i < checkCount; i++) { Point actual = asPoint(actualLine[i]); Point expected = asPoint(expectedLine[i]); if (actual.equals(expected) == false) { failures.add("At line position " + i + " expected " + expected + " but got " + actual); } } return failures.isEmpty(); } @Override public void describeMismatchSafely(long[] item, Description description) { description.appendText("had ").appendValue(failures.size()).appendText(" failures"); for (String failure : failures) { description.appendText("\n\t").appendText(failure); } } @Override public void describeTo(Description description) { description.appendText( "Should be geoline of " + expectedLine.length + " starting with " + asPoint(expectedLine[0]) + " and ending with " + asPoint(expectedLine[expectedLine.length - 1]) ); } private static Point asPoint(long encoded) { double latitude = GeoEncodingUtils.decodeLatitude((int) (encoded & 0xffffffffL)); double longitude = GeoEncodingUtils.decodeLongitude((int) (encoded >>> 32)); return new Point(longitude, latitude); } } /** * Wrapper for points and sort fields that is also usable in the GeometrySimplifier library, * allowing us to track which points will survive geometry simplification during geo_line aggregations. */ static
TestGeoLineLongArrayMatcher
java
netty__netty
codec-http/src/main/java/io/netty/handler/codec/http/ReadOnlyHttpHeaders.java
{ "start": 1498, "end": 1693 }
class ____ a copy of the array. * <p> * This may be a good alternative to {@link DefaultHttpHeaders} if your have a fixed set of headers which will not * change. */ @UnstableApi public final
with
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/placement/csmappingrule/MappingRuleMatchers.java
{ "start": 1113, "end": 1317 }
class ____ { /** * Utility class, hiding constructor. */ private MappingRuleMatchers() {} /** * MatchAllMatcher is a matcher which matches everything. */ public static
MappingRuleMatchers
java
google__dagger
javatests/dagger/hilt/android/AndroidEntryPointBaseClassTest.java
{ "start": 1785, "end": 1868 }
class ____ { @AndroidEntryPoint public static final
AndroidEntryPointBaseClassTest