Dataset columns:
  language    stringclasses (1 value)
  repo        stringclasses (60 values)
  path        stringlengths (22–294)
  class_span  dict
  source      stringlengths (13–1.16M)
  target      stringlengths (1–113)
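The snippet below is a minimal, illustrative sketch rather than part of the dataset itself: it shows how a row with these six columns could be deserialized in Java, assuming the rows are exported as JSON Lines and that class_span carries character offsets into the file at path. The file name records.jsonl, the Row and ClassSpan types, and the use of Jackson are assumptions made purely for the example.

```java
// Hypothetical reader for a JSON Lines export of the rows below.
// Assumes Java 16+ (records) and jackson-databind 2.12+ (record support).
import com.fasterxml.jackson.databind.ObjectMapper;

import java.nio.file.Files;
import java.nio.file.Path;

public class ReadMaskedClassRows {

    // Mirrors the dataset columns: language, repo, path, class_span, source, target.
    public record ClassSpan(int start, int end) {}

    public record Row(String language, String repo, String path,
                      ClassSpan class_span, String source, String target) {}

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        for (String line : Files.readAllLines(Path.of("records.jsonl"))) { // hypothetical file name
            Row row = mapper.readValue(line, Row.class);
            // `source` is the class body with its name masked as "____";
            // `target` is the masked name; `class_span` is assumed to be the
            // character range of that class inside the original file at `path`.
            System.out.printf("%s %s -> %s (%d chars)%n",
                    row.repo(), row.path(), row.target(), row.source().length());
        }
    }
}
```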
language: java
repo: spring-projects__spring-security
path: oauth2/oauth2-resource-server/src/main/java/org/springframework/security/oauth2/server/resource/authentication/BearerTokenAuthentication.java
class_span: { "start": 1434, "end": 2808 }
source:
class ____ extends AbstractOAuth2TokenAuthenticationToken<OAuth2AccessToken> { private static final long serialVersionUID = 620L; private final Map<String, Object> attributes; /** * Constructs a {@link BearerTokenAuthentication} with the provided arguments * @param principal The OAuth 2.0 attributes * @param credentials The verified token * @param authorities The authorities associated with the given token */ public BearerTokenAuthentication(OAuth2AuthenticatedPrincipal principal, OAuth2AccessToken credentials, Collection<? extends GrantedAuthority> authorities) { super(credentials, principal, credentials, authorities); Assert.isTrue(credentials.getTokenType() == OAuth2AccessToken.TokenType.BEARER, "credentials must be a bearer token"); this.attributes = Collections.unmodifiableMap(new LinkedHashMap<>(principal.getAttributes())); setAuthenticated(true); } protected BearerTokenAuthentication(Builder<?> builder) { super(builder); this.attributes = Collections.unmodifiableMap(new LinkedHashMap<>(builder.attributes)); } @Override public Map<String, Object> getTokenAttributes() { return this.attributes; } @Override public Builder<?> toBuilder() { return new Builder<>(this); } /** * A builder preserving the concrete {@link Authentication} type * * @since 7.0 */ public static
target: BearerTokenAuthentication

language: java
repo: apache__camel
path: components/camel-ai/camel-pinecone/src/main/java/org/apache/camel/component/pinecone/transform/PineconeEmbeddingsDataTypeTransformer.java
class_span: { "start": 1523, "end": 2239 }
source:
class ____ extends Transformer { @Override public void transform(Message message, DataType fromType, DataType toType) { Embedding embedding = message.getHeader(CamelLangchain4jAttributes.CAMEL_LANGCHAIN4J_EMBEDDING_VECTOR, Embedding.class); String indexId = message.getHeader(PineconeVectorDbHeaders.INDEX_ID, UUID.randomUUID(), String.class); String indexName = message.getHeader(PineconeVectorDbHeaders.INDEX_NAME, "embeddings", String.class); message.setHeader(PineconeVectorDbHeaders.INDEX_NAME, indexName); message.setHeader(PineconeVectorDbHeaders.INDEX_ID, indexId); message.setBody(embedding.vectorAsList()); } }
target: PineconeEmbeddingsDataTypeTransformer

language: java
repo: elastic__elasticsearch
path: server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java
class_span: { "start": 91134, "end": 97052 }
source:
class ____ extends PeerFinder { CoordinatorPeerFinder( Settings settings, TransportService transportService, TransportAddressConnector transportAddressConnector, ConfiguredHostsResolver configuredHostsResolver ) { super( settings, transportService, transportAddressConnector, singleNodeDiscovery ? hostsResolver -> {} : configuredHostsResolver ); } @Override protected void onActiveMasterFound(DiscoveryNode masterNode, long term) { synchronized (mutex) { ensureTermAtLeast(masterNode, term); joinHelper.sendJoinRequest(masterNode, getCurrentTerm(), joinWithDestination(lastJoin, masterNode, term)); } } @Override protected void startProbe(TransportAddress transportAddress) { if (singleNodeDiscovery == false) { super.startProbe(transportAddress); } } @Override protected void onFoundPeersUpdated() { synchronized (mutex) { if (mode == Mode.CANDIDATE) { final VoteCollection expectedVotes = new VoteCollection(); getFoundPeers().forEach(expectedVotes::addVote); expectedVotes.addVote(Coordinator.this.getLocalNode()); final boolean foundQuorum = coordinationState.get().isElectionQuorum(expectedVotes); if (foundQuorum) { if (electionScheduler == null) { logger.debug("preparing election scheduler, expecting votes [{}]", expectedVotes); startElectionScheduler(); } } else { closePrevotingRound(); if (electionScheduler != null) { logger.debug("closing election scheduler, expecting votes [{}]", expectedVotes); electionScheduler.close(); electionScheduler = null; } } } } peerFinderListeners.forEach(PeerFinderListener::onFoundPeersUpdated); } } private void startElectionScheduler() { assert Thread.holdsLock(mutex) : "Coordinator mutex not held"; assert electionScheduler == null : electionScheduler; if (getLocalNode().isMasterNode() == false) { logger.debug("local node is not the master, skipping election scheduler"); return; } logger.debug("starting election scheduler"); final TimeValue gracePeriod = TimeValue.ZERO; electionScheduler = electionSchedulerFactory.startElectionScheduler(gracePeriod, new Runnable() { @Override public void run() { synchronized (mutex) { if (mode == Mode.CANDIDATE) { final ClusterState lastAcceptedState = coordinationState.get().getLastAcceptedState(); final var nodeEligibility = localNodeMayWinElection(lastAcceptedState, electionStrategy); if (nodeEligibility.mayWin() == false) { assert nodeEligibility.reason().isEmpty() == false; logger.info( "skip prevoting as local node may not win election ({}): {}", nodeEligibility.reason(), lastAcceptedState.coordinationMetadata() ); return; } final StatusInfo statusInfo = nodeHealthService.getHealth(); if (statusInfo.getStatus() == UNHEALTHY) { logger.debug("skip prevoting as local node is unhealthy: [{}]", statusInfo.getInfo()); return; } if (prevotingRound != null) { prevotingRound.close(); } prevotingRound = preVoteCollector.start(lastAcceptedState, getDiscoveredNodes()); } } } @Override public String toString() { return "scheduling of new prevoting round"; } }); } private void closeElectionScheduler() { assert ThreadPool.assertCurrentThreadPool(ClusterApplierService.CLUSTER_UPDATE_THREAD_NAME); final long term = applierState.term(); if (electionScheduler != null) { clusterCoordinationExecutor.execute(new Runnable() { @Override public void run() { synchronized (mutex) { if (electionScheduler != null && mode != Mode.CANDIDATE && getCurrentTerm() == term) { logger.debug("stabilised in term [{}], closing election scheduler", term); electionScheduler.close(); electionScheduler = null; } } } @Override public String toString() { return 
"closeElectionScheduler in term [" + term + ']'; } }); } } public Iterable<DiscoveryNode> getFoundPeers() { return peerFinder.getFoundPeers(); } public PeerFinder getPeerFinder() { return this.peerFinder; } private void beforeCommit(long term, long version, ActionListener<Void> listener) { electionStrategy.beforeCommit(term, version, listener); }
target: CoordinatorPeerFinder

language: java
repo: apache__kafka
path: streams/src/test/java/org/apache/kafka/streams/processor/internals/InternalTopicConfigTest.java
class_span: { "start": 1212, "end": 6621 }
source:
class ____ { @Test public void shouldThrowNpeIfTopicConfigIsNull() { assertThrows(NullPointerException.class, () -> new RepartitionTopicConfig("topic", null)); } @Test public void shouldThrowIfNameIsNull() { assertThrows(NullPointerException.class, () -> new RepartitionTopicConfig(null, Collections.emptyMap())); } @Test public void shouldThrowIfNameIsInvalid() { assertThrows(InvalidTopicException.class, () -> new RepartitionTopicConfig("foo bar baz", Collections.emptyMap())); } @Test public void shouldSetCreateTimeByDefaultForWindowedChangelog() { final WindowedChangelogTopicConfig topicConfig = new WindowedChangelogTopicConfig("name", Collections.emptyMap(), 10); final Map<String, String> properties = topicConfig.properties(Collections.emptyMap(), 0); assertEquals("CreateTime", properties.get(TopicConfig.MESSAGE_TIMESTAMP_TYPE_CONFIG)); } @Test public void shouldSetCreateTimeByDefaultForUnwindowedUnversionedChangelog() { final UnwindowedUnversionedChangelogTopicConfig topicConfig = new UnwindowedUnversionedChangelogTopicConfig("name", Collections.emptyMap()); final Map<String, String> properties = topicConfig.properties(Collections.emptyMap(), 0); assertEquals("CreateTime", properties.get(TopicConfig.MESSAGE_TIMESTAMP_TYPE_CONFIG)); } @Test public void shouldSetCreateTimeByDefaultForVersionedChangelog() { final VersionedChangelogTopicConfig topicConfig = new VersionedChangelogTopicConfig("name", Collections.emptyMap(), 12); final Map<String, String> properties = topicConfig.properties(Collections.emptyMap(), 0); assertEquals("CreateTime", properties.get(TopicConfig.MESSAGE_TIMESTAMP_TYPE_CONFIG)); } @Test public void shouldSetCreateTimeByDefaultForRepartitionTopic() { final RepartitionTopicConfig topicConfig = new RepartitionTopicConfig("name", Collections.emptyMap()); final Map<String, String> properties = topicConfig.properties(Collections.emptyMap(), 0); assertEquals("CreateTime", properties.get(TopicConfig.MESSAGE_TIMESTAMP_TYPE_CONFIG)); } @Test public void shouldAugmentRetentionMsWithWindowedChangelog() { final WindowedChangelogTopicConfig topicConfig = new WindowedChangelogTopicConfig("name", Collections.emptyMap(), 10); assertEquals("30", topicConfig.properties(Collections.emptyMap(), 20).get(TopicConfig.RETENTION_MS_CONFIG)); } @Test public void shouldAugmentCompactionLagMsWithVersionedChangelog() { final VersionedChangelogTopicConfig topicConfig = new VersionedChangelogTopicConfig("name", Collections.emptyMap(), 12); assertEquals(Long.toString(12 + 24 * 60 * 60 * 1000L), topicConfig.properties(Collections.emptyMap(), 20).get(TopicConfig.MIN_COMPACTION_LAG_MS_CONFIG)); } @Test public void shouldUseSuppliedConfigsForWindowedChangelogConfig() { final Map<String, String> configs = new HashMap<>(); configs.put("message.timestamp.type", "LogAppendTime"); final WindowedChangelogTopicConfig topicConfig = new WindowedChangelogTopicConfig("name", configs, 10); final Map<String, String> properties = topicConfig.properties(Collections.emptyMap(), 0); assertEquals("LogAppendTime", properties.get(TopicConfig.MESSAGE_TIMESTAMP_TYPE_CONFIG)); } @Test public void shouldUseSuppliedConfigsForVersionedChangelogConfig() { final Map<String, String> configs = new HashMap<>(); configs.put("message.timestamp.type", "LogAppendTime"); final VersionedChangelogTopicConfig topicConfig = new VersionedChangelogTopicConfig("name", configs, 12); final Map<String, String> properties = topicConfig.properties(Collections.emptyMap(), 0); assertEquals("LogAppendTime", 
properties.get(TopicConfig.MESSAGE_TIMESTAMP_TYPE_CONFIG)); } @Test public void shouldUseSuppliedConfigsForUnwindowedUnversionedChangelogConfig() { final Map<String, String> configs = new HashMap<>(); configs.put("retention.ms", "1000"); configs.put("retention.bytes", "10000"); configs.put("message.timestamp.type", "LogAppendTime"); final UnwindowedUnversionedChangelogTopicConfig topicConfig = new UnwindowedUnversionedChangelogTopicConfig("name", configs); final Map<String, String> properties = topicConfig.properties(Collections.emptyMap(), 0); assertEquals("1000", properties.get(TopicConfig.RETENTION_MS_CONFIG)); assertEquals("10000", properties.get(TopicConfig.RETENTION_BYTES_CONFIG)); assertEquals("LogAppendTime", properties.get(TopicConfig.MESSAGE_TIMESTAMP_TYPE_CONFIG)); } @Test public void shouldUseSuppliedConfigsForRepartitionConfig() { final Map<String, String> configs = new HashMap<>(); configs.put("retention.ms", "1000"); configs.put("message.timestamp.type", "LogAppendTime"); final RepartitionTopicConfig topicConfig = new RepartitionTopicConfig("name", configs); final Map<String, String> properties = topicConfig.properties(Collections.emptyMap(), 0); assertEquals("1000", properties.get(TopicConfig.RETENTION_MS_CONFIG)); assertEquals("LogAppendTime", properties.get(TopicConfig.MESSAGE_TIMESTAMP_TYPE_CONFIG)); } }
target: InternalTopicConfigTest

language: java
repo: google__dagger
path: javatests/dagger/hilt/android/processor/internal/aggregateddeps/TestInstallInTest.java
class_span: { "start": 2571, "end": 2986 }
source:
interface ____ {}"); Source testInstallInModule = HiltCompilerTests.javaSource( "test.TestInstallInModule", "package test;", "", "import dagger.Module;", "import dagger.hilt.testing.TestInstallIn;", "", "@Module", "@TestInstallIn(components = {}, replaces = InstallInModule.class)", "
target: InstallInModule

language: java
repo: junit-team__junit5
path: junit-jupiter-api/src/main/java/org/junit/jupiter/api/parallel/ResourceLock.java
class_span: { "start": 1688, "end": 2319 }
source:
class ____ method. For * example, if a test method is annotated with {@code @ResourceLock} the lock * will be acquired before any {@link org.junit.jupiter.api.BeforeEach @BeforeEach} * methods are executed and released after all * {@link org.junit.jupiter.api.AfterEach @AfterEach} methods have been executed. * * <p>This annotation can be repeated to declare the use of multiple shared resources. * * <p>Uniqueness of a shared resource is determined by both the {@link #value()} * and the {@link #mode()}. Duplicated shared resources do not cause errors. * * <p>This annotation is {@linkplain Inherited inherited} within
target: or

language: java
repo: grpc__grpc-java
path: xds/src/test/java/io/grpc/xds/internal/security/trust/XdsTrustManagerFactoryTest.java
class_span: { "start": 1961, "end": 15151 }
source:
class ____ { @Test public void constructor_fromFile() throws CertificateException, IOException, CertStoreException { XdsTrustManagerFactory factory = new XdsTrustManagerFactory(getCertContextFromPath(CA_PEM_FILE)); assertThat(factory).isNotNull(); TrustManager[] tms = factory.getTrustManagers(); assertThat(tms).isNotNull(); assertThat(tms).hasLength(1); TrustManager myTm = tms[0]; assertThat(myTm).isInstanceOf(XdsX509TrustManager.class); XdsX509TrustManager xdsX509TrustManager = (XdsX509TrustManager) myTm; X509Certificate[] acceptedIssuers = xdsX509TrustManager.getAcceptedIssuers(); assertThat(acceptedIssuers).isNotNull(); assertThat(acceptedIssuers).hasLength(1); X509Certificate caCert = acceptedIssuers[0]; assertThat(caCert) .isEqualTo(CertificateUtils.toX509Certificates(TlsTesting.loadCert(CA_PEM_FILE))[0]); } @Test public void constructor_fromInlineBytes() throws CertificateException, IOException, CertStoreException { XdsTrustManagerFactory factory = new XdsTrustManagerFactory(getCertContextFromPathAsInlineBytes(CA_PEM_FILE)); assertThat(factory).isNotNull(); TrustManager[] tms = factory.getTrustManagers(); assertThat(tms).isNotNull(); assertThat(tms).hasLength(1); TrustManager myTm = tms[0]; assertThat(myTm).isInstanceOf(XdsX509TrustManager.class); XdsX509TrustManager xdsX509TrustManager = (XdsX509TrustManager) myTm; X509Certificate[] acceptedIssuers = xdsX509TrustManager.getAcceptedIssuers(); assertThat(acceptedIssuers).isNotNull(); assertThat(acceptedIssuers).hasLength(1); X509Certificate caCert = acceptedIssuers[0]; assertThat(caCert) .isEqualTo(CertificateUtils.toX509Certificates(TlsTesting.loadCert(CA_PEM_FILE))[0]); } @Test public void constructor_fromRootCert() throws CertificateException, IOException, CertStoreException { X509Certificate x509Cert = TestUtils.loadX509Cert(CA_PEM_FILE); CertificateValidationContext staticValidationContext = buildStaticValidationContext("san1", "san2"); XdsTrustManagerFactory factory = new XdsTrustManagerFactory(new X509Certificate[]{x509Cert}, staticValidationContext, false); assertThat(factory).isNotNull(); TrustManager[] tms = factory.getTrustManagers(); assertThat(tms).isNotNull(); assertThat(tms).hasLength(1); TrustManager myTm = tms[0]; assertThat(myTm).isInstanceOf(XdsX509TrustManager.class); XdsX509TrustManager xdsX509TrustManager = (XdsX509TrustManager) myTm; X509Certificate[] acceptedIssuers = xdsX509TrustManager.getAcceptedIssuers(); assertThat(acceptedIssuers).isNotNull(); assertThat(acceptedIssuers).hasLength(1); X509Certificate caCert = acceptedIssuers[0]; assertThat(caCert) .isEqualTo(CertificateUtils.toX509Certificates(TlsTesting.loadCert(CA_PEM_FILE))[0]); } @Test public void constructor_fromSpiffeTrustMap() throws CertificateException, IOException, CertStoreException { X509Certificate x509Cert = TestUtils.loadX509Cert(CA_PEM_FILE); CertificateValidationContext staticValidationContext = buildStaticValidationContext("san1", "san2"); // Single domain and single cert XdsTrustManagerFactory factory = new XdsTrustManagerFactory(ImmutableMap .of("example.com", ImmutableList.of(x509Cert)), staticValidationContext, false); assertThat(factory).isNotNull(); TrustManager[] tms = factory.getTrustManagers(); assertThat(tms).isNotNull(); assertThat(tms).hasLength(1); TrustManager myTm = tms[0]; assertThat(myTm).isInstanceOf(XdsX509TrustManager.class); XdsX509TrustManager xdsX509TrustManager = (XdsX509TrustManager) myTm; assertThat(xdsX509TrustManager.getAcceptedIssuers()).isNotNull(); 
assertThat(xdsX509TrustManager.getAcceptedIssuers()).hasLength(1); assertThat(xdsX509TrustManager.getAcceptedIssuers()[0].getIssuerX500Principal().getName()) .isEqualTo("CN=testca,O=Internet Widgits Pty Ltd,ST=Some-State,C=AU"); // Multiple domains and multiple certs for one of it X509Certificate anotherCert = TestUtils.loadX509Cert(CLIENT_PEM_FILE); factory = new XdsTrustManagerFactory(ImmutableMap .of("example.com", ImmutableList.of(x509Cert), "google.com", ImmutableList.of(x509Cert, anotherCert)), staticValidationContext, false); assertThat(factory).isNotNull(); tms = factory.getTrustManagers(); assertThat(tms).isNotNull(); assertThat(tms).hasLength(1); myTm = tms[0]; assertThat(myTm).isInstanceOf(XdsX509TrustManager.class); xdsX509TrustManager = (XdsX509TrustManager) myTm; assertThat(xdsX509TrustManager.getAcceptedIssuers()).isNotNull(); assertThat(xdsX509TrustManager.getAcceptedIssuers()).hasLength(2); assertThat(xdsX509TrustManager.getAcceptedIssuers()[0].getIssuerX500Principal().getName()) .isEqualTo("CN=testca,O=Internet Widgits Pty Ltd,ST=Some-State,C=AU"); assertThat(xdsX509TrustManager.getAcceptedIssuers()[1].getIssuerX500Principal().getName()) .isEqualTo("CN=testca,O=Internet Widgits Pty Ltd,ST=Some-State,C=AU"); } @Test public void constructorRootCert_checkServerTrusted() throws CertificateException, IOException, CertStoreException { X509Certificate x509Cert = TestUtils.loadX509Cert(CA_PEM_FILE); CertificateValidationContext staticValidationContext = buildStaticValidationContext("san1", "waterzooi.test.google.be"); XdsTrustManagerFactory factory = new XdsTrustManagerFactory(new X509Certificate[]{x509Cert}, staticValidationContext, false); XdsX509TrustManager xdsX509TrustManager = (XdsX509TrustManager) factory.getTrustManagers()[0]; X509Certificate[] serverChain = CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE)); xdsX509TrustManager.checkServerTrusted(serverChain, "RSA"); } @Test public void constructorRootCert_nonStaticContext_throwsException() throws CertificateException, IOException, CertStoreException { X509Certificate x509Cert = TestUtils.loadX509Cert(CA_PEM_FILE); try { new XdsTrustManagerFactory( new X509Certificate[] {x509Cert}, getCertContextFromPath(CA_PEM_FILE), false); Assert.fail("no exception thrown"); } catch (IllegalArgumentException expected) { assertThat(expected) .hasMessageThat() .contains("only static certificateValidationContext expected"); } } @Test public void constructorRootCert_nonStaticContext_systemRootCerts_valid() throws CertificateException, IOException, CertStoreException { X509Certificate x509Cert = TestUtils.loadX509Cert(CA_PEM_FILE); CertificateValidationContext certValidationContext = CertificateValidationContext.newBuilder() .setTrustedCa( DataSource.newBuilder().setFilename(TestUtils.loadCert(CA_PEM_FILE).getAbsolutePath())) .setSystemRootCerts(CertificateValidationContext.SystemRootCerts.getDefaultInstance()) .build(); XdsTrustManagerFactory unused = new XdsTrustManagerFactory(new X509Certificate[] {x509Cert}, certValidationContext, false); } @Test public void constructorRootCert_checkServerTrusted_throwsException() throws CertificateException, IOException, CertStoreException { X509Certificate x509Cert = TestUtils.loadX509Cert(CA_PEM_FILE); CertificateValidationContext staticValidationContext = buildStaticValidationContext("san1", "san2"); XdsTrustManagerFactory factory = new XdsTrustManagerFactory(new X509Certificate[]{x509Cert}, staticValidationContext, false); XdsX509TrustManager xdsX509TrustManager = 
(XdsX509TrustManager) factory.getTrustManagers()[0]; X509Certificate[] serverChain = CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE)); try { xdsX509TrustManager.checkServerTrusted(serverChain, "RSA"); Assert.fail("no exception thrown"); } catch (CertificateException expected) { assertThat(expected) .hasMessageThat() .contains("Peer certificate SAN check failed"); } } @Test public void constructorRootCert_checkClientTrusted_throwsException() throws CertificateException, IOException, CertStoreException { X509Certificate x509Cert = TestUtils.loadX509Cert(CA_PEM_FILE); CertificateValidationContext staticValidationContext = buildStaticValidationContext("san1", "san2"); XdsTrustManagerFactory factory = new XdsTrustManagerFactory(new X509Certificate[]{x509Cert}, staticValidationContext, false); XdsX509TrustManager xdsX509TrustManager = (XdsX509TrustManager) factory.getTrustManagers()[0]; X509Certificate[] clientChain = CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE)); try { xdsX509TrustManager.checkClientTrusted(clientChain, "RSA"); Assert.fail("no exception thrown"); } catch (CertificateException expected) { assertThat(expected) .hasMessageThat() .contains("Peer certificate SAN check failed"); } } @Test public void checkServerTrusted_goodCert() throws CertificateException, IOException, CertStoreException { XdsTrustManagerFactory factory = new XdsTrustManagerFactory(getCertContextFromPath(CA_PEM_FILE)); XdsX509TrustManager xdsX509TrustManager = (XdsX509TrustManager) factory.getTrustManagers()[0]; X509Certificate[] serverChain = CertificateUtils.toX509Certificates(TlsTesting.loadCert(SERVER_1_PEM_FILE)); xdsX509TrustManager.checkServerTrusted(serverChain, "RSA"); } @Test public void checkClientTrusted_goodCert() throws CertificateException, IOException, CertStoreException { XdsTrustManagerFactory factory = new XdsTrustManagerFactory(getCertContextFromPath(CA_PEM_FILE)); XdsX509TrustManager xdsX509TrustManager = (XdsX509TrustManager) factory.getTrustManagers()[0]; X509Certificate[] clientChain = CertificateUtils.toX509Certificates(TlsTesting.loadCert(CLIENT_PEM_FILE)); xdsX509TrustManager.checkClientTrusted(clientChain, "RSA"); } @Test public void checkServerTrusted_badCert_throwsException() throws CertificateException, IOException, CertStoreException { XdsTrustManagerFactory factory = new XdsTrustManagerFactory(getCertContextFromPath(CA_PEM_FILE)); XdsX509TrustManager xdsX509TrustManager = (XdsX509TrustManager) factory.getTrustManagers()[0]; X509Certificate[] serverChain = CertificateUtils.toX509Certificates(TlsTesting.loadCert(BAD_SERVER_PEM_FILE)); try { xdsX509TrustManager.checkServerTrusted(serverChain, "RSA"); Assert.fail("no exception thrown"); } catch (CertificateException expected) { assertThat(expected) .hasMessageThat() .contains("unable to find valid certification path to requested target"); } } @Test public void checkClientTrusted_badCert_throwsException() throws CertificateException, IOException, CertStoreException { XdsTrustManagerFactory factory = new XdsTrustManagerFactory(getCertContextFromPath(CA_PEM_FILE)); XdsX509TrustManager xdsX509TrustManager = (XdsX509TrustManager) factory.getTrustManagers()[0]; X509Certificate[] clientChain = CertificateUtils.toX509Certificates(TlsTesting.loadCert(BAD_CLIENT_PEM_FILE)); try { xdsX509TrustManager.checkClientTrusted(clientChain, "RSA"); Assert.fail("no exception thrown"); } catch (CertificateException expected) { assertThat(expected) .hasMessageThat() .contains("unable to find valid 
certification path to requested target"); } } /** constructs CertificateValidationContext from the resources file-path. */ private static final CertificateValidationContext getCertContextFromPath(String pemFilePath) throws IOException { return CertificateValidationContext.newBuilder() .setTrustedCa( DataSource.newBuilder().setFilename(TestUtils.loadCert(pemFilePath).getAbsolutePath())) .build(); } /** constructs CertificateValidationContext from pemFilePath and sets contents as inline-bytes. */ private static final CertificateValidationContext getCertContextFromPathAsInlineBytes( String pemFilePath) throws IOException, CertificateException { X509Certificate x509Cert = TestUtils.loadX509Cert(pemFilePath); return CertificateValidationContext.newBuilder() .setTrustedCa( DataSource.newBuilder().setInlineBytes(ByteString.copyFrom(x509Cert.getEncoded()))) .build(); } private static final CertificateValidationContext buildStaticValidationContext( String... verifySans) { CertificateValidationContext.Builder builder = CertificateValidationContext.newBuilder(); for (String san : verifySans) { @SuppressWarnings("deprecation") CertificateValidationContext.Builder unused = builder.addMatchSubjectAltNames(StringMatcher.newBuilder().setExact(san)); } return builder.build(); } }
target: XdsTrustManagerFactoryTest

language: java
repo: apache__flink
path: flink-libraries/flink-state-processing-api/src/main/java/org/apache/flink/state/api/input/operator/WindowReaderOperator.java
class_span: { "start": 11852, "end": 12514 }
source:
class ____<T> implements CloseableIterator<T> { private final Iterator<T> iterator; private final AutoCloseable resource; private IteratorWithRemove(Stream<T> stream) { this.iterator = stream.iterator(); this.resource = stream; } @Override public boolean hasNext() { return iterator.hasNext(); } @Override public T next() { return iterator.next(); } @Override public void remove() {} @Override public void close() throws Exception { resource.close(); } } }
target: IteratorWithRemove

language: java
repo: grpc__grpc-java
path: okhttp/src/main/java/io/grpc/okhttp/InternalOkHttpServerBuilder.java
class_span: { "start": 1004, "end": 1635 }
source:
class ____ { public static InternalServer buildTransportServers(OkHttpServerBuilder builder, List<? extends ServerStreamTracer.Factory> streamTracerFactories) { return builder.buildTransportServers(streamTracerFactories); } public static void setTransportTracerFactory(OkHttpServerBuilder builder, TransportTracer.Factory transportTracerFactory) { builder.setTransportTracerFactory(transportTracerFactory); } public static void setStatsEnabled(OkHttpServerBuilder builder, boolean value) { builder.setStatsEnabled(value); } private InternalOkHttpServerBuilder() {} }
target: InternalOkHttpServerBuilder

language: java
repo: apache__hadoop
path: hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java
class_span: { "start": 1547, "end": 1611 }
source:
class ____ { /** * Tests the
target: TestWrappedRecordReaderClassloader

language: java
repo: spring-projects__spring-framework
path: spring-test/src/main/java/org/springframework/test/context/util/TestContextResourceUtils.java
class_span: { "start": 2775, "end": 3565 }
source:
class ____ defined. Such a path will be prepended with * the {@code classpath:} prefix and the path to the package for the class. * <li>A path starting with a slash will be treated as an absolute path * within the classpath, for example: {@code "/org/example/schema.sql"}. * Such a path will be prepended with the {@code classpath:} prefix. * <li>A path which is already prefixed with a URL protocol (for example, * {@code classpath:}, {@code file:}, {@code http:}, etc.) will not have its * protocol modified. * </ul> * <p>Each path will then be {@linkplain StringUtils#cleanPath cleaned}, * unless the {@code preservePlaceholders} flag is {@code true} and the path * contains one or more placeholders in the form <code>${placeholder.name}</code>. * @param clazz the
target: is

language: java
repo: apache__camel
path: dsl/camel-jbang/camel-jbang-core/src/main/java/org/apache/camel/dsl/jbang/core/commands/Complete.java
class_span: { "start": 1045, "end": 1588 }
source:
class ____ extends CamelCommand { public Complete(CamelJBangMain main) { super(main); } @Override public Integer doCall() throws Exception { String script = AutoComplete.bash( spec.parent().name(), spec.parent().commandLine()); // not PrintWriter.println: scripts with Windows line separators fail in strange // ways! PrintStream out = System.out; out.print(script); out.print('\n'); out.flush(); return 0; } }
target: Complete

language: java
repo: google__error-prone
path: core/src/test/java/com/google/errorprone/bugpatterns/DefaultCharsetTest.java
class_span: { "start": 3357, "end": 4510 }
source:
class ____ { static final boolean CONST = true; void f(File f, String s, boolean flag) throws Exception { // BUG: Diagnostic contains: Files.newBufferedWriter(Paths.get(s), UTF_8); new FileWriter(s); // BUG: Diagnostic contains: Files.newBufferedWriter(Paths.get(s), UTF_8, CREATE, APPEND); new FileWriter(s, true); // BUG: Diagnostic contains: Files.newBufferedWriter(Paths.get(s), UTF_8, CREATE, APPEND); new FileWriter(s, CONST); // BUG: Diagnostic contains: Files.newBufferedWriter(f.toPath(), UTF_8); new FileWriter(f); // BUG: Diagnostic contains: Files.newBufferedWriter(f.toPath(), UTF_8, CREATE, APPEND); new FileWriter(f, true); // BUG: Diagnostic contains: Files.newBufferedWriter(f.toPath(), UTF_8); new FileWriter(f, false); // BUG: Diagnostic contains: Files.newBufferedWriter(f.toPath(), UTF_8, flag ? new // StandardOpenOption[] {CREATE, APPEND} : new StandardOpenOption[] {CREATE} new FileWriter(f, flag); } } """) .doTest(); } @Test public void buffered() { compilationHelper .addSourceLines( "Test.java", """ import java.io.*;
target: Test

language: java
repo: apache__hadoop
path: hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/ResourceManagerAdministrationProtocol.java
class_span: { "start": 6241, "end": 12046 }
source:
interface ____ by admin to update nodes' resources to the * <code>ResourceManager</code> </p>. * * <p>The admin client is required to provide details such as a map from * {@link NodeId} to {@link ResourceOption} required to update resources on * a list of <code>RMNode</code> in <code>ResourceManager</code> etc. * via the {@link UpdateNodeResourceRequest}.</p> * * @param request request to update resource for a node in cluster. * @return (empty) response on accepting update. * @throws YarnException exceptions from yarn servers. * @throws IOException io error occur. */ @Private @Idempotent public UpdateNodeResourceResponse updateNodeResource( UpdateNodeResourceRequest request) throws YarnException, IOException; @Private @Evolving @Idempotent public RefreshNodesResourcesResponse refreshNodesResources( RefreshNodesResourcesRequest request) throws YarnException, IOException; @Private @Idempotent public AddToClusterNodeLabelsResponse addToClusterNodeLabels( AddToClusterNodeLabelsRequest request) throws YarnException, IOException; @Private @Idempotent public RemoveFromClusterNodeLabelsResponse removeFromClusterNodeLabels( RemoveFromClusterNodeLabelsRequest request) throws YarnException, IOException; @Private @Idempotent public ReplaceLabelsOnNodeResponse replaceLabelsOnNode( ReplaceLabelsOnNodeRequest request) throws YarnException, IOException; @Private @Idempotent public CheckForDecommissioningNodesResponse checkForDecommissioningNodes( CheckForDecommissioningNodesRequest checkForDecommissioningNodesRequest) throws YarnException, IOException; @Private @Idempotent public RefreshClusterMaxPriorityResponse refreshClusterMaxPriority( RefreshClusterMaxPriorityRequest request) throws YarnException, IOException; @Private @Idempotent NodesToAttributesMappingResponse mapAttributesToNodes( NodesToAttributesMappingRequest request) throws YarnException, IOException; /** * In YARN Federation mode, We allow users to mark subClusters * With no heartbeat for a long time as SC_LOST state. * * If we include a specific subClusterId in the request, check for the specified subCluster. * If subClusterId is empty, all subClusters are checked. * * @param request deregisterSubCluster request. * The request contains the id of to deregister sub-cluster. * @return Response from deregisterSubCluster. * @throws YarnException exceptions from yarn servers. * @throws IOException if an IO error occurred. */ @Private @Idempotent DeregisterSubClusterResponse deregisterSubCluster(DeregisterSubClusterRequest request) throws YarnException, IOException; /** * In YARN-Federation mode, We will be storing the Policy information for Queues. * * @param request saveFederationQueuePolicy Request * @return Response from saveFederationQueuePolicy. * @throws YarnException exceptions from yarn servers. * @throws IOException if an IO error occurred. */ @Private @Idempotent SaveFederationQueuePolicyResponse saveFederationQueuePolicy( SaveFederationQueuePolicyRequest request) throws YarnException, IOException; /** * In YARN-Federation mode, this method provides a way to save queue policies in batches. * * @param request BatchSaveFederationQueuePolicies Request. * @return Response from batchSaveFederationQueuePolicies. * @throws YarnException exceptions from yarn servers. * @throws IOException if an IO error occurred. 
*/ @Private @Idempotent BatchSaveFederationQueuePoliciesResponse batchSaveFederationQueuePolicies( BatchSaveFederationQueuePoliciesRequest request) throws YarnException, IOException; /** * In YARN-Federation mode, this method provides a way to list policies. * * @param request QueryFederationQueuePoliciesRequest Request. * @return Response from listFederationQueuePolicies. * @throws YarnException exceptions from yarn servers. * @throws IOException if an IO error occurred. */ @Private @Idempotent QueryFederationQueuePoliciesResponse listFederationQueuePolicies( QueryFederationQueuePoliciesRequest request) throws YarnException, IOException; /** * In YARN-Federation mode, this method provides a way to delete federation application. * * @param request DeleteFederationApplicationRequest Request. * @return Response from deleteFederationApplication. * @throws YarnException exceptions from yarn servers. * @throws IOException if an IO error occurred. */ @Private @Idempotent DeleteFederationApplicationResponse deleteFederationApplication( DeleteFederationApplicationRequest request) throws YarnException, IOException; /** * In YARN-Federation mode, this method provides a way to get federation subcluster list. * * @param request GetSubClustersRequest Request. * @return Response from getFederationSubClusters. * @throws YarnException exceptions from yarn servers. * @throws IOException if an IO error occurred. */ @Private @Idempotent GetSubClustersResponse getFederationSubClusters(GetSubClustersRequest request) throws YarnException, IOException; /** * In YARN-Federation mode, this method provides a way to delete queue weight policies. * * @param request DeleteFederationQueuePoliciesRequest Request. * @return Response from DeleteFederationQueuePolicies. * @throws YarnException exceptions from yarn servers. * @throws IOException if an IO error occurred. */ @Private @Idempotent DeleteFederationQueuePoliciesResponse deleteFederationPoliciesByQueues( DeleteFederationQueuePoliciesRequest request) throws YarnException, IOException; }
target: used

language: java
repo: apache__kafka
path: clients/src/test/java/org/apache/kafka/common/requests/DescribeAclsResponseTest.java
class_span: { "start": 1835, "end": 5762 }
source:
class ____ { private static final short V1 = 1; private static final AclDescription ALLOW_CREATE_ACL = buildAclDescription( "127.0.0.1", "User:ANONYMOUS", AclOperation.CREATE, AclPermissionType.ALLOW); private static final AclDescription DENY_READ_ACL = buildAclDescription( "127.0.0.1", "User:ANONYMOUS", AclOperation.READ, AclPermissionType.DENY); private static final DescribeAclsResource UNKNOWN_ACL = buildResource( "foo", ResourceType.UNKNOWN, PatternType.LITERAL, Collections.singletonList(DENY_READ_ACL)); private static final DescribeAclsResource PREFIXED_ACL1 = buildResource( "prefix", ResourceType.GROUP, PatternType.PREFIXED, Collections.singletonList(ALLOW_CREATE_ACL)); private static final DescribeAclsResource LITERAL_ACL1 = buildResource( "foo", ResourceType.TOPIC, PatternType.LITERAL, Collections.singletonList(ALLOW_CREATE_ACL)); @Test public void shouldThrowIfUnknown() { assertThrows(IllegalArgumentException.class, () -> buildResponse(10, Errors.NONE, Collections.singletonList(UNKNOWN_ACL)).serialize(V1)); } @Test public void shouldRoundTripV1() { List<DescribeAclsResource> resources = Arrays.asList(LITERAL_ACL1, PREFIXED_ACL1); final DescribeAclsResponse original = buildResponse(100, Errors.NONE, resources); final Readable readable = original.serialize(V1); final DescribeAclsResponse result = DescribeAclsResponse.parse(readable, V1); assertResponseEquals(original, result); final DescribeAclsResponse result2 = buildResponse(100, Errors.NONE, DescribeAclsResponse.aclsResources( DescribeAclsResponse.aclBindings(resources))); assertResponseEquals(original, result2); } @Test public void testAclBindings() { final AclBinding original = new AclBinding(new ResourcePattern(ResourceType.TOPIC, "foo", PatternType.LITERAL), new AccessControlEntry("User:ANONYMOUS", "127.0.0.1", AclOperation.CREATE, AclPermissionType.ALLOW)); final List<AclBinding> result = DescribeAclsResponse.aclBindings(Collections.singletonList(LITERAL_ACL1)); assertEquals(1, result.size()); assertEquals(original, result.get(0)); } private static void assertResponseEquals(final DescribeAclsResponse original, final DescribeAclsResponse actual) { final Set<DescribeAclsResource> originalBindings = new HashSet<>(original.acls()); final Set<DescribeAclsResource> actualBindings = new HashSet<>(actual.acls()); assertEquals(originalBindings, actualBindings); } private static DescribeAclsResponse buildResponse(int throttleTimeMs, Errors error, List<DescribeAclsResource> resources) { return new DescribeAclsResponse(new DescribeAclsResponseData() .setThrottleTimeMs(throttleTimeMs) .setErrorCode(error.code()) .setErrorMessage(error.message()) .setResources(resources)); } private static DescribeAclsResource buildResource(String name, ResourceType type, PatternType patternType, List<AclDescription> acls) { return new DescribeAclsResource() .setResourceName(name) .setResourceType(type.code()) .setPatternType(patternType.code()) .setAcls(acls); } private static AclDescription buildAclDescription(String host, String principal, AclOperation operation, AclPermissionType permission) { return new AclDescription() .setHost(host) .setPrincipal(principal) .setOperation(operation.code()) .setPermissionType(permission.code()); } }
target: DescribeAclsResponseTest

language: java
repo: netty__netty
path: codec-http2/src/main/java/io/netty/handler/codec/http2/DefaultHttp2DataFrame.java
class_span: { "start": 1030, "end": 5628 }
source:
class ____ extends AbstractHttp2StreamFrame implements Http2DataFrame { private final ByteBuf content; private final boolean endStream; private final int padding; private final int initialFlowControlledBytes; /** * Equivalent to {@code new DefaultHttp2DataFrame(content, false)}. * * @param content non-{@code null} payload */ public DefaultHttp2DataFrame(ByteBuf content) { this(content, false); } /** * Equivalent to {@code new DefaultHttp2DataFrame(Unpooled.EMPTY_BUFFER, endStream)}. * * @param endStream whether this data should terminate the stream */ public DefaultHttp2DataFrame(boolean endStream) { this(Unpooled.EMPTY_BUFFER, endStream); } /** * Equivalent to {@code new DefaultHttp2DataFrame(content, endStream, 0)}. * * @param content non-{@code null} payload * @param endStream whether this data should terminate the stream */ public DefaultHttp2DataFrame(ByteBuf content, boolean endStream) { this(content, endStream, 0); } /** * Construct a new data message. * * @param content non-{@code null} payload * @param endStream whether this data should terminate the stream * @param padding additional bytes that should be added to obscure the true content size. Must be between 0 and * 256 (inclusive). */ public DefaultHttp2DataFrame(ByteBuf content, boolean endStream, int padding) { this.content = checkNotNull(content, "content"); this.endStream = endStream; verifyPadding(padding); this.padding = padding; if (content().readableBytes() + (long) padding > Integer.MAX_VALUE) { throw new IllegalArgumentException("content + padding must be <= Integer.MAX_VALUE"); } initialFlowControlledBytes = content().readableBytes() + padding; } @Override public DefaultHttp2DataFrame stream(Http2FrameStream stream) { super.stream(stream); return this; } @Override public String name() { return "DATA"; } @Override public boolean isEndStream() { return endStream; } @Override public int padding() { return padding; } @Override public ByteBuf content() { return ByteBufUtil.ensureAccessible(content); } @Override public int initialFlowControlledBytes() { return initialFlowControlledBytes; } @Override public DefaultHttp2DataFrame copy() { return replace(content().copy()); } @Override public DefaultHttp2DataFrame duplicate() { return replace(content().duplicate()); } @Override public DefaultHttp2DataFrame retainedDuplicate() { return replace(content().retainedDuplicate()); } @Override public DefaultHttp2DataFrame replace(ByteBuf content) { return new DefaultHttp2DataFrame(content, endStream, padding); } @Override public int refCnt() { return content.refCnt(); } @Override public boolean release() { return content.release(); } @Override public boolean release(int decrement) { return content.release(decrement); } @Override public DefaultHttp2DataFrame retain() { content.retain(); return this; } @Override public DefaultHttp2DataFrame retain(int increment) { content.retain(increment); return this; } @Override public String toString() { return StringUtil.simpleClassName(this) + "(stream=" + stream() + ", content=" + content + ", endStream=" + endStream + ", padding=" + padding + ')'; } @Override public DefaultHttp2DataFrame touch() { content.touch(); return this; } @Override public DefaultHttp2DataFrame touch(Object hint) { content.touch(hint); return this; } @Override public boolean equals(Object o) { if (!(o instanceof DefaultHttp2DataFrame)) { return false; } DefaultHttp2DataFrame other = (DefaultHttp2DataFrame) o; return super.equals(other) && content.equals(other.content()) && endStream == other.endStream && padding == 
other.padding; } @Override public int hashCode() { int hash = super.hashCode(); hash = hash * 31 + content.hashCode(); hash = hash * 31 + (endStream ? 0 : 1); hash = hash * 31 + padding; return hash; } }
target: DefaultHttp2DataFrame

language: java
repo: apache__maven
path: impl/maven-core/src/main/java/org/apache/maven/plugin/internal/PluginDependenciesResolver.java
class_span: { "start": 1498, "end": 1587 }
source:
interface ____ be * changed or deleted without prior notice. * * @since 3.0 */ public
target: can

language: java
repo: google__dagger
path: hilt-compiler/main/java/dagger/hilt/processor/internal/Components.java
class_span: { "start": 1429, "end": 4468 }
source:
class ____ { /** Returns the {@link dagger.hilt.InstallIn} components for a given element. */ public static ImmutableSet<ClassName> getComponents(XElement element) { ImmutableSet<ClassName> components; if (element.hasAnnotation(ClassNames.INSTALL_IN) || element.hasAnnotation(ClassNames.TEST_INSTALL_IN)) { components = getHiltInstallInComponents(element); } else { // Check the enclosing element in case it passed in module is a companion object. This helps // in cases where the element was arrived at by checking a binding method and moving outward. XElement enclosing = element.getEnclosingElement(); if (enclosing != null && isTypeElement(enclosing) && isTypeElement(element) && enclosing.hasAnnotation(ClassNames.MODULE) && asTypeElement(element).isCompanionObject()) { return getComponents(enclosing); } if (Processors.hasErrorTypeAnnotation(element)) { throw new BadInputException( String.format( "Error annotation found on element %s. Look above for compilation errors", XElements.toStableString(element)), element); } else { throw new BadInputException( String.format( "An @InstallIn annotation is required for: %s." , XElements.toStableString(element)), element); } } return components; } public static AnnotationSpec getInstallInAnnotationSpec(ImmutableSet<ClassName> components) { Preconditions.checkArgument(!components.isEmpty()); AnnotationSpec.Builder builder = AnnotationSpec.builder(ClassNames.INSTALL_IN); components.forEach(component -> builder.addMember("value", "$T.class", component)); return builder.build(); } private static ImmutableSet<ClassName> getHiltInstallInComponents(XElement element) { Preconditions.checkArgument( element.hasAnnotation(ClassNames.INSTALL_IN) || element.hasAnnotation(ClassNames.TEST_INSTALL_IN)); ImmutableList<XTypeElement> components = element.hasAnnotation(ClassNames.INSTALL_IN) ? Processors.getAnnotationClassValues( element.getAnnotation(ClassNames.INSTALL_IN), "value") : Processors.getAnnotationClassValues( element.getAnnotation(ClassNames.TEST_INSTALL_IN), "components"); ImmutableSet<XTypeElement> undefinedComponents = components.stream() .filter(component -> !component.hasAnnotation(ClassNames.DEFINE_COMPONENT)) .collect(toImmutableSet()); ProcessorErrors.checkState( undefinedComponents.isEmpty(), element, "@InstallIn, can only be used with @DefineComponent-annotated classes, but found: %s", undefinedComponents.stream().map(XElements::toStableString).collect(toImmutableList())); return components.stream().map(XTypeElement::getClassName).collect(toImmutableSet()); } private Components() {} }
target: Components

language: java
repo: apache__hadoop
path: hadoop-tools/hadoop-resourceestimator/src/main/java/org/apache/hadoop/resourceestimator/skylinestore/exceptions/RecurrenceIdNotFoundException.java
class_span: { "start": 987, "end": 1214 }
source:
class ____ extends SkylineStoreException { private static final long serialVersionUID = -684069387367879218L; public RecurrenceIdNotFoundException(final String message) { super(message); } }
target: RecurrenceIdNotFoundException

language: java
repo: google__guava
path: android/guava/src/com/google/common/primitives/Booleans.java
class_span: { "start": 11729, "end": 14393 }
source:
enum ____ implements Comparator<boolean[]> { INSTANCE; @Override public int compare(boolean[] left, boolean[] right) { // do not static import Math.min due to https://bugs.openjdk.org/browse/JDK-8357219 @SuppressWarnings("StaticImportPreferred") int minLength = Math.min(left.length, right.length); for (int i = 0; i < minLength; i++) { int result = Boolean.compare(left[i], right[i]); if (result != 0) { return result; } } return left.length - right.length; } @Override public String toString() { return "Booleans.lexicographicalComparator()"; } } /** * Copies a collection of {@code Boolean} instances into a new array of primitive {@code boolean} * values. * * <p>Elements are copied from the argument collection as if by {@code collection.toArray()}. * Calling this method is as thread-safe as calling that method. * * <p><b>Note:</b> consider representing the collection as a {@link java.util.BitSet} instead. * * @param collection a collection of {@code Boolean} objects * @return an array containing the same values as {@code collection}, in the same order, converted * to primitives * @throws NullPointerException if {@code collection} or any of its elements is null */ public static boolean[] toArray(Collection<Boolean> collection) { if (collection instanceof BooleanArrayAsList) { return ((BooleanArrayAsList) collection).toBooleanArray(); } Object[] boxedArray = collection.toArray(); int len = boxedArray.length; boolean[] array = new boolean[len]; for (int i = 0; i < len; i++) { // checkNotNull for GWT (do not optimize) array[i] = (Boolean) checkNotNull(boxedArray[i]); } return array; } /** * Returns a fixed-size list backed by the specified array, similar to {@link * Arrays#asList(Object[])}. The list supports {@link List#set(int, Object)}, but any attempt to * set a value to {@code null} will result in a {@link NullPointerException}. * * <p>There are at most two distinct objects in this list, {@code (Boolean) true} and {@code * (Boolean) false}. Java guarantees that those are always represented by the same objects. * * <p>The returned list is serializable. * * @param backingArray the array to back the list * @return a list view of the array */ public static List<Boolean> asList(boolean... backingArray) { if (backingArray.length == 0) { return Collections.emptyList(); } return new BooleanArrayAsList(backingArray); } private static final
target: LexicographicalComparator

language: java
repo: quarkusio__quarkus
path: extensions/redis-client/runtime/src/main/java/io/quarkus/redis/runtime/datasource/BlockingCountMinCommandsImpl.java
class_span: { "start": 311, "end": 1764 }
source:
class ____<K, V> extends AbstractRedisCommandGroup implements CountMinCommands<K, V> { private final ReactiveCountMinCommands<K, V> reactive; public BlockingCountMinCommandsImpl(RedisDataSource ds, ReactiveCountMinCommands<K, V> reactive, Duration timeout) { super(ds, timeout); this.reactive = reactive; } @Override public long cmsIncrBy(K key, V value, long increment) { return reactive.cmsIncrBy(key, value, increment).await().atMost(timeout); } @Override public Map<V, Long> cmsIncrBy(K key, Map<V, Long> couples) { return reactive.cmsIncrBy(key, couples).await().atMost(timeout); } @Override public void cmsInitByDim(K key, long width, long depth) { reactive.cmsInitByDim(key, width, depth).await().atMost(timeout); } @Override public void cmsInitByProb(K key, double error, double probability) { reactive.cmsInitByProb(key, error, probability).await().atMost(timeout); } @Override public long cmsQuery(K key, V item) { return reactive.cmsQuery(key, item).await().atMost(timeout); } @Override public List<Long> cmsQuery(K key, V... items) { return reactive.cmsQuery(key, items).await().atMost(timeout); } @Override public void cmsMerge(K dest, List<K> src, List<Integer> weight) { reactive.cmsMerge(dest, src, weight).await().atMost(timeout); } }
target: BlockingCountMinCommandsImpl

language: java
repo: apache__flink
path: flink-kubernetes/src/main/java/org/apache/flink/kubernetes/configuration/KubernetesDeploymentTarget.java
class_span: { "start": 1148, "end": 1243 }
source:
class ____ all the supported deployment target names for Kubernetes. */ @Internal public
target: containing

language: java
repo: hibernate__hibernate-orm
path: hibernate-core/src/test/java/org/hibernate/orm/test/collection/orderby/OrderByMappedByTest.java
class_span: { "start": 534, "end": 1148 }
source:
class ____ { @Test void test(EntityManagerFactoryScope scope) { scope.inTransaction( em -> { Book book = new Book( "XXX" ); em.persist( book ); book.pages.add( new Page("XXX", 1, "Lorem ipsum") ); } ); scope.inTransaction( em -> { Book book = em.find( Book.class, "XXX" ); book.pages.add( new Page("XXX", 2, "Lorem ipsum") ); } ); List<String> queries = ((SQLStatementInspector) scope.getStatementInspector()).getSqlQueries(); assertEquals( 4, queries.size() ); scope.inTransaction( em -> { assertEquals( em.find( Book.class, "XXX" ).pages.size(), 2 ); } ); } }
target: OrderByMappedByTest

language: java
repo: apache__flink
path: flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/sql/FunctionITCase.java
class_span: { "start": 79113, "end": 79729 }
source:
class ____ extends TableFunction<Object> { @FunctionHint(output = @DataTypeHint("STRING")) public void eval(String s) { if (s == null) { fail("unknown failure"); } else { collect(s + " is a string"); } } @FunctionHint(output = @DataTypeHint("INT")) public void eval(Integer i) { if (i == null) { collect(null); } else { collect(i); } } } /** Function that returns a string or integer. */ public static
target: DynamicTableFunction

language: java
repo: quarkusio__quarkus
path: extensions/redis-client/runtime/src/main/java/io/quarkus/redis/client/RedisClientName.java
class_span: { "start": 1040, "end": 1439 }
source:
class ____ extends AnnotationLiteral<RedisClientName> implements RedisClientName { public static Literal of(String value) { return new Literal(value); } private final String value; public Literal(String value) { this.value = value; } @Override public String value() { return value; } } }
target: Literal

language: java
repo: elastic__elasticsearch
path: modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java
class_span: { "start": 2654, "end": 10201 }
source:
class ____ extends MapperTestCase { @Override protected Collection<Plugin> getPlugins() { return List.of(new MapperExtrasPlugin()); } @Override protected Object getSampleValueForDocument() { return "value"; } public void testExistsStandardSource() throws IOException { assertExistsQuery(createMapperService(fieldMapping(b -> b.field("type", "match_only_text")))); } public void testExistsSyntheticSource() throws IOException { assertExistsQuery(createSytheticSourceMapperService(fieldMapping(b -> b.field("type", "match_only_text")))); } public void testPhraseQueryStandardSource() throws IOException { assertPhraseQuery(createMapperService(fieldMapping(b -> b.field("type", "match_only_text")))); } public void testPhraseQuerySyntheticSource() throws IOException { assertPhraseQuery(createSytheticSourceMapperService(fieldMapping(b -> b.field("type", "match_only_text")))); } private void assertPhraseQuery(MapperService mapperService) throws IOException { try (Directory directory = newDirectory()) { RandomIndexWriter iw = new RandomIndexWriter(random(), directory); LuceneDocument doc = mapperService.documentMapper().parse(source(b -> b.field("field", "the quick brown fox"))).rootDoc(); iw.addDocument(doc); iw.close(); try (DirectoryReader reader = DirectoryReader.open(directory)) { SearchExecutionContext context = createSearchExecutionContext(mapperService, newSearcher(reader)); MatchPhraseQueryBuilder queryBuilder = new MatchPhraseQueryBuilder("field", "brown fox"); TopDocs docs = context.searcher().search(queryBuilder.toQuery(context), 1); assertThat(docs.totalHits.value(), equalTo(1L)); assertThat(docs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(docs.scoreDocs[0].doc, equalTo(0)); } } } @Override protected void registerParameters(ParameterChecker checker) throws IOException { checker.registerUpdateCheck( b -> { b.field("meta", Collections.singletonMap("format", "mysql.access")); }, m -> assertEquals(Collections.singletonMap("format", "mysql.access"), m.fieldType().meta()) ); } @Override protected void minimalMapping(XContentBuilder b) throws IOException { b.field("type", "match_only_text"); } @Override protected void minimalStoreMapping(XContentBuilder b) throws IOException { // 'store' is always true minimalMapping(b); } public void testDefaults() throws IOException { DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); assertEquals(Strings.toString(fieldMapping(this::minimalMapping)), mapper.mappingSource().toString()); ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234"))); List<IndexableField> fields = doc.rootDoc().getFields("field"); assertEquals(1, fields.size()); var reader = fields.get(0).readerValue(); char[] buff = new char[20]; assertEquals(4, reader.read(buff)); assertEquals("1234", new String(buff, 0, 4)); IndexableFieldType fieldType = fields.get(0).fieldType(); assertThat(fieldType.omitNorms(), equalTo(true)); assertTrue(fieldType.tokenized()); assertFalse(fieldType.stored()); assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS)); assertThat(fieldType.storeTermVectors(), equalTo(false)); assertThat(fieldType.storeTermVectorOffsets(), equalTo(false)); assertThat(fieldType.storeTermVectorPositions(), equalTo(false)); assertThat(fieldType.storeTermVectorPayloads(), equalTo(false)); assertEquals(DocValuesType.NONE, fieldType.docValuesType()); } public void testNullConfigValuesFail() throws MapperParsingException { Exception e = expectThrows( MapperParsingException.class, () -> 
createDocumentMapper(fieldMapping(b -> b.field("type", "match_only_text").field("meta", (String) null))) ); assertThat(e.getMessage(), containsString("[meta] on mapper [field] of type [match_only_text] must not have a [null] value")); } public void testSimpleMerge() throws IOException { XContentBuilder startingMapping = fieldMapping(b -> b.field("type", "match_only_text")); MapperService mapperService = createMapperService(startingMapping); assertThat(mapperService.documentMapper().mappers().getMapper("field"), instanceOf(MatchOnlyTextFieldMapper.class)); merge(mapperService, startingMapping); assertThat(mapperService.documentMapper().mappers().getMapper("field"), instanceOf(MatchOnlyTextFieldMapper.class)); XContentBuilder newField = mapping(b -> { b.startObject("field").field("type", "match_only_text").startObject("meta").field("key", "value").endObject().endObject(); b.startObject("other_field").field("type", "keyword").endObject(); }); merge(mapperService, newField); assertThat(mapperService.documentMapper().mappers().getMapper("field"), instanceOf(MatchOnlyTextFieldMapper.class)); assertThat(mapperService.documentMapper().mappers().getMapper("other_field"), instanceOf(KeywordFieldMapper.class)); } public void testDisabledSource() throws IOException { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_doc"); { mapping.startObject("properties"); { mapping.startObject("foo"); { mapping.field("type", "match_only_text"); } mapping.endObject(); } mapping.endObject(); mapping.startObject("_source"); { mapping.field("enabled", false); } mapping.endObject(); } mapping.endObject().endObject(); MapperService mapperService = createMapperService(mapping); MappedFieldType ft = mapperService.fieldType("foo"); SearchExecutionContext context = createSearchExecutionContext(mapperService); TokenStream ts = new CannedTokenStream(new Token("a", 0, 3), new Token("b", 4, 7)); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> ft.phraseQuery(ts, 0, true, context)); assertThat(e.getMessage(), Matchers.containsString("cannot run positional queries since [_source] is disabled")); // Term queries are ok ft.termQuery("a", context); // no exception } @Override protected Object generateRandomInputValue(MappedFieldType ft) { assumeFalse("We don't have a way to assert things here", true); return null; } @Override protected void randomFetchTestFieldConfig(XContentBuilder b) throws IOException { assumeFalse("We don't have a way to assert things here", true); } @Override protected boolean supportsIgnoreMalformed() { return false; } @Override protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) { assertFalse("match_only_text doesn't support ignoreMalformed", ignoreMalformed); return new MatchOnlyTextSyntheticSourceSupport(); } static
MatchOnlyTextFieldMapperTests
java
mapstruct__mapstruct
processor/src/test/java/org/mapstruct/ap/test/subclassmapping/ErroneousInverseSubclassMapper.java
{ "start": 649, "end": 1125 }
interface ____ { ErroneousInverseSubclassMapper INSTANCE = Mappers.getMapper( ErroneousInverseSubclassMapper.class ); @SubclassMapping( source = Bike.class, target = VehicleDto.class ) @SubclassMapping( source = Car.class, target = VehicleDto.class ) @Mapping( target = "maker", source = "vehicleManufacturingCompany" ) VehicleDto map(Vehicle vehicle); @InheritInverseConfiguration Vehicle mapInverse(VehicleDto dto); }
ErroneousInverseSubclassMapper
java
apache__hadoop
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java
{ "start": 1008, "end": 1748 }
class ____ extends TestRpcBase { private static RPC.Server server; @BeforeEach public void setUp() throws Exception { super.setupConf(); server = setupTestServer(conf, 2); } @AfterEach public void tearDown() throws Exception { server.stop(); } // Now test a PB service - a server hosts both PB and Writable Rpcs. @Test public void testPBService() throws Exception { // Set RPC engine to protobuf RPC engine Configuration conf2 = new Configuration(); RPC.setProtocolEngine(conf2, TestRpcService.class, ProtobufRpcEngine2.class); TestRpcService client = RPC.getProxy(TestRpcService.class, 0, addr, conf2); TestProtoBufRpc.testProtoBufRpc(client); } }
TestMultipleProtocolServer
java
apache__camel
core/camel-core/src/test/java/org/apache/camel/impl/event/EventNotifierExchangeSentParallelTest.java
{ "start": 1030, "end": 2721 }
class ____ extends EventNotifierExchangeSentTest { @Override @Test public void testExchangeSentRecipient() throws Exception { getMockEndpoint("mock:result").expectedMessageCount(1); template.sendBodyAndHeader("direct:foo", "Hello World", "foo", "direct:cool,direct:start"); // wait for the message to be fully done using oneExchangeDone assertMockEndpointsSatisfied(); assertTrue(oneExchangeDone.matchesWaitTime()); // stop Camel to let all the events complete context.stop(); assertTrue(events.size() >= 11, "Should be 11 or more, was: " + events.size()); // we run parallel so just assert we got 6 sending and 6 sent events int sent = 0; int sending = 0; for (CamelEvent event : events) { if (event instanceof ExchangeSendingEvent) { sending++; } else { sent++; } } assertTrue(sending >= 5, "There should be 5 or more, was " + sending); assertTrue(sent >= 5, "There should be 5 or more, was " + sent); } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { @Override public void configure() { from("direct:start").to("log:foo").to("direct:bar").to("mock:result"); from("direct:bar").delay(500); from("direct:foo").recipientList(header("foo")).parallelProcessing(); from("direct:cool").delay(1000); from("direct:tap").wireTap("log:foo").to("mock:result"); } }; } }
EventNotifierExchangeSentParallelTest
java
apache__avro
lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroSequenceFile.java
{ "start": 1377, "end": 7744 }
class ____ { // Disable checkstyle for this variable. It must be public to work with JUnit // @Rule. // CHECKSTYLE:OFF @TempDir public File mTempDir; // CHECKSTYLE:ON /** Tests that reading and writing avro data works. */ @Test @SuppressWarnings("unchecked") void readAvro() throws IOException { Path sequenceFilePath = new Path(new File(mTempDir, "output.seq").getPath()); writeSequenceFile(sequenceFilePath, AvroKey.class, AvroValue.class, Schema.create(Schema.Type.STRING), Schema.create(Schema.Type.INT), new AvroKey<CharSequence>("one"), new AvroValue<>(1), new AvroKey<CharSequence>("two"), new AvroValue<>(2)); Configuration conf = new Configuration(); FileSystem fs = FileSystem.get(conf); AvroSequenceFile.Reader.Options options = new AvroSequenceFile.Reader.Options().withFileSystem(fs) .withInputPath(sequenceFilePath).withKeySchema(Schema.create(Schema.Type.STRING)) .withValueSchema(Schema.create(Schema.Type.INT)).withConfiguration(conf); try (SequenceFile.Reader reader = new AvroSequenceFile.Reader(options)) { AvroKey<CharSequence> key = new AvroKey<>(); AvroValue<Integer> value = new AvroValue<>(); // Read the first record. key = (AvroKey<CharSequence>) reader.next(key); assertNotNull(key); assertEquals("one", key.datum().toString()); value = (AvroValue<Integer>) reader.getCurrentValue(value); assertNotNull(value); assertEquals(1, value.datum().intValue()); // Read the second record. key = (AvroKey<CharSequence>) reader.next(key); assertNotNull(key); assertEquals("two", key.datum().toString()); value = (AvroValue<Integer>) reader.getCurrentValue(value); assertNotNull(value); assertEquals(2, value.datum().intValue()); assertNull(reader.next(key), "Should be no more records."); } } /** * Tests that reading and writing avro records without a reader schema works. */ @Test @SuppressWarnings("unchecked") void readAvroWithoutReaderSchemas() throws IOException { Path sequenceFilePath = new Path(new File(mTempDir, "output.seq").getPath()); writeSequenceFile(sequenceFilePath, AvroKey.class, AvroValue.class, Schema.create(Schema.Type.STRING), Schema.create(Schema.Type.INT), new AvroKey<CharSequence>("one"), new AvroValue<>(1), new AvroKey<CharSequence>("two"), new AvroValue<>(2)); Configuration conf = new Configuration(); FileSystem fs = FileSystem.get(conf); AvroSequenceFile.Reader.Options options = new AvroSequenceFile.Reader.Options().withFileSystem(fs) .withInputPath(sequenceFilePath).withConfiguration(conf); try (SequenceFile.Reader reader = new AvroSequenceFile.Reader(options)) { AvroKey<CharSequence> key = new AvroKey<>(); AvroValue<Integer> value = new AvroValue<>(); // Read the first record. key = (AvroKey<CharSequence>) reader.next(key); assertNotNull(key); assertEquals("one", key.datum().toString()); value = (AvroValue<Integer>) reader.getCurrentValue(value); assertNotNull(value); assertEquals(1, value.datum().intValue()); // Read the second record. key = (AvroKey<CharSequence>) reader.next(key); assertNotNull(key); assertEquals("two", key.datum().toString()); value = (AvroValue<Integer>) reader.getCurrentValue(value); assertNotNull(value); assertEquals(2, value.datum().intValue()); assertNull(reader.next(key), "Should be no more records."); } } /** Tests that reading and writing ordinary Writables still works. 
*/ @Test void readWritables() throws IOException { Path sequenceFilePath = new Path(new File(mTempDir, "output.seq").getPath()); writeSequenceFile(sequenceFilePath, Text.class, IntWritable.class, null, null, new Text("one"), new IntWritable(1), new Text("two"), new IntWritable(2)); Configuration conf = new Configuration(); FileSystem fs = FileSystem.get(conf); AvroSequenceFile.Reader.Options options = new AvroSequenceFile.Reader.Options().withFileSystem(fs) .withInputPath(sequenceFilePath).withConfiguration(conf); try (SequenceFile.Reader reader = new AvroSequenceFile.Reader(options)) { Text key = new Text(); IntWritable value = new IntWritable(); // Read the first record. assertTrue(reader.next(key)); assertEquals("one", key.toString()); reader.getCurrentValue(value); assertNotNull(value); assertEquals(1, value.get()); // Read the second record. assertTrue(reader.next(key)); assertEquals("two", key.toString()); reader.getCurrentValue(value); assertNotNull(value); assertEquals(2, value.get()); assertFalse(reader.next(key), "Should be no more records."); } } /** * Writes a sequence file of records. * * @param file The target file path. * @param keySchema The schema of the key if using Avro, else null. * @param valueSchema The schema of the value if using Avro, else null. * @param records <i>key1</i>, <i>value1</i>, <i>key2</i>, <i>value2</i>, * ... */ private void writeSequenceFile(Path file, Class<?> keyClass, Class<?> valueClass, Schema keySchema, Schema valueSchema, Object... records) throws IOException { // Make sure the key/value records have an even size. if (0 != records.length % 2) { throw new IllegalArgumentException("Expected a value for each key record."); } // Open a AvroSequenceFile writer. Configuration conf = new Configuration(); FileSystem fs = FileSystem.get(conf); AvroSequenceFile.Writer.Options options = new AvroSequenceFile.Writer.Options().withFileSystem(fs) .withConfiguration(conf).withOutputPath(file); if (null != keySchema) { options.withKeySchema(keySchema); } else { options.withKeyClass(keyClass); } if (null != valueSchema) { options.withValueSchema(valueSchema); } else { options.withValueClass(valueClass); } try (SequenceFile.Writer writer = new AvroSequenceFile.Writer(options)) { // Write some records. for (int i = 0; i < records.length; i += 2) { writer.append(records[i], records[i + 1]); } } } }
TestAvroSequenceFile
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/GetClassOnAnnotationTest.java
{ "start": 865, "end": 1174 }
class ____ { private final CompilationTestHelper compilationHelper = CompilationTestHelper.newInstance(GetClassOnAnnotation.class, getClass()); @Test public void positive() { compilationHelper .addSourceLines( "Test.java", """
GetClassOnAnnotationTest
java
redisson__redisson
redisson/src/main/java/org/redisson/RedissonSpinLock.java
{ "start": 1597, "end": 11996 }
class ____ extends RedissonBaseLock { protected long internalLockLeaseTime; protected final LockOptions.BackOff backOff; final CommandAsyncExecutor commandExecutor; RedissonSpinLock(CommandAsyncExecutor commandExecutor, String name, LockOptions.BackOff backOff) { super(commandExecutor, name); this.commandExecutor = commandExecutor; this.internalLockLeaseTime = getServiceManager().getCfg().getLockWatchdogTimeout(); this.backOff = backOff; } @Override public void lock() { try { lockInterruptibly(-1, null); } catch (InterruptedException e) { throw new IllegalStateException(); } } @Override public void lock(long leaseTime, TimeUnit unit) { try { lockInterruptibly(leaseTime, unit); } catch (InterruptedException e) { throw new IllegalStateException(); } } @Override public void lockInterruptibly() throws InterruptedException { lockInterruptibly(-1, null); } @Override public void lockInterruptibly(long leaseTime, TimeUnit unit) throws InterruptedException { long threadId = Thread.currentThread().getId(); Long ttl = tryAcquire(leaseTime, unit, threadId); // lock acquired if (ttl == null) { return; } LockOptions.BackOffPolicy backOffPolicy = backOff.create(); while (ttl != null) { long nextSleepPeriod = backOffPolicy.getNextSleepPeriod(); Thread.sleep(nextSleepPeriod); ttl = tryAcquire(leaseTime, unit, threadId); } } private Long tryAcquire(long leaseTime, TimeUnit unit, long threadId) { return get(tryAcquireAsync(leaseTime, unit, threadId)); } private <T> RFuture<Long> tryAcquireAsync(long leaseTime, TimeUnit unit, long threadId) { if (leaseTime > 0) { RFuture<Long> acquiredFuture = tryLockInnerAsync(leaseTime, unit, threadId, RedisCommands.EVAL_LONG); CompletionStage<Long> s = handleNoSync(threadId, acquiredFuture); return new CompletableFutureWrapper<>(s); } RFuture<Long> ttlRemainingFuture = tryLockInnerAsync(internalLockLeaseTime, TimeUnit.MILLISECONDS, threadId, RedisCommands.EVAL_LONG); CompletionStage<Long> s = handleNoSync(threadId, ttlRemainingFuture); ttlRemainingFuture = new CompletableFutureWrapper<>(s); ttlRemainingFuture.thenAccept(ttlRemaining -> { // lock acquired if (ttlRemaining == null) { scheduleExpirationRenewal(threadId); } }); return ttlRemainingFuture; } @Override public boolean tryLock() { return get(tryLockAsync()); } <T> RFuture<T> tryLockInnerAsync(long leaseTime, TimeUnit unit, long threadId, RedisStrictCommand<T> command) { internalLockLeaseTime = unit.toMillis(leaseTime); return commandExecutor.syncedEvalNoRetry(getRawName(), LongCodec.INSTANCE, command, "if (redis.call('exists', KEYS[1]) == 0) then " + "redis.call('hincrby', KEYS[1], ARGV[2], 1); " + "redis.call('pexpire', KEYS[1], ARGV[1]); " + "return nil; " + "end; " + "if (redis.call('hexists', KEYS[1], ARGV[2]) == 1) then " + "redis.call('hincrby', KEYS[1], ARGV[2], 1); " + "redis.call('pexpire', KEYS[1], ARGV[1]); " + "return nil; " + "end; " + "return redis.call('pttl', KEYS[1]);", Collections.singletonList(getRawName()), internalLockLeaseTime, getLockName(threadId)); } @Override public boolean tryLock(long waitTime, long leaseTime, TimeUnit unit) throws InterruptedException { final long time = unit.toMillis(waitTime); final long current = System.currentTimeMillis(); final long threadId = Thread.currentThread().getId(); Long ttl = tryAcquire(leaseTime, unit, threadId); // lock acquired if (ttl == null) { return true; } if (System.currentTimeMillis() - current >= time) { acquireFailed(waitTime, unit, threadId); return false; } LockOptions.BackOffPolicy backOffPolicy = backOff.create(); while (true) { 
Thread.sleep(backOffPolicy.getNextSleepPeriod()); ttl = tryAcquire(leaseTime, unit, threadId); if (ttl == null) { return true; } if (System.currentTimeMillis() - current >= time) { acquireFailed(waitTime, unit, threadId); return false; } } } @Override public boolean tryLock(long waitTime, TimeUnit unit) throws InterruptedException { return tryLock(waitTime, -1, unit); } @Override protected void cancelExpirationRenewal(Long threadId, Boolean unlockResult) { super.cancelExpirationRenewal(threadId, unlockResult); if (unlockResult == null || unlockResult) { internalLockLeaseTime = getServiceManager().getCfg().getLockWatchdogTimeout(); } } @Override public RFuture<Boolean> forceUnlockAsync() { cancelExpirationRenewal(null, null); return commandExecutor.syncedEvalWithRetry(getRawName(), LongCodec.INSTANCE, RedisCommands.EVAL_BOOLEAN, "if (redis.call('del', KEYS[1]) == 1) then " + "return 1 " + "else " + "return 0 " + "end", Collections.singletonList(getRawName())); } protected RFuture<Boolean> unlockInnerAsync(long threadId, String requestId, int timeout) { return evalWriteSyncedNoRetryAsync(getRawName(), LongCodec.INSTANCE, RedisCommands.EVAL_BOOLEAN, "local val = redis.call('get', KEYS[2]); " + "if val ~= false then " + "return tonumber(val);" + "end; " + "if (redis.call('hexists', KEYS[1], ARGV[2]) == 0) then " + "return nil;" + "end; " + "local counter = redis.call('hincrby', KEYS[1], ARGV[2], -1); " + "if (counter > 0) then " + "redis.call('pexpire', KEYS[1], ARGV[1]); " + "redis.call('set', KEYS[2], 0, 'px', ARGV[3]); " + "return 0; " + "else " + "redis.call('del', KEYS[1]); " + "redis.call('set', KEYS[2], 1, 'px', ARGV[3]); " + "return 1; " + "end; ", Arrays.asList(getRawName(), getUnlockLatchName(requestId)), internalLockLeaseTime, getLockName(threadId), timeout); } @Override public RFuture<Void> lockAsync(long leaseTime, TimeUnit unit, long currentThreadId) { CompletableFuture<Void> result = new CompletableFuture<>(); LockOptions.BackOffPolicy backOffPolicy = backOff.create(); lockAsync(leaseTime, unit, currentThreadId, result, backOffPolicy); return new CompletableFutureWrapper<>(result); } private void lockAsync(long leaseTime, TimeUnit unit, long currentThreadId, CompletableFuture<Void> result, LockOptions.BackOffPolicy backOffPolicy) { RFuture<Long> ttlFuture = tryAcquireAsync(leaseTime, unit, currentThreadId); ttlFuture.whenComplete((ttl, e) -> { if (e != null) { result.completeExceptionally(e); return; } // lock acquired if (ttl == null) { if (!result.complete(null)) { unlockAsync(currentThreadId); } return; } long nextSleepPeriod = backOffPolicy.getNextSleepPeriod(); getServiceManager().newTimeout( timeout -> lockAsync(leaseTime, unit, currentThreadId, result, backOffPolicy), nextSleepPeriod, TimeUnit.MILLISECONDS); }); } @Override public RFuture<Boolean> tryLockAsync(long threadId) { RFuture<Long> longRFuture = tryAcquireAsync(-1, null, threadId); CompletionStage<Boolean> f = longRFuture.thenApply(res -> res == null); return new CompletableFutureWrapper<>(f); } @Override public RFuture<Boolean> tryLockAsync(long waitTime, long leaseTime, TimeUnit unit, long currentThreadId) { CompletableFuture<Boolean> result = new CompletableFuture<>(); LockOptions.BackOffPolicy backOffPolicy = backOff.create(); tryLock(System.currentTimeMillis(), leaseTime, unit, currentThreadId, result, unit.toMillis(waitTime), backOffPolicy); return new CompletableFutureWrapper<>(result); } private void tryLock(long startTime, long leaseTime, TimeUnit unit, long currentThreadId, CompletableFuture<Boolean> 
result, long waitTime, LockOptions.BackOffPolicy backOffPolicy) { RFuture<Long> ttlFuture = tryAcquireAsync(leaseTime, unit, currentThreadId); ttlFuture.whenComplete((ttl, e) -> { if (e != null) { result.completeExceptionally(e); return; } // lock acquired if (ttl == null) { if (!result.complete(true)) { unlockAsync(currentThreadId); } return; } if (System.currentTimeMillis() - startTime >= waitTime) { trySuccessFalse(currentThreadId, result); return; } long nextSleepPeriod = backOffPolicy.getNextSleepPeriod(); getServiceManager().newTimeout( timeout -> tryLock(startTime, leaseTime, unit, currentThreadId, result, waitTime, backOffPolicy), nextSleepPeriod, TimeUnit.MILLISECONDS); }); } }
RedissonSpinLock
java
apache__hadoop
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CanSetReadahead.java
{ "start": 1050, "end": 1560 }
interface ____ { /** * Set the readahead on this stream. * * @param readahead The readahead to use. null means to use the default. * @throws IOException If there was an error changing the dropBehind * setting. * UnsupportedOperationException If this stream doesn't support * setting readahead. */ public void setReadahead(Long readahead) throws IOException, UnsupportedOperationException; }
CanSetReadahead
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/factories/JobManagerJobMetricGroupFactory.java
{ "start": 1099, "end": 1434 }
interface ____ { /** * Create a new {@link JobManagerJobMetricGroup}. * * @param executionPlan for which to create a new {@link JobManagerJobMetricGroup}. * @return {@link JobManagerJobMetricGroup} */ JobManagerJobMetricGroup create(@Nonnull ExecutionPlan executionPlan); }
JobManagerJobMetricGroupFactory
java
apache__flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/StructuredType.java
{ "start": 5322, "end": 5857 }
class ____ extends UserDefinedType { private static final long serialVersionUID = 1L; public static final String CATALOG_FORMAT = "%s"; public static final String INLINE_FORMAT = "STRUCTURED<'%s', %s>"; public static final Class<?> FALLBACK_CONVERSION = Row.class; private static final Set<String> INPUT_OUTPUT_CONVERSION = conversionSet(Row.class.getName(), RowData.class.getName()); /** Defines an attribute of a {@link StructuredType}. */ @PublicEvolving public static final
StructuredType
java
spring-projects__spring-framework
spring-beans/src/main/java/org/springframework/beans/factory/support/AbstractBeanFactory.java
{ "start": 65320, "end": 71876 }
class ____ the factory method that creates it. * If subclasses do instantiate the FactoryBean, they should consider trying the * {@code getObjectType} method without fully populating the bean. If this fails, * a full FactoryBean creation as performed by this implementation should be used * as fallback. * @param beanName the name of the bean * @param mbd the merged bean definition for the bean * @param allowInit if initialization of the FactoryBean is permitted if the type * cannot be determined another way * @return the type for the bean if determinable, otherwise {@code ResolvableType.NONE} * @since 5.2 * @see org.springframework.beans.factory.FactoryBean#getObjectType() * @see #getBean(String) */ protected ResolvableType getTypeForFactoryBean(String beanName, RootBeanDefinition mbd, boolean allowInit) { try { ResolvableType result = getTypeForFactoryBeanFromAttributes(mbd); if (result != ResolvableType.NONE) { return result; } } catch (IllegalArgumentException ex) { throw new BeanDefinitionStoreException(mbd.getResourceDescription(), beanName, String.valueOf(ex.getMessage())); } if (allowInit && mbd.isSingleton()) { try { FactoryBean<?> factoryBean = doGetBean(FACTORY_BEAN_PREFIX + beanName, FactoryBean.class, null, true); Class<?> objectType = getTypeForFactoryBean(factoryBean); return (objectType != null ? ResolvableType.forClass(objectType) : ResolvableType.NONE); } catch (BeanCreationException ex) { if (ex.contains(BeanCurrentlyInCreationException.class)) { logger.trace(LogMessage.format("Bean currently in creation on FactoryBean type check: %s", ex)); } else if (mbd.isLazyInit()) { logger.trace(LogMessage.format("Bean creation exception on lazy FactoryBean type check: %s", ex)); } else { logger.debug(LogMessage.format("Bean creation exception on eager FactoryBean type check: %s", ex)); } onSuppressedException(ex); } } // FactoryBean type not resolvable return ResolvableType.NONE; } /** * Mark the specified bean as already created (or about to be created). * <p>This allows the bean factory to optimize its caching for repeated * creation of the specified bean. * @param beanName the name of the bean */ protected void markBeanAsCreated(String beanName) { if (!this.alreadyCreated.contains(beanName)) { synchronized (this.mergedBeanDefinitions) { if (!isBeanEligibleForMetadataCaching(beanName)) { // Let the bean definition get re-merged now that we're actually creating // the bean... just in case some of its metadata changed in the meantime. clearMergedBeanDefinition(beanName); } this.alreadyCreated.add(beanName); } } } /** * Perform appropriate cleanup of cached metadata after bean creation failed. * @param beanName the name of the bean */ protected void cleanupAfterBeanCreationFailure(String beanName) { synchronized (this.mergedBeanDefinitions) { this.alreadyCreated.remove(beanName); } } /** * Determine whether the specified bean is eligible for having * its bean definition metadata cached. * @param beanName the name of the bean * @return {@code true} if the bean's metadata may be cached * at this point already */ protected boolean isBeanEligibleForMetadataCaching(String beanName) { return this.alreadyCreated.contains(beanName); } /** * Remove the singleton instance (if any) for the given bean name, * but only if it hasn't been used for other purposes than type checking. 
* @param beanName the name of the bean * @return {@code true} if actually removed, {@code false} otherwise */ protected boolean removeSingletonIfCreatedForTypeCheckOnly(String beanName) { if (!this.alreadyCreated.contains(beanName)) { removeSingleton(beanName); return true; } else { return false; } } /** * Check whether this factory's bean creation phase already started, * i.e. whether any bean has been marked as created in the meantime. * @since 4.2.2 * @see #markBeanAsCreated */ protected boolean hasBeanCreationStarted() { return !this.alreadyCreated.isEmpty(); } /** * Get the object for the given bean instance, either the bean * instance itself or its created object in case of a FactoryBean. * @param beanInstance the shared bean instance * @param name the name that may include factory dereference prefix * @param beanName the canonical bean name * @param mbd the merged bean definition * @return the object to expose for the bean */ protected Object getObjectForBeanInstance(Object beanInstance, @Nullable Class<?> requiredType, String name, String beanName, @Nullable RootBeanDefinition mbd) { // Don't let calling code try to dereference the factory if the bean isn't a factory. if (BeanFactoryUtils.isFactoryDereference(name)) { if (beanInstance instanceof NullBean) { return beanInstance; } if (!(beanInstance instanceof FactoryBean)) { throw new BeanIsNotAFactoryException(beanName, beanInstance.getClass()); } if (mbd != null) { mbd.isFactoryBean = true; } return beanInstance; } // Now we have the bean instance, which may be a normal bean or a FactoryBean. // If it's a FactoryBean, we use it to create a bean instance, unless the // caller actually wants a reference to the factory. if (!(beanInstance instanceof FactoryBean<?> factoryBean)) { return beanInstance; } Object object = null; if (mbd != null) { mbd.isFactoryBean = true; } else { object = getCachedObjectForFactoryBean(beanName); } if (object == null) { // Return bean instance from factory. // Caches object obtained from FactoryBean if it is a singleton. if (mbd == null && containsBeanDefinition(beanName)) { mbd = getMergedLocalBeanDefinition(beanName); } boolean synthetic = (mbd != null && mbd.isSynthetic()); object = getObjectFromFactoryBean(factoryBean, requiredType, beanName, !synthetic); } return object; } /** * Determine whether the given bean name is already in use within this factory, * i.e. whether there is a local bean or alias registered under this name or * an inner bean created with this name. * @param beanName the name to check */ public boolean isBeanNameInUse(String beanName) { return isAlias(beanName) || containsLocalBean(beanName) || hasDependentBean(beanName); } /** * Determine whether the given bean requires destruction on shutdown. * <p>The default implementation checks the DisposableBean
or
java
junit-team__junit5
platform-tests/src/test/java/org/junit/platform/commons/util/PreconditionsTests.java
{ "start": 1088, "end": 6728 }
class ____ { @Test void notNullPassesForNonNullObject() { var object = new Object(); var nonNullObject = notNull(object, "message"); assertSame(object, nonNullObject); } @Test void notNullThrowsForNullObject() { var message = "argument is null"; assertPreconditionViolationFor(() -> notNull(null, message)).withMessage(message); } @Test void notNullThrowsForNullObjectAndMessageSupplier() { var message = "argument is null"; Object object = null; assertPreconditionViolationFor(() -> notNull(object, () -> message)).withMessage(message); } @Test void notEmptyPassesForNonEmptyArray() { var array = new String[] { "a", "b", "c" }; var nonEmptyArray = notEmpty(array, () -> "should not fail"); assertSame(array, nonEmptyArray); } @Test void notEmptyPassesForNonEmptyCollection() { Collection<String> collection = List.of("a", "b", "c"); var nonEmptyCollection = notEmpty(collection, () -> "should not fail"); assertSame(collection, nonEmptyCollection); } @Test void notEmptyPassesForArrayWithNullElements() { notEmpty(new String[] { null }, "message"); } @Test void notEmptyPassesForCollectionWithNullElements() { notEmpty(singletonList(null), "message"); } @Test void notEmptyThrowsForNullArray() { var message = "array is empty"; assertPreconditionViolationFor(() -> notEmpty((Object[]) null, message)).withMessage(message); } @Test void notEmptyThrowsForNullCollection() { var message = "collection is empty"; assertPreconditionViolationFor(() -> notEmpty((Collection<?>) null, message)).withMessage(message); } @Test void notEmptyThrowsForEmptyArray() { var message = "array is empty"; assertPreconditionViolationFor(() -> notEmpty(new Object[0], message)).withMessage(message); } @Test void notEmptyThrowsForEmptyCollection() { var message = "collection is empty"; assertPreconditionViolationFor(() -> notEmpty(List.of(), message)).withMessage(message); } @Test void containsNoNullElementsPassesForArrayThatIsNullOrEmpty() { containsNoNullElements((Object[]) null, "array is null"); containsNoNullElements((Object[]) null, () -> "array is null"); containsNoNullElements(new Object[0], "array is empty"); containsNoNullElements(new Object[0], () -> "array is empty"); } @Test void containsNoNullElementsPassesForCollectionThatIsNullOrEmpty() { containsNoNullElements((List<?>) null, "collection is null"); containsNoNullElements(List.of(), "collection is empty"); containsNoNullElements((List<?>) null, () -> "collection is null"); containsNoNullElements(List.of(), () -> "collection is empty"); } @Test void containsNoNullElementsPassesForArrayContainingNonNullElements() { var input = new String[] { "a", "b", "c" }; var output = containsNoNullElements(input, "message"); assertSame(input, output); } @Test void containsNoNullElementsPassesForCollectionContainingNonNullElements() { var input = List.of("a", "b", "c"); var output = containsNoNullElements(input, "message"); assertSame(input, output); output = containsNoNullElements(input, () -> "message"); assertSame(input, output); } @Test void containsNoNullElementsThrowsForArrayContainingNullElements() { var message = "array contains null elements"; Object[] array = { new Object(), null, new Object() }; assertPreconditionViolationFor(() -> containsNoNullElements(array, message)).withMessage(message); } @Test void containsNoNullElementsThrowsForCollectionContainingNullElements() { var message = "collection contains null elements"; assertPreconditionViolationFor(() -> containsNoNullElements(singletonList(null), message)).withMessage(message); } @Test void 
notBlankPassesForNonBlankString() { var string = "abc"; var nonBlankString = notBlank(string, "message"); assertSame(string, nonBlankString); } @Test void notBlankThrowsForNullString() { var message = "string shouldn't be blank"; assertPreconditionViolationFor(() -> notBlank(null, message)).withMessage(message); } @Test void notBlankThrowsForNullStringWithMessageSupplier() { var message = "string shouldn't be blank"; assertPreconditionViolationFor(() -> notBlank(null, () -> message)).withMessage(message); } @Test void notBlankThrowsForEmptyString() { var message = "string shouldn't be blank"; assertPreconditionViolationFor(() -> notBlank("", message)).withMessage(message); } @Test void notBlankThrowsForEmptyStringWithMessageSupplier() { var message = "string shouldn't be blank"; assertPreconditionViolationFor(() -> notBlank("", () -> message)).withMessage(message); } @Test void notBlankThrowsForBlankString() { var message = "string shouldn't be blank"; assertPreconditionViolationFor(() -> notBlank(" ", message)).withMessage(message); } @Test void notBlankThrowsForBlankStringWithMessageSupplier() { var message = "string shouldn't be blank"; assertPreconditionViolationFor(() -> notBlank(" ", () -> message)).withMessage(message); } @Test void conditionPassesForTruePredicate() { condition(true, "error message"); } @Test void conditionPassesForTruePredicateWithMessageSupplier() { condition(true, () -> "error message"); } @Test void conditionThrowsForFalsePredicate() { var message = "condition does not hold"; assertPreconditionViolationFor(() -> condition(false, message)).withMessage(message); } @Test void conditionThrowsForFalsePredicateWithMessageSupplier() { var message = "condition does not hold"; assertPreconditionViolationFor(() -> condition(false, () -> message)).withMessage(message); } }
PreconditionsTests
java
alibaba__nacos
api/src/main/java/com/alibaba/nacos/api/ai/model/mcp/registry/Remote.java
{ "start": 975, "end": 1513 }
class ____ { private String type; private String url; private List<KeyValueInput> headers; public String getType() { return type; } public void setType(String type) { this.type = type; } public String getUrl() { return url; } public void setUrl(String url) { this.url = url; } public List<KeyValueInput> getHeaders() { return headers; } public void setHeaders(List<KeyValueInput> headers) { this.headers = headers; } }
Remote
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/metamodel/attributeInSuper/WorkOrderId.java
{ "start": 251, "end": 301 }
class ____ extends AbstractWorkOrderId { }
WorkOrderId
java
elastic__elasticsearch
distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java
{ "start": 25663, "end": 27299 }
class ____ implements SecureSettingsLoader { boolean loaded = false; LoadedSecrets secrets = null; String password = null; boolean bootstrapped = false; Environment environment = null; boolean verifiedEnv = false; @Override public SecureSettingsLoader.LoadedSecrets load(Environment environment, Terminal terminal) throws IOException { loaded = true; // Stash the environment pointer, so we can compare it. Environment shouldn't be changed for // loaders that don't autoconfigure. this.environment = environment; SecureString password = null; if (terminal.getReader().ready() == false) { this.password = null; } else { password = new SecureString(terminal.readSecret("Enter a password")); this.password = password.toString(); } secrets = new SecureSettingsLoader.LoadedSecrets( KeyStoreWrapper.create(), password == null ? Optional.empty() : Optional.of(password) ); return secrets; } @Override public SecureSettings bootstrap(Environment environment, SecureString password) throws Exception { fail("Bootstrap shouldn't be called for loaders that cannot be auto-configured"); bootstrapped = true; return KeyStoreWrapper.create(); } @Override public boolean supportsSecurityAutoConfiguration() { return false; } } static
MockSecureSettingsLoader
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/query/restriction/Restriction.java
{ "start": 1859, "end": 2331 }
class ____ similar to {@code jakarta.data.Restriction}, and * is used by Hibernate Data Repositories to implement Jakarta Data * query methods. * * @see org.hibernate.query.specification.SelectionSpecification * @see org.hibernate.query.specification.MutationSpecification * @see org.hibernate.query.specification.QuerySpecification#restrict(Restriction) * * @see Path * @see Order * * @author Gavin King * * @since 7.0 */ @Incubating public
is
java
hibernate__hibernate-orm
tooling/metamodel-generator/src/test/java/org/hibernate/processor/test/embeddedid/withinheritance/EmbeddedIdWithInheritanceTest.java
{ "start": 607, "end": 967 }
class ____ { @Test @WithClasses({ Ref.class, AbstractRef.class, TestEntity.class }) @WithMappingFiles("orm.xml") void testEntityContainsEmbeddedIdProperty() { assertMetamodelClassGeneratedFor( TestEntity.class ); assertPresenceOfFieldInMetamodelFor( TestEntity.class, "ref", "Property ref should be in metamodel" ); } }
EmbeddedIdWithInheritanceTest
java
quarkusio__quarkus
extensions/tls-registry/deployment/src/test/java/io/quarkus/tls/DefaultJKSTrustStoreTest.java
{ "start": 845, "end": 2022 }
class ____ { private static final String configuration = """ quarkus.tls.trust-store.jks.path=target/certs/test-formats-truststore.jks quarkus.tls.trust-store.jks.password=password """; @RegisterExtension static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer( () -> ShrinkWrap.create(JavaArchive.class) .add(new StringAsset(configuration), "application.properties")); @Inject TlsConfigurationRegistry certificates; @Test void test() throws KeyStoreException, CertificateParsingException { TlsConfiguration def = certificates.getDefault().orElseThrow(); assertThat(def.getTrustStoreOptions()).isNotNull(); assertThat(def.getTrustStore()).isNotNull(); X509Certificate certificate = (X509Certificate) def.getTrustStore().getCertificate("test-formats"); assertThat(certificate).isNotNull(); assertThat(certificate.getSubjectAlternativeNames()).anySatisfy(l -> { assertThat(l.get(0)).isEqualTo(2); assertThat(l.get(1)).isEqualTo("localhost"); }); } }
DefaultJKSTrustStoreTest
java
spring-projects__spring-boot
module/spring-boot-webmvc-test/src/test/java/org/springframework/boot/webmvc/test/autoconfigure/mockmvc/ExampleArgument.java
{ "start": 734, "end": 921 }
class ____ { private final String value; public ExampleArgument(String value) { this.value = value; } @Override public String toString() { return this.value; } }
ExampleArgument
java
apache__camel
components/camel-fhir/camel-fhir-component/src/generated/java/org/apache/camel/component/fhir/FhirCreateEndpointConfigurationConfigurer.java
{ "start": 729, "end": 13396 }
class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, ExtendedPropertyConfigurerGetter { private static final Map<String, Object> ALL_OPTIONS; static { Map<String, Object> map = new CaseInsensitiveMap(); map.put("AccessToken", java.lang.String.class); map.put("ApiName", org.apache.camel.component.fhir.internal.FhirApiName.class); map.put("Client", ca.uhn.fhir.rest.client.api.IGenericClient.class); map.put("ClientFactory", ca.uhn.fhir.rest.client.api.IRestfulClientFactory.class); map.put("Compress", boolean.class); map.put("ConnectionTimeout", java.lang.Integer.class); map.put("DeferModelScanning", boolean.class); map.put("Encoding", java.lang.String.class); map.put("ExtraParameters", java.util.Map.class); map.put("FhirContext", ca.uhn.fhir.context.FhirContext.class); map.put("FhirVersion", java.lang.String.class); map.put("ForceConformanceCheck", boolean.class); map.put("Log", boolean.class); map.put("MethodName", java.lang.String.class); map.put("Password", java.lang.String.class); map.put("PreferReturn", ca.uhn.fhir.rest.api.PreferReturnEnum.class); map.put("PrettyPrint", boolean.class); map.put("ProxyHost", java.lang.String.class); map.put("ProxyPassword", java.lang.String.class); map.put("ProxyPort", java.lang.Integer.class); map.put("ProxyUser", java.lang.String.class); map.put("Resource", org.hl7.fhir.instance.model.api.IBaseResource.class); map.put("ResourceAsString", java.lang.String.class); map.put("ServerUrl", java.lang.String.class); map.put("SessionCookie", java.lang.String.class); map.put("SocketTimeout", java.lang.Integer.class); map.put("Summary", java.lang.String.class); map.put("Url", java.lang.String.class); map.put("Username", java.lang.String.class); map.put("ValidationMode", java.lang.String.class); ALL_OPTIONS = map; } @Override public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) { org.apache.camel.component.fhir.FhirCreateEndpointConfiguration target = (org.apache.camel.component.fhir.FhirCreateEndpointConfiguration) obj; switch (ignoreCase ? 
name.toLowerCase() : name) { case "accesstoken": case "accessToken": target.setAccessToken(property(camelContext, java.lang.String.class, value)); return true; case "apiname": case "apiName": target.setApiName(property(camelContext, org.apache.camel.component.fhir.internal.FhirApiName.class, value)); return true; case "client": target.setClient(property(camelContext, ca.uhn.fhir.rest.client.api.IGenericClient.class, value)); return true; case "clientfactory": case "clientFactory": target.setClientFactory(property(camelContext, ca.uhn.fhir.rest.client.api.IRestfulClientFactory.class, value)); return true; case "compress": target.setCompress(property(camelContext, boolean.class, value)); return true; case "connectiontimeout": case "connectionTimeout": target.setConnectionTimeout(property(camelContext, java.lang.Integer.class, value)); return true; case "defermodelscanning": case "deferModelScanning": target.setDeferModelScanning(property(camelContext, boolean.class, value)); return true; case "encoding": target.setEncoding(property(camelContext, java.lang.String.class, value)); return true; case "extraparameters": case "extraParameters": target.setExtraParameters(property(camelContext, java.util.Map.class, value)); return true; case "fhircontext": case "fhirContext": target.setFhirContext(property(camelContext, ca.uhn.fhir.context.FhirContext.class, value)); return true; case "fhirversion": case "fhirVersion": target.setFhirVersion(property(camelContext, java.lang.String.class, value)); return true; case "forceconformancecheck": case "forceConformanceCheck": target.setForceConformanceCheck(property(camelContext, boolean.class, value)); return true; case "log": target.setLog(property(camelContext, boolean.class, value)); return true; case "methodname": case "methodName": target.setMethodName(property(camelContext, java.lang.String.class, value)); return true; case "password": target.setPassword(property(camelContext, java.lang.String.class, value)); return true; case "preferreturn": case "preferReturn": target.setPreferReturn(property(camelContext, ca.uhn.fhir.rest.api.PreferReturnEnum.class, value)); return true; case "prettyprint": case "prettyPrint": target.setPrettyPrint(property(camelContext, boolean.class, value)); return true; case "proxyhost": case "proxyHost": target.setProxyHost(property(camelContext, java.lang.String.class, value)); return true; case "proxypassword": case "proxyPassword": target.setProxyPassword(property(camelContext, java.lang.String.class, value)); return true; case "proxyport": case "proxyPort": target.setProxyPort(property(camelContext, java.lang.Integer.class, value)); return true; case "proxyuser": case "proxyUser": target.setProxyUser(property(camelContext, java.lang.String.class, value)); return true; case "resource": target.setResource(property(camelContext, org.hl7.fhir.instance.model.api.IBaseResource.class, value)); return true; case "resourceasstring": case "resourceAsString": target.setResourceAsString(property(camelContext, java.lang.String.class, value)); return true; case "serverurl": case "serverUrl": target.setServerUrl(property(camelContext, java.lang.String.class, value)); return true; case "sessioncookie": case "sessionCookie": target.setSessionCookie(property(camelContext, java.lang.String.class, value)); return true; case "sockettimeout": case "socketTimeout": target.setSocketTimeout(property(camelContext, java.lang.Integer.class, value)); return true; case "summary": target.setSummary(property(camelContext, java.lang.String.class, value)); 
return true; case "url": target.setUrl(property(camelContext, java.lang.String.class, value)); return true; case "username": target.setUsername(property(camelContext, java.lang.String.class, value)); return true; case "validationmode": case "validationMode": target.setValidationMode(property(camelContext, java.lang.String.class, value)); return true; default: return false; } } @Override public Map<String, Object> getAllOptions(Object target) { return ALL_OPTIONS; } @Override public Class<?> getOptionType(String name, boolean ignoreCase) { switch (ignoreCase ? name.toLowerCase() : name) { case "accesstoken": case "accessToken": return java.lang.String.class; case "apiname": case "apiName": return org.apache.camel.component.fhir.internal.FhirApiName.class; case "client": return ca.uhn.fhir.rest.client.api.IGenericClient.class; case "clientfactory": case "clientFactory": return ca.uhn.fhir.rest.client.api.IRestfulClientFactory.class; case "compress": return boolean.class; case "connectiontimeout": case "connectionTimeout": return java.lang.Integer.class; case "defermodelscanning": case "deferModelScanning": return boolean.class; case "encoding": return java.lang.String.class; case "extraparameters": case "extraParameters": return java.util.Map.class; case "fhircontext": case "fhirContext": return ca.uhn.fhir.context.FhirContext.class; case "fhirversion": case "fhirVersion": return java.lang.String.class; case "forceconformancecheck": case "forceConformanceCheck": return boolean.class; case "log": return boolean.class; case "methodname": case "methodName": return java.lang.String.class; case "password": return java.lang.String.class; case "preferreturn": case "preferReturn": return ca.uhn.fhir.rest.api.PreferReturnEnum.class; case "prettyprint": case "prettyPrint": return boolean.class; case "proxyhost": case "proxyHost": return java.lang.String.class; case "proxypassword": case "proxyPassword": return java.lang.String.class; case "proxyport": case "proxyPort": return java.lang.Integer.class; case "proxyuser": case "proxyUser": return java.lang.String.class; case "resource": return org.hl7.fhir.instance.model.api.IBaseResource.class; case "resourceasstring": case "resourceAsString": return java.lang.String.class; case "serverurl": case "serverUrl": return java.lang.String.class; case "sessioncookie": case "sessionCookie": return java.lang.String.class; case "sockettimeout": case "socketTimeout": return java.lang.Integer.class; case "summary": return java.lang.String.class; case "url": return java.lang.String.class; case "username": return java.lang.String.class; case "validationmode": case "validationMode": return java.lang.String.class; default: return null; } } @Override public Object getOptionValue(Object obj, String name, boolean ignoreCase) { org.apache.camel.component.fhir.FhirCreateEndpointConfiguration target = (org.apache.camel.component.fhir.FhirCreateEndpointConfiguration) obj; switch (ignoreCase ? 
name.toLowerCase() : name) { case "accesstoken": case "accessToken": return target.getAccessToken(); case "apiname": case "apiName": return target.getApiName(); case "client": return target.getClient(); case "clientfactory": case "clientFactory": return target.getClientFactory(); case "compress": return target.isCompress(); case "connectiontimeout": case "connectionTimeout": return target.getConnectionTimeout(); case "defermodelscanning": case "deferModelScanning": return target.isDeferModelScanning(); case "encoding": return target.getEncoding(); case "extraparameters": case "extraParameters": return target.getExtraParameters(); case "fhircontext": case "fhirContext": return target.getFhirContext(); case "fhirversion": case "fhirVersion": return target.getFhirVersion(); case "forceconformancecheck": case "forceConformanceCheck": return target.isForceConformanceCheck(); case "log": return target.isLog(); case "methodname": case "methodName": return target.getMethodName(); case "password": return target.getPassword(); case "preferreturn": case "preferReturn": return target.getPreferReturn(); case "prettyprint": case "prettyPrint": return target.isPrettyPrint(); case "proxyhost": case "proxyHost": return target.getProxyHost(); case "proxypassword": case "proxyPassword": return target.getProxyPassword(); case "proxyport": case "proxyPort": return target.getProxyPort(); case "proxyuser": case "proxyUser": return target.getProxyUser(); case "resource": return target.getResource(); case "resourceasstring": case "resourceAsString": return target.getResourceAsString(); case "serverurl": case "serverUrl": return target.getServerUrl(); case "sessioncookie": case "sessionCookie": return target.getSessionCookie(); case "sockettimeout": case "socketTimeout": return target.getSocketTimeout(); case "summary": return target.getSummary(); case "url": return target.getUrl(); case "username": return target.getUsername(); case "validationmode": case "validationMode": return target.getValidationMode(); default: return null; } } @Override public Object getCollectionValueType(Object target, String name, boolean ignoreCase) { switch (ignoreCase ? name.toLowerCase() : name) { case "extraparameters": case "extraParameters": return java.lang.Object.class; default: return null; } } }
FhirCreateEndpointConfigurationConfigurer
java
apache__flink
flink-runtime/src/test/java/org/apache/flink/runtime/io/network/buffer/ReadOnlySlicedBufferTest.java
{ "start": 1493, "end": 14573 }
class ____ { private static final int BUFFER_SIZE = 1024; private static final int DATA_SIZE = 10; private NetworkBuffer buffer; @BeforeEach void setUp() throws Exception { final MemorySegment segment = MemorySegmentFactory.allocateUnpooledSegment(BUFFER_SIZE); buffer = new NetworkBuffer( segment, FreeingBufferRecycler.INSTANCE, Buffer.DataType.DATA_BUFFER, 0); for (int i = 0; i < DATA_SIZE; ++i) { buffer.writeByte(i); } } @Test void testForwardsIsBuffer() throws IOException { assertThat(buffer.readOnlySlice().isBuffer()).isEqualTo(buffer.isBuffer()); assertThat(buffer.readOnlySlice(1, 2).isBuffer()).isEqualTo(buffer.isBuffer()); Buffer eventBuffer = EventSerializer.toBuffer(EndOfPartitionEvent.INSTANCE, false); assertThat(eventBuffer.readOnlySlice().isBuffer()).isEqualTo(eventBuffer.isBuffer()); assertThat(eventBuffer.readOnlySlice(1, 2).isBuffer()).isEqualTo(eventBuffer.isBuffer()); } @Test void testSetDataType1() { ReadOnlySlicedNetworkBuffer readOnlyBuffer = buffer.readOnlySlice(); readOnlyBuffer.setDataType(Buffer.DataType.EVENT_BUFFER); assertThat(readOnlyBuffer.getDataType()).isEqualTo(Buffer.DataType.EVENT_BUFFER); } @Test void testSetDataType2() { ReadOnlySlicedNetworkBuffer readOnlyBuffer = buffer.readOnlySlice(1, 2); readOnlyBuffer.setDataType(Buffer.DataType.EVENT_BUFFER); assertThat(readOnlyBuffer.getDataType()).isEqualTo(Buffer.DataType.EVENT_BUFFER); assertThat(buffer.readOnlySlice(1, 2).getDataType()) .isNotEqualTo(Buffer.DataType.EVENT_BUFFER); } @Test void testForwardsGetMemorySegment() { assertThat(buffer.readOnlySlice().getMemorySegment()).isSameAs(buffer.getMemorySegment()); assertThat(buffer.readOnlySlice(1, 2).getMemorySegment()) .isSameAs(buffer.getMemorySegment()); } @Test void testForwardsGetRecycler() { assertThat(buffer.readOnlySlice().getRecycler()).isSameAs(buffer.getRecycler()); assertThat(buffer.readOnlySlice(1, 2).getRecycler()).isSameAs(buffer.getRecycler()); } /** * Tests forwarding of both {@link ReadOnlySlicedNetworkBuffer#recycleBuffer()} and {@link * ReadOnlySlicedNetworkBuffer#isRecycled()}. */ @Test void testForwardsRecycleBuffer1() { ReadOnlySlicedNetworkBuffer slice = buffer.readOnlySlice(); assertThat(slice.isRecycled()).isFalse(); slice.recycleBuffer(); assertThat(slice.isRecycled()).isTrue(); assertThat(buffer.isRecycled()).isTrue(); } /** * Tests forwarding of both {@link ReadOnlySlicedNetworkBuffer#recycleBuffer()} and {@link * ReadOnlySlicedNetworkBuffer#isRecycled()}. */ @Test void testForwardsRecycleBuffer2() { ReadOnlySlicedNetworkBuffer slice = buffer.readOnlySlice(1, 2); assertThat(slice.isRecycled()).isFalse(); slice.recycleBuffer(); assertThat(slice.isRecycled()).isTrue(); assertThat(buffer.isRecycled()).isTrue(); } /** * Tests forwarding of both {@link ReadOnlySlicedNetworkBuffer#recycleBuffer()} and {@link * ReadOnlySlicedNetworkBuffer#isRecycled()}. */ @Test void testForwardsRetainBuffer1() { ReadOnlySlicedNetworkBuffer slice = buffer.readOnlySlice(); assertThat(slice.refCnt()).isEqualTo(buffer.refCnt()); slice.retainBuffer(); assertThat(slice.refCnt()).isEqualTo(buffer.refCnt()); } /** * Tests forwarding of both {@link ReadOnlySlicedNetworkBuffer#retainBuffer()} and {@link * ReadOnlySlicedNetworkBuffer#isRecycled()}. 
*/ @Test void testForwardsRetainBuffer2() { ReadOnlySlicedNetworkBuffer slice = buffer.readOnlySlice(1, 2); assertThat(slice.refCnt()).isEqualTo(buffer.refCnt()); slice.retainBuffer(); assertThat(slice.refCnt()).isEqualTo(buffer.refCnt()); } @Test void testCreateSlice1() { buffer.readByte(); // so that we do not start at position 0 ReadOnlySlicedNetworkBuffer slice1 = buffer.readOnlySlice(); buffer.readByte(); // should not influence the second slice at all ReadOnlySlicedNetworkBuffer slice2 = slice1.readOnlySlice(); assertThat(slice2.unwrap().unwrap()).isSameAs(buffer); assertThat(slice2.getMemorySegment()).isEqualTo(slice1.getMemorySegment()); assertThat(slice2.getMemorySegmentOffset()) .isEqualTo(slice1.getMemorySegmentOffset()) .isOne(); assertReadableBytes(slice1, 1, 2, 3, 4, 5, 6, 7, 8, 9); assertReadableBytes(slice2, 1, 2, 3, 4, 5, 6, 7, 8, 9); } @Test void testCreateSlice2() { buffer.readByte(); // so that we do not start at position 0 ReadOnlySlicedNetworkBuffer slice1 = buffer.readOnlySlice(); buffer.readByte(); // should not influence the second slice at all ReadOnlySlicedNetworkBuffer slice2 = slice1.readOnlySlice(1, 2); assertThat(slice2.unwrap().unwrap()).isSameAs(buffer); assertThat(slice2.getMemorySegment()).isEqualTo(slice1.getMemorySegment()); assertThat(slice1.getMemorySegmentOffset()).isOne(); assertThat(slice2.getMemorySegmentOffset()).isEqualTo(2); assertReadableBytes(slice1, 1, 2, 3, 4, 5, 6, 7, 8, 9); assertReadableBytes(slice2, 2, 3); } @Test void testCreateSlice3() { ReadOnlySlicedNetworkBuffer slice1 = buffer.readOnlySlice(1, 2); buffer.readByte(); // should not influence the second slice at all ReadOnlySlicedNetworkBuffer slice2 = slice1.readOnlySlice(); assertThat(slice2.unwrap().unwrap()).isSameAs(buffer); assertThat(slice2.getMemorySegment()).isSameAs(slice1.getMemorySegment()); assertThat(slice1.getMemorySegmentOffset()).isOne(); assertThat(slice2.getMemorySegmentOffset()).isOne(); assertReadableBytes(slice1, 1, 2); assertReadableBytes(slice2, 1, 2); } @Test void testCreateSlice4() { ReadOnlySlicedNetworkBuffer slice1 = buffer.readOnlySlice(1, 5); buffer.readByte(); // should not influence the second slice at all ReadOnlySlicedNetworkBuffer slice2 = slice1.readOnlySlice(1, 2); assertThat(slice2.unwrap().unwrap()).isSameAs(buffer); assertThat(slice2.getMemorySegment()).isSameAs(slice1.getMemorySegment()); assertThat(slice1.getMemorySegmentOffset()).isOne(); assertThat(slice2.getMemorySegmentOffset()).isEqualTo(2); assertReadableBytes(slice1, 1, 2, 3, 4, 5); assertReadableBytes(slice2, 2, 3); } @Test void testGetMaxCapacity() { assertThat(buffer.readOnlySlice().getMaxCapacity()).isEqualTo(DATA_SIZE); assertThat(buffer.readOnlySlice(1, 2).getMaxCapacity()).isEqualTo(2); } /** * Tests the independence of the reader index via {@link * ReadOnlySlicedNetworkBuffer#setReaderIndex(int)} and {@link * ReadOnlySlicedNetworkBuffer#getReaderIndex()}. */ @Test void testGetSetReaderIndex1() { testGetSetReaderIndex(buffer.readOnlySlice()); } /** * Tests the independence of the reader index via {@link * ReadOnlySlicedNetworkBuffer#setReaderIndex(int)} and {@link * ReadOnlySlicedNetworkBuffer#getReaderIndex()}. 
*/ @Test void testGetSetReaderIndex2() { testGetSetReaderIndex(buffer.readOnlySlice(1, 2)); } private void testGetSetReaderIndex(ReadOnlySlicedNetworkBuffer slice) { assertThat(buffer.getReaderIndex()).isZero(); assertThat(slice.getReaderIndex()).isZero(); slice.setReaderIndex(1); assertThat(buffer.getReaderIndex()).isZero(); assertThat(slice.getReaderIndex()).isOne(); } /** * Tests the independence of the writer index via {@link * ReadOnlySlicedNetworkBuffer#setSize(int)}, {@link ReadOnlySlicedNetworkBuffer#getSize()}. */ @Test void testGetSetSize1() { testGetSetSize(buffer.readOnlySlice(), DATA_SIZE); } /** * Tests the independence of the writer index via {@link * ReadOnlySlicedNetworkBuffer#setSize(int)}, {@link ReadOnlySlicedNetworkBuffer#getSize()}. */ @Test void testGetSetSize2() { testGetSetSize(buffer.readOnlySlice(1, 2), 2); } private void testGetSetSize(ReadOnlySlicedNetworkBuffer slice, int sliceSize) { assertThat(buffer.getSize()).isEqualTo(DATA_SIZE); assertThat(slice.getSize()).isEqualTo(sliceSize); buffer.setSize(DATA_SIZE + 1); assertThat(buffer.getSize()).isEqualTo(DATA_SIZE + 1); assertThat(slice.getSize()).isEqualTo(sliceSize); } @Test void testReadableBytes() { assertThat(buffer.readOnlySlice().readableBytes()).isEqualTo(buffer.readableBytes()); assertThat(buffer.readOnlySlice(1, 2).readableBytes()).isEqualTo(2); } @Test void testGetNioBufferReadable1() { testGetNioBufferReadable(buffer.readOnlySlice(), DATA_SIZE); } @Test void testGetNioBufferReadable2() { testGetNioBufferReadable(buffer.readOnlySlice(1, 2), 2); } private void testGetNioBufferReadable(ReadOnlySlicedNetworkBuffer slice, int sliceSize) { ByteBuffer sliceByteBuffer = slice.getNioBufferReadable(); assertThat(sliceByteBuffer.isReadOnly()).isTrue(); assertThat(sliceByteBuffer.remaining()).isEqualTo(sliceSize); assertThat(sliceByteBuffer.limit()).isEqualTo(sliceSize); assertThat(sliceByteBuffer.capacity()).isEqualTo(sliceSize); // modify sliceByteBuffer position and verify nothing has changed in the original buffer sliceByteBuffer.position(1); assertThat(buffer.getReaderIndex()).isZero(); assertThat(slice.getReaderIndex()).isZero(); assertThat(buffer.getSize()).isEqualTo(DATA_SIZE); assertThat(slice.getSize()).isEqualTo(sliceSize); } @Test void testGetNioBuffer1() { testGetNioBuffer(buffer.readOnlySlice(), DATA_SIZE); } @Test void testGetNioBuffer2() { testGetNioBuffer(buffer.readOnlySlice(1, 2), 2); } private void testGetNioBuffer(ReadOnlySlicedNetworkBuffer slice, int sliceSize) { ByteBuffer sliceByteBuffer = slice.getNioBuffer(1, 1); assertThat(sliceByteBuffer.isReadOnly()).isTrue(); assertThat(sliceByteBuffer.remaining()).isOne(); assertThat(sliceByteBuffer.limit()).isOne(); assertThat(sliceByteBuffer.capacity()).isOne(); // modify sliceByteBuffer position and verify nothing has changed in the original buffer sliceByteBuffer.position(1); assertThat(buffer.getReaderIndex()).isZero(); assertThat(slice.getReaderIndex()).isZero(); assertThat(buffer.getSize()).isEqualTo(DATA_SIZE); assertThat(slice.getSize()).isEqualTo(sliceSize); } @Test void testGetNioBufferReadableThreadSafe1() { NetworkBufferTest.testGetNioBufferReadableThreadSafe(buffer.readOnlySlice()); } @Test void testGetNioBufferReadableThreadSafe2() { NetworkBufferTest.testGetNioBufferReadableThreadSafe(buffer.readOnlySlice(1, 2)); } @Test void testGetNioBufferThreadSafe1() { NetworkBufferTest.testGetNioBufferThreadSafe(buffer.readOnlySlice(), DATA_SIZE); } @Test void testGetNioBufferThreadSafe2() { 
NetworkBufferTest.testGetNioBufferThreadSafe(buffer.readOnlySlice(1, 2), 2); } @Test void testForwardsSetAllocator() { testForwardsSetAllocator(buffer.readOnlySlice()); testForwardsSetAllocator(buffer.readOnlySlice(1, 2)); } private void testForwardsSetAllocator(ReadOnlySlicedNetworkBuffer slice) { NettyBufferPool allocator = new NettyBufferPool(1); slice.setAllocator(allocator); assertThat(slice.alloc()).isSameAs(buffer.alloc()); assertThat(slice.alloc()).isSameAs(allocator); } private static void assertReadableBytes(Buffer actualBuffer, int... expectedBytes) { ByteBuffer actualBytesBuffer = actualBuffer.getNioBufferReadable(); int[] actual = new int[actualBytesBuffer.limit()]; for (int i = 0; i < actual.length; ++i) { actual[i] = actualBytesBuffer.get(); } assertThat(actual).isEqualTo(expectedBytes); // verify absolutely positioned read method: ByteBuf buffer = (ByteBuf) actualBuffer; for (int i = 0; i < buffer.readableBytes(); ++i) { actual[i] = buffer.getByte(buffer.readerIndex() + i); } assertThat(actual).isEqualTo(expectedBytes); // verify relatively positioned read method: for (int i = 0; i < buffer.readableBytes(); ++i) { actual[i] = buffer.readByte(); } assertThat(actual).isEqualTo(expectedBytes); } }
ReadOnlySlicedBufferTest
java
apache__camel
core/camel-core-reifier/src/main/java/org/apache/camel/reifier/dataformat/CBORDataFormatReifier.java
{ "start": 1027, "end": 2131 }
class ____ extends DataFormatReifier<CBORDataFormat> { public CBORDataFormatReifier(CamelContext camelContext, DataFormatDefinition definition) { super(camelContext, (CBORDataFormat) definition); } @Override protected void prepareDataFormatConfig(Map<String, Object> properties) { // must be a reference value properties.put("objectMapper", asRef(definition.getObjectMapper())); properties.put("unmarshalType", or(definition.getUnmarshalType(), definition.getUnmarshalTypeName())); properties.put("collectionType", or(definition.getCollectionType(), definition.getCollectionTypeName())); properties.put("useList", definition.getUseList()); properties.put("allowUnmarshallType", definition.getAllowUnmarshallType()); properties.put("prettyPrint", definition.getPrettyPrint()); properties.put("allowJmsType", definition.getAllowJmsType()); properties.put("enableFeatures", definition.getEnableFeatures()); properties.put("disableFeatures", definition.getDisableFeatures()); } }
CBORDataFormatReifier
java
assertj__assertj-core
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/example/test/Throwables_addLineNumberToErrorMessages_Test.java
{ "start": 884, "end": 1736 }
class ____ { @Test void should_add_the_line_where_the_error_was_thrown() { // GIVEN Throwable throwable1 = new Throwable("boom 1"); Throwable throwable2 = new Throwable("boom 2"); List<Throwable> errors = list(throwable1, throwable2); // WHEN List<Throwable> errorsWithLineNumber = addLineNumberToErrorMessages(errors); // THEN then(errorsWithLineNumber.get(0)).hasMessage("boom 1%nat Throwables_addLineNumberToErrorMessages_Test.should_add_the_line_where_the_error_was_thrown(Throwables_addLineNumberToErrorMessages_Test.java:30)".formatted()); then(errorsWithLineNumber.get(1)).hasMessage("boom 2%nat Throwables_addLineNumberToErrorMessages_Test.should_add_the_line_where_the_error_was_thrown(Throwables_addLineNumberToErrorMessages_Test.java:31)".formatted()); } }
Throwables_addLineNumberToErrorMessages_Test
java
apache__hadoop
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEKMSDefaultKey.java
{ "start": 1458, "end": 1777 }
class ____ extends {@link AbstractTestS3AEncryption} * and tests SSE-KMS encryption when no KMS encryption key is provided and AWS * uses the default. Since this resource changes for every account and region, * there is no good way to explicitly set this value to do an equality check * in the response. */ public
that
java
assertj__assertj-core
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/paths/Paths_assertIsExecutable_Test.java
{ "start": 1253, "end": 2465 }
class ____ extends PathsBaseTest { @Test void should_fail_if_actual_is_null() { // WHEN var error = expectAssertionError(() -> underTest.assertIsExecutable(INFO, null)); // THEN then(error).hasMessage(actualIsNull()); } @Test void should_fail_if_actual_does_not_exist() { // GIVEN Path actual = tempDir.resolve("non-existent"); // WHEN var error = expectAssertionError(() -> underTest.assertIsExecutable(INFO, actual)); // THEN then(error).hasMessage(shouldExist(actual).create()); } @Test @DisabledOnOs(value = WINDOWS, disabledReason = "gh-2312") void should_fail_if_actual_is_not_executable() throws IOException { // GIVEN Path actual = createFile(tempDir.resolve("actual")); // WHEN var error = expectAssertionError(() -> underTest.assertIsExecutable(INFO, actual)); // THEN then(error).hasMessage(shouldBeExecutable(actual).create()); } @Test void should_pass_if_actual_is_executable() throws IOException { // GIVEN Path actual = createFile(tempDir.resolve("actual")); actual.toFile().setExecutable(true); // WHEN/THEN underTest.assertIsExecutable(INFO, actual); } }
Paths_assertIsExecutable_Test
java
apache__avro
lang/java/protobuf/src/test/java/org/apache/avro/protobuf/noopt/Test.java
{ "start": 8375, "end": 12311 }
enum ____ is set. */ boolean hasEnum(); /** * <code>optional .org.apache.avro.protobuf.noopt.A enum = 16 [default = Z];</code> * * @return The enum. */ org.apache.avro.protobuf.noopt.Test.A getEnum(); /** * <pre> * some repeated types * </pre> * * <code>repeated int32 intArray = 17;</code> * * @return A list containing the intArray. */ java.util.List<java.lang.Integer> getIntArrayList(); /** * <pre> * some repeated types * </pre> * * <code>repeated int32 intArray = 17;</code> * * @return The count of intArray. */ int getIntArrayCount(); /** * <pre> * some repeated types * </pre> * * <code>repeated int32 intArray = 17;</code> * * @param index The index of the element to return. * @return The intArray at the given index. */ int getIntArray(int index); /** * <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code> */ java.util.List<org.apache.avro.protobuf.noopt.Test.Foo> getFooArrayList(); /** * <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code> */ org.apache.avro.protobuf.noopt.Test.Foo getFooArray(int index); /** * <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code> */ int getFooArrayCount(); /** * <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code> */ java.util.List<? extends org.apache.avro.protobuf.noopt.Test.FooOrBuilder> getFooArrayOrBuilderList(); /** * <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code> */ org.apache.avro.protobuf.noopt.Test.FooOrBuilder getFooArrayOrBuilder(int index); /** * <code>repeated .org.apache.avro.protobuf.noopt.A syms = 19;</code> * * @return A list containing the syms. */ java.util.List<org.apache.avro.protobuf.noopt.Test.A> getSymsList(); /** * <code>repeated .org.apache.avro.protobuf.noopt.A syms = 19;</code> * * @return The count of syms. */ int getSymsCount(); /** * <code>repeated .org.apache.avro.protobuf.noopt.A syms = 19;</code> * * @param index The index of the element to return. * @return The syms at the given index. */ org.apache.avro.protobuf.noopt.Test.A getSyms(int index); /** * <pre> * a recursive type * </pre> * * <code>optional .org.apache.avro.protobuf.noopt.Foo foo = 18;</code> * * @return Whether the foo field is set. */ boolean hasFoo(); /** * <pre> * a recursive type * </pre> * * <code>optional .org.apache.avro.protobuf.noopt.Foo foo = 18;</code> * * @return The foo. */ org.apache.avro.protobuf.noopt.Test.Foo getFoo(); /** * <pre> * a recursive type * </pre> * * <code>optional .org.apache.avro.protobuf.noopt.Foo foo = 18;</code> */ org.apache.avro.protobuf.noopt.Test.FooOrBuilder getFooOrBuilder(); /** * <pre> * a predefined message type * </pre> * * <code>optional .google.protobuf.Timestamp timestamp = 21;</code> * * @return Whether the timestamp field is set. */ boolean hasTimestamp(); /** * <pre> * a predefined message type * </pre> * * <code>optional .google.protobuf.Timestamp timestamp = 21;</code> * * @return The timestamp. */ com.google.protobuf.Timestamp getTimestamp(); /** * <pre> * a predefined message type * </pre> * * <code>optional .google.protobuf.Timestamp timestamp = 21;</code> */ com.google.protobuf.TimestampOrBuilder getTimestampOrBuilder(); } /** * Protobuf type {@code org.apache.avro.protobuf.noopt.Foo} */ public static final
field
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/CheckReturnValueTest.java
{ "start": 27712, "end": 28076 }
class ____ { public static void foo(List<Integer> x) { x.add(42); // BUG: Diagnostic contains: CheckReturnValue x.get(0); } } """) .addSourceLines( "my/java/util/List.java", """ package my.java.util; public
Test
java
micronaut-projects__micronaut-core
core-reactive/src/main/java/io/micronaut/core/async/publisher/CompletableFuturePublisher.java
{ "start": 1771, "end": 4077 }
class ____ implements Subscription { private final Subscriber<? super T> subscriber; private final AtomicBoolean completed = new AtomicBoolean(false); private CompletableFuture<T> future; // to allow cancellation /** * @param subscriber The subscriber */ CompletableFutureSubscription(Subscriber<? super T> subscriber) { this.subscriber = subscriber; } /** * @param n Number of elements to request to the upstream */ @Override public synchronized void request(long n) { if (n != 0 && !completed.get()) { if (n < 0) { IllegalArgumentException ex = new IllegalArgumentException("Cannot request a negative number"); subscriber.onError(ex); } else { try { CompletableFuture<T> future = futureSupplier.get(); if (future == null) { subscriber.onComplete(); } else { this.future = future; future.whenComplete((s, throwable) -> { if (completed.compareAndSet(false, true)) { if (throwable != null) { subscriber.onError(throwable); } else { if (s != null) { subscriber.onNext(s); } subscriber.onComplete(); } } }); } } catch (Throwable e) { subscriber.onError(e); } } } } /** * Request the publisher to stop sending data and clean up resources. */ @Override public synchronized void cancel() { if (completed.compareAndSet(false, true) && future != null) { future.cancel(false); } } } }
CompletableFutureSubscription
java
processing__processing4
app/src/processing/app/Sketch.java
{ "start": 58875, "end": 62738 }
class ____.) * Most getName() calls before 4.0 were to get the main class, * so this method addition allows the sketch name to be decoupled * from the name of the main tab. */ public String getMainName() { return code[0].getPrettyName(); } /** * Returns path to the main .pde file for this sketch. */ public String getMainPath() { return mainFile.getAbsolutePath(); } /** * Returns the sketch folder. */ public File getFolder() { return folder; } /** * Returns the location of the sketch's data folder. (It may not exist yet.) */ public File getDataFolder() { return dataFolder; } public boolean hasDataFolder() { return dataFolder.exists(); } /** * Create the data folder if it does not exist already. As a convenience, * it also returns the data folder, since it's likely about to be used. */ public File prepareDataFolder() { if (!dataFolder.exists()) { dataFolder.mkdirs(); } return dataFolder; } /** * Returns the location of the sketch's code folder. (It may not exist yet.) */ public File getCodeFolder() { return codeFolder; } public boolean hasCodeFolder() { return (codeFolder != null) && codeFolder.exists(); } public SketchCode[] getCode() { return code; } public int getCodeCount() { return codeCount; } // Used by GUI Builder for Processing // https://github.com/processing/processing4/issues/545 // https://github.com/processing/processing4/issues/596 public SketchCode getCode(int index) { return code[index]; } public SketchCode getCurrentCode() { return current; } public int getCurrentCodeIndex() { return currentIndex; } /** * Tried to remove in beta 6, but in use by Python Mode. * When it's removed there, let me know, and I'll remove it here. */ @Deprecated public String getMainProgram() { return getCode(0).getProgram(); } public void setUntitled(boolean untitled) { this.untitled = untitled; editor.updateTitle(); } public boolean isUntitled() { return untitled; } // . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . // /** // * Convert to sanitized name and alert the user // * if changes were made. // */ // static public String checkName(String origName) { // String newName = sanitizeName(origName); // // if (!newName.equals(origName)) { // String msg = // Language.text("check_name.messages.is_name_modified"); // System.out.println(msg); // } // return newName; // } /** * Return true if the name is valid for a Processing sketch. * Extensions of the form .foo are ignored. */ public static boolean isSanitaryName(String name) { final int dot = name.lastIndexOf('.'); if (dot >= 0) { name = name.substring(0, dot); } return sanitizeName(name).equals(name); } static boolean isAsciiLetter(char c) { return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z'); } /** * Produce a sanitized name that fits our standards for likely to work. * <p/> * Java classes have a wider range of names that are technically allowed * (supposedly any Unicode name) than what we support. The reason for * going more narrow is to avoid situations with text encodings and * converting during the process of moving files between operating * systems, i.e. uploading from a Windows machine to a Linux server, * or reading a FAT32 partition in OS X and using a thumb drive. * <p/> * This helper function replaces everything but A-Z, a-z, and 0-9 with * underscores. Also disallows starting the sketch name with a digit * or underscore. * <p/> * In Processing 2.0, sketches can no longer begin with an underscore, * because these aren't valid
name
java
elastic__elasticsearch
libs/exponential-histogram/src/main/java/org/elasticsearch/exponentialhistogram/ExponentialHistogramCircuitBreaker.java
{ "start": 1140, "end": 1659 }
interface ____ { /** * Adjusts the circuit breaker, potentially throwing an exception if the limit is exceeded. * Guaranteed to never cause an exception when called with a negative number to reduce the breaker count. * * @param bytesAllocated the number of bytes allocated, or a negative value if deallocated */ void adjustBreaker(long bytesAllocated); static ExponentialHistogramCircuitBreaker noop() { return bytesAllocated -> {}; } }
ExponentialHistogramCircuitBreaker
java
FasterXML__jackson-databind
src/test/java/tools/jackson/databind/module/SimpleModuleTest.java
{ "start": 4849, "end": 4917 }
class ____ { public String value; } static
Test3787Bean
java
alibaba__nacos
common/src/main/java/com/alibaba/nacos/common/trace/DeregisterInstanceReason.java
{ "start": 732, "end": 1069 }
enum ____ { /** * client initiates request. */ REQUEST, /** * Instance native disconnected. */ NATIVE_DISCONNECTED, /** * Instance synced disconnected. */ SYNCED_DISCONNECTED, /** * Instance heart beat timeout expire. */ HEARTBEAT_EXPIRE, }
DeregisterInstanceReason
java
spring-projects__spring-security
oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/endpoint/WebClientReactiveClientCredentialsTokenResponseClient.java
{ "start": 1601, "end": 1770 }
class ____ extends AbstractWebClientReactiveOAuth2AccessTokenResponseClient<OAuth2ClientCredentialsGrantRequest> { }
WebClientReactiveClientCredentialsTokenResponseClient
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/provider/TestAbstractProviderService.java
{ "start": 2358, "end": 6622 }
class ____ { private ServiceContext serviceContext; private Service testService; private AbstractLauncher launcher; @RegisterExtension private ServiceTestUtils.ServiceFSWatcher rule = new ServiceTestUtils.ServiceFSWatcher(); @BeforeEach public void setup() throws Exception { testService = TestServiceManager.createBaseDef("testService"); serviceContext = new MockRunningServiceContext(rule, testService); launcher = new AbstractLauncher(serviceContext); rule.getFs().setAppDir(new Path("target/testAbstractProviderService")); } @AfterEach public void teardown() throws Exception { FileUtils.deleteQuietly( new File(rule.getFs().getAppDir().toUri().getPath())); } @Test public void testBuildContainerLaunchCommand() throws Exception { AbstractProviderService providerService = new DockerProviderService(); Component component = serviceContext.scheduler.getAllComponents().entrySet() .iterator().next().getValue(); ContainerLaunchService.ComponentLaunchContext clc = createEntryPointCLCFor(testService, component, "sleep,9000"); ComponentInstance instance = component.getAllComponentInstances().iterator() .next(); Container container = mock(Container.class); providerService.buildContainerLaunchCommand(launcher, testService, instance, rule.getFs(), serviceContext.scheduler.getConfig(), container, clc, null); assertEquals(Lists.newArrayList(clc.getLaunchCommand()), launcher.getCommands(), "commands"); } @Test public void testBuildContainerLaunchCommandWithSpace() throws Exception { AbstractProviderService providerService = new DockerProviderService(); Component component = serviceContext.scheduler.getAllComponents().entrySet() .iterator().next().getValue(); ContainerLaunchService.ComponentLaunchContext clc = createEntryPointCLCFor(testService, component, "ls -l \" space\""); ComponentInstance instance = component.getAllComponentInstances().iterator() .next(); Container container = mock(Container.class); providerService.buildContainerLaunchCommand(launcher, testService, instance, rule.getFs(), serviceContext.scheduler.getConfig(), container, clc, null); assertEquals(Lists.newArrayList("ls,-l, space"), launcher.getCommands(), "commands don't match."); } @Test public void testBuildContainerLaunchContext() throws Exception { AbstractProviderService providerService = new DockerProviderService(); Component component = serviceContext.scheduler.getAllComponents().entrySet() .iterator().next().getValue(); ContainerLaunchService.ComponentLaunchContext clc = createEntryPointCLCFor(testService, component, "sleep,9000"); ComponentInstance instance = component.getAllComponentInstances().iterator() .next(); Container container = mock(Container.class); ContainerId containerId = ContainerId.newContainerId( ApplicationAttemptId.newInstance(ApplicationId.newInstance( System.currentTimeMillis(), 1), 1), 1L); when(container.getId()).thenReturn(containerId); providerService.buildContainerLaunchContext(launcher, testService, instance, rule.getFs(), serviceContext.scheduler.getConfig(), container, clc); assertEquals(clc.getArtifact().getId(), launcher.getDockerImage(), "artifact"); } private static ContainerLaunchService.ComponentLaunchContext createEntryPointCLCFor(Service service, Component component, String launchCmd) { Artifact artifact = new Artifact(); artifact.setType(Artifact.TypeEnum.DOCKER); artifact.setId("example"); Map<String, String> env = new HashMap<>(); env.put("YARN_CONTAINER_RUNTIME_DOCKER_DELAYED_REMOVAL", "true"); env.put("YARN_CONTAINER_RUNTIME_DOCKER_RUN_OVERRIDE_DISABLE", "true"); 
component.getComponentSpec().getConfiguration().setEnv(env); return new ContainerLaunchService.ComponentLaunchContext( component.getName(), service.getVersion()) .setArtifact(artifact) .setConfiguration(component.getComponentSpec().getConfiguration()) .setLaunchCommand(launchCmd); } }
TestAbstractProviderService
java
apache__kafka
server/src/test/java/org/apache/kafka/server/KRaftClusterTest.java
{ "start": 3116, "end": 12133 }
class ____ { @Test public void testCreateClusterAndClose() throws Exception { try (KafkaClusterTestKit cluster = new KafkaClusterTestKit.Builder( new TestKitNodes.Builder() .setNumBrokerNodes(1) .setNumControllerNodes(1) .build()) .build()) { cluster.format(); cluster.startup(); } } @Test public void testCreateClusterAndRestartBrokerNode() throws Exception { try (KafkaClusterTestKit cluster = new KafkaClusterTestKit.Builder( new TestKitNodes.Builder() .setNumBrokerNodes(1) .setNumControllerNodes(1) .build()) .build()) { cluster.format(); cluster.startup(); var broker = cluster.brokers().values().iterator().next(); broker.shutdown(); broker.startup(); } } @Test public void testClusterWithLowerCaseListeners() throws Exception { try (KafkaClusterTestKit cluster = new KafkaClusterTestKit.Builder( new TestKitNodes.Builder() .setNumBrokerNodes(1) .setBrokerListenerName(new ListenerName("external")) .setNumControllerNodes(3) .build()) .build()) { cluster.format(); cluster.startup(); cluster.brokers().forEach((brokerId, broker) -> { assertEquals(List.of("external://localhost:0"), broker.config().get(SocketServerConfigs.LISTENERS_CONFIG)); assertEquals("external", broker.config().get(ReplicationConfigs.INTER_BROKER_LISTENER_NAME_CONFIG)); assertEquals("external:PLAINTEXT,CONTROLLER:PLAINTEXT", broker.config().get(SocketServerConfigs.LISTENER_SECURITY_PROTOCOL_MAP_CONFIG)); }); TestUtils.waitForCondition(() -> cluster.brokers().get(0).brokerState() == BrokerState.RUNNING, "Broker never made it to RUNNING state."); TestUtils.waitForCondition(() -> cluster.raftManagers().get(0).client().leaderAndEpoch().leaderId().isPresent(), "RaftManager was not initialized."); try (Admin admin = Admin.create(cluster.clientProperties())) { assertEquals(cluster.nodes().clusterId(), admin.describeCluster().clusterId().get()); } } } @Test public void testCreateClusterAndWaitForBrokerInRunningState() throws Exception { try (KafkaClusterTestKit cluster = new KafkaClusterTestKit.Builder( new TestKitNodes.Builder() .setNumBrokerNodes(1) .setNumControllerNodes(1) .build()) .build()) { cluster.format(); cluster.startup(); TestUtils.waitForCondition(() -> cluster.brokers().get(0).brokerState() == BrokerState.RUNNING, "Broker never made it to RUNNING state."); TestUtils.waitForCondition(() -> cluster.raftManagers().get(0).client().leaderAndEpoch().leaderId().isPresent(), "RaftManager was not initialized."); try (Admin admin = Admin.create(cluster.clientProperties())) { assertEquals(cluster.nodes().clusterId(), admin.describeCluster().clusterId().get()); } } } @Test public void testRemoteLogManagerInstantiation() throws Exception { try (KafkaClusterTestKit cluster = new KafkaClusterTestKit.Builder( new TestKitNodes.Builder() .setNumBrokerNodes(1) .setNumControllerNodes(1) .build()) .setConfigProp(RemoteLogManagerConfig.REMOTE_LOG_STORAGE_SYSTEM_ENABLE_PROP, true) .setConfigProp(RemoteLogManagerConfig.REMOTE_LOG_METADATA_MANAGER_CLASS_NAME_PROP, "org.apache.kafka.server.log.remote.storage.NoOpRemoteLogMetadataManager") .setConfigProp(RemoteLogManagerConfig.REMOTE_STORAGE_MANAGER_CLASS_NAME_PROP, "org.apache.kafka.server.log.remote.storage.NoOpRemoteStorageManager") .build()) { cluster.format(); cluster.startup(); cluster.brokers().forEach((brokerId, broker) -> { assertFalse(broker.remoteLogManagerOpt().isEmpty(), "RemoteLogManager should be initialized"); }); } } @Test public void testAuthorizerFailureFoundInControllerStartup() throws Exception { try (KafkaClusterTestKit cluster = new KafkaClusterTestKit.Builder( new 
TestKitNodes.Builder(). setNumControllerNodes(3).build()) .setConfigProp("authorizer.class.name", BadAuthorizer.class.getName()) .build()) { cluster.format(); ExecutionException exception = assertThrows(ExecutionException.class, cluster::startup); assertEquals("java.lang.IllegalStateException: test authorizer exception", exception.getMessage()); cluster.fatalFaultHandler().setIgnore(true); } } @ParameterizedTest @ValueSource(booleans = {false, true}) public void testReconfigureControllerClientQuotas(boolean combinedController) throws Exception { try (KafkaClusterTestKit cluster = new KafkaClusterTestKit.Builder( new TestKitNodes.Builder() .setNumBrokerNodes(1) .setCombined(combinedController) .setNumControllerNodes(1) .build()) .setConfigProp("client.quota.callback.class", DummyClientQuotaCallback.class.getName()) .setConfigProp(DummyClientQuotaCallback.DUMMY_CLIENT_QUOTA_CALLBACK_VALUE_CONFIG_KEY, "0") .build()) { cluster.format(); cluster.startup(); cluster.waitForReadyBrokers(); assertConfigValue(cluster, 0); try (Admin admin = Admin.create(cluster.clientProperties())) { admin.incrementalAlterConfigs( Map.of(new ConfigResource(Type.BROKER, ""), List.of(new AlterConfigOp( new ConfigEntry(DummyClientQuotaCallback.DUMMY_CLIENT_QUOTA_CALLBACK_VALUE_CONFIG_KEY, "1"), OpType.SET)))) .all().get(); } assertConfigValue(cluster, 1); } } private void assertConfigValue(KafkaClusterTestKit cluster, int expected) throws InterruptedException { TestUtils.retryOnExceptionWithTimeout(60000, () -> { Object controllerCallback = cluster.controllers().values().iterator().next() .quotaManagers().clientQuotaCallbackPlugin().get().get(); assertEquals(expected, ((DummyClientQuotaCallback) controllerCallback).value); Object brokerCallback = cluster.brokers().values().iterator().next() .quotaManagers().clientQuotaCallbackPlugin().get().get(); assertEquals(expected, ((DummyClientQuotaCallback) brokerCallback).value); }); } @ParameterizedTest @ValueSource(booleans = {false, true}) public void testReconfigureControllerAuthorizer(boolean combinedMode) throws Exception { try (KafkaClusterTestKit cluster = new KafkaClusterTestKit.Builder( new TestKitNodes.Builder() .setNumBrokerNodes(1) .setCombined(combinedMode) .setNumControllerNodes(1) .build()) .setConfigProp("authorizer.class.name", FakeConfigurableAuthorizer.class.getName()) .build()) { cluster.format(); cluster.startup(); cluster.waitForReadyBrokers(); assertFoobarValue(cluster, 0); try (Admin admin = Admin.create(cluster.clientProperties())) { admin.incrementalAlterConfigs( Map.of(new ConfigResource(Type.BROKER, ""), List.of(new AlterConfigOp( new ConfigEntry(FakeConfigurableAuthorizer.FOOBAR_CONFIG_KEY, "123"), OpType.SET)))) .all().get(); } assertFoobarValue(cluster, 123); } } private void assertFoobarValue(KafkaClusterTestKit cluster, int expected) throws InterruptedException { TestUtils.retryOnExceptionWithTimeout(60000, () -> { Object controllerAuthorizer = cluster.controllers().values().iterator().next() .authorizerPlugin().get().get(); assertEquals(expected, ((FakeConfigurableAuthorizer) controllerAuthorizer).foobar.get()); Object brokerAuthorizer = cluster.brokers().values().iterator().next() .authorizerPlugin().get().get(); assertEquals(expected, ((FakeConfigurableAuthorizer) brokerAuthorizer).foobar.get()); }); } public static
KRaftClusterTest
java
apache__camel
core/camel-base-engine/src/main/java/org/apache/camel/impl/engine/DurationRoutePolicyFactory.java
{ "start": 1813, "end": 4116 }
class ____ implements RoutePolicyFactory { @Metadata(description = "Route pattern to select a set of routes (by their route id). By default all routes are selected") private String fromRouteId; @Metadata(description = "Maximum seconds Camel is running before the action is triggered") private int maxSeconds; @Metadata(description = "Maximum number of messages to process before the action is triggered") private int maxMessages; @Metadata(description = "Action to perform", enums = "STOP_CAMEL_CONTEXT,STOP_ROUTE,SUSPEND_ROUTE,SUSPEND_ALL_ROUTES", defaultValue = "STOP_ROUTE") private DurationRoutePolicy.Action action = DurationRoutePolicy.Action.STOP_ROUTE; @Override public RoutePolicy createRoutePolicy(CamelContext camelContext, String routeId, NamedNode route) { DurationRoutePolicy policy = null; if (fromRouteId == null || PatternHelper.matchPattern(routeId, fromRouteId)) { policy = new DurationRoutePolicy(camelContext, routeId); policy.setMaxMessages(maxMessages); policy.setMaxSeconds(maxSeconds); policy.setAction(action); } return policy; } public String getFromRouteId() { return fromRouteId; } /** * Limit the route policy to the route which matches this pattern * * @see PatternHelper#matchPattern(String, String) */ public void setFromRouteId(String fromRouteId) { this.fromRouteId = fromRouteId; } public int getMaxMessages() { return maxMessages; } /** * Maximum number of messages to process before the action is triggered */ public void setMaxMessages(int maxMessages) { this.maxMessages = maxMessages; } public int getMaxSeconds() { return maxSeconds; } /** * Maximum seconds Camel is running before the action is triggered */ public void setMaxSeconds(int maxSeconds) { this.maxSeconds = maxSeconds; } public DurationRoutePolicy.Action getAction() { return action; } /** * What action to perform when maximum is triggered. */ public void setAction(DurationRoutePolicy.Action action) { this.action = action; } }
DurationRoutePolicyFactory
java
spring-projects__spring-framework
spring-orm/src/test/java/org/springframework/orm/jpa/domain/EmployeeCategoryConverter.java
{ "start": 726, "end": 1247 }
class ____ implements AttributeConverter<EmployeeCategory, String> { @Override public String convertToDatabaseColumn(EmployeeCategory employeeCategory) { if (employeeCategory != null) { return employeeCategory.getName(); } return null; } @Override public EmployeeCategory convertToEntityAttribute(String data) { if (data != null) { EmployeeCategory employeeCategory = new EmployeeCategory(); employeeCategory.setName(data); return employeeCategory; } return null; } }
EmployeeCategoryConverter
java
assertj__assertj-core
assertj-core/src/test/java/org/assertj/core/api/double2darray/Double2DArrayAssert_isEmpty_Test.java
{ "start": 949, "end": 1391 }
class ____ extends Double2DArrayAssertBaseTest { @Override protected Double2DArrayAssert invoke_api_method() { assertions.isEmpty(); return null; } @Override protected void verify_internal_effects() { verify(arrays).assertEmpty(getInfo(assertions), getActual(assertions)); } @Override @Test public void should_return_this() { // Disable this test because isEmpty is void } }
Double2DArrayAssert_isEmpty_Test
java
alibaba__druid
core/src/test/java/com/alibaba/druid/bvt/filter/wall/mysql/MySqlWallTest124.java
{ "start": 804, "end": 1199 }
class ____ extends TestCase { public void test_false() throws Exception { WallProvider provider = new MySqlWallProvider(); provider.getConfig().setCommentAllow(false); String sql = "SELECT name, '******' password, createTime from user where name like 'admin' AND 5859=5666 AND 'Cuqo' LIKE 'Cuqo'"; assertFalse(provider.checkValid(sql)); } }
MySqlWallTest124
java
apache__camel
components/camel-wordpress/src/main/java/org/apache/camel/component/wordpress/api/service/WordpressServiceCategories.java
{ "start": 1012, "end": 1117 }
interface ____ extends WordpressCrudService<Category, CategorySearchCriteria> { }
WordpressServiceCategories
java
ReactiveX__RxJava
src/main/java/io/reactivex/rxjava3/internal/subscribers/BasicFuseableConditionalSubscriber.java
{ "start": 1147, "end": 5274 }
class ____<T, R> implements ConditionalSubscriber<T>, QueueSubscription<R> { /** The downstream subscriber. */ protected final ConditionalSubscriber<? super R> downstream; /** The upstream subscription. */ protected Subscription upstream; /** The upstream's QueueSubscription if not null. */ protected QueueSubscription<T> qs; /** Flag indicating no further onXXX event should be accepted. */ protected boolean done; /** Holds the established fusion mode of the upstream. */ protected int sourceMode; /** * Construct a BasicFuseableSubscriber by wrapping the given subscriber. * @param downstream the subscriber, not null (not verified) */ public BasicFuseableConditionalSubscriber(ConditionalSubscriber<? super R> downstream) { this.downstream = downstream; } // final: fixed protocol steps to support fuseable and non-fuseable upstream @SuppressWarnings("unchecked") @Override public final void onSubscribe(Subscription s) { if (SubscriptionHelper.validate(this.upstream, s)) { this.upstream = s; if (s instanceof QueueSubscription) { this.qs = (QueueSubscription<T>)s; } if (beforeDownstream()) { downstream.onSubscribe(this); afterDownstream(); } } } /** * Override this to perform actions before the call {@code actual.onSubscribe(this)} happens. * @return true if onSubscribe should continue with the call */ protected boolean beforeDownstream() { return true; } /** * Override this to perform actions after the call to {@code actual.onSubscribe(this)} happened. */ protected void afterDownstream() { // default no-op } // ----------------------------------- // Convenience and state-aware methods // ----------------------------------- @Override public void onError(Throwable t) { if (done) { RxJavaPlugins.onError(t); return; } done = true; downstream.onError(t); } /** * Rethrows the throwable if it is a fatal exception or calls {@link #onError(Throwable)}. * @param t the throwable to rethrow or signal to the actual subscriber */ protected final void fail(Throwable t) { Exceptions.throwIfFatal(t); upstream.cancel(); onError(t); } @Override public void onComplete() { if (done) { return; } done = true; downstream.onComplete(); } /** * Calls the upstream's QueueSubscription.requestFusion with the mode and * saves the established mode in {@link #sourceMode} if that mode doesn't * have the {@link QueueSubscription#BOUNDARY} flag set. * <p> * If the upstream doesn't support fusion ({@link #qs} is null), the method * returns {@link QueueSubscription#NONE}. * @param mode the fusion mode requested * @return the established fusion mode */ protected final int transitiveBoundaryFusion(int mode) { QueueSubscription<T> qs = this.qs; if (qs != null) { if ((mode & BOUNDARY) == 0) { int m = qs.requestFusion(mode); if (m != NONE) { sourceMode = m; } return m; } } return NONE; } // -------------------------------------------------------------- // Default implementation of the RS and QS protocol (can be overridden) // -------------------------------------------------------------- @Override public void request(long n) { upstream.request(n); } @Override public void cancel() { upstream.cancel(); } @Override public boolean isEmpty() { return qs.isEmpty(); } @Override public void clear() { qs.clear(); } // ----------------------------------------------------------- // The rest of the Queue
BasicFuseableConditionalSubscriber
java
apache__hadoop
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/testjar/JobKillCommitter.java
{ "start": 2972, "end": 3213 }
class ____ extends Mapper<LongWritable, Text, Text, Text> { public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException { throw new IOException(); } } /** * The
MapperFail
java
spring-projects__spring-framework
spring-expression/src/test/java/org/springframework/expression/spel/standard/SpelCompilerTests.java
{ "start": 1469, "end": 3842 }
class ____ { @Test // gh-24357 void expressionCompilesWhenMethodComesFromPublicInterface() { SpelParserConfiguration config = new SpelParserConfiguration(SpelCompilerMode.IMMEDIATE, null); SpelExpressionParser parser = new SpelExpressionParser(config); OrderedComponent component = new OrderedComponent(); Expression expression = parser.parseExpression("order"); // Evaluate the expression multiple times to ensure that it gets compiled. IntStream.rangeClosed(1, 5).forEach(i -> assertThat(expression.getValue(component)).isEqualTo(42)); } @Test // gh-25706 void defaultMethodInvocation() { SpelParserConfiguration config = new SpelParserConfiguration(SpelCompilerMode.IMMEDIATE, null); SpelExpressionParser parser = new SpelExpressionParser(config); StandardEvaluationContext context = new StandardEvaluationContext(); Item item = new Item(); context.setRootObject(item); Expression expression = parser.parseExpression("#root.isEditable2()"); assertThat(SpelCompiler.compile(expression)).isFalse(); assertThat(expression.getValue(context)).isEqualTo(false); assertThat(SpelCompiler.compile(expression)).isTrue(); assertIsCompiled(expression); assertThat(expression.getValue(context)).isEqualTo(false); context.setVariable("user", new User()); expression = parser.parseExpression("#root.isEditable(#user)"); assertThat(SpelCompiler.compile(expression)).isFalse(); assertThat(expression.getValue(context)).asInstanceOf(BOOLEAN).isTrue(); assertThat(SpelCompiler.compile(expression)).isTrue(); assertIsCompiled(expression); assertThat(expression.getValue(context)).asInstanceOf(BOOLEAN).isTrue(); } @Test // gh-28043 void changingRegisteredVariableTypeDoesNotResultInFailureInMixedMode() { SpelParserConfiguration config = new SpelParserConfiguration(SpelCompilerMode.MIXED, null); SpelExpressionParser parser = new SpelExpressionParser(config); Expression sharedExpression = parser.parseExpression("#bean.value"); StandardEvaluationContext context = new StandardEvaluationContext(); Object[] beans = new Object[] {new Bean1(), new Bean2(), new Bean3(), new Bean4()}; IntStream.rangeClosed(1, 1_000_000).parallel().forEach(count -> { context.setVariable("bean", beans[count % 4]); assertThat(sharedExpression.getValue(context)).asString().startsWith("1"); }); } static
SpelCompilerTests
java
elastic__elasticsearch
x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/plain/AbstractAtomicCartesianShapeFieldData.java
{ "start": 1146, "end": 1478 }
class ____ extends LeafShapeFieldData.ShapeScriptValues< CartesianPoint, CartesianShapeValues.CartesianShapeValue> { public CartesianShapeScriptValues(GeometrySupplier<CartesianPoint, CartesianShapeValues.CartesianShapeValue> supplier) { super(supplier); } } }
CartesianShapeScriptValues
java
alibaba__druid
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/select/MySqlSelectTest_309.java
{ "start": 826, "end": 1322 }
class ____ extends MysqlTest { public void test_0() throws Exception { String sql = "SELECT *\n" + "FROM `yeahmobi`.`new_eagle_log_parquet`\n" + "AND part = '2020-07-07'\n" + "AND hour = '09'\n" + "LIMIT 20"; try { SQLStatement stmt = SQLUtils .parseSingleStatement(sql, DbType.mysql); fail(); } catch (Exception e) { } } }
MySqlSelectTest_309
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/collectionelement/EmbeddableCollectionElementWithLazyManyToOneTest.java
{ "start": 4286, "end": 4375 }
class ____ { @Id @GeneratedValue private int id; } @Embeddable public static
Child
java
quarkusio__quarkus
extensions/reactive-streams-operators/smallrye-reactive-type-converters/deployment/src/main/java/io/quarkus/smallrye/typeconverters/deployment/SmallRyeReactiveTypeConvertersProcessor.java
{ "start": 634, "end": 903 }
class ____ register them as SPI. So the result depends * on the implementation added by the user in the build classpath (Maven dependencies). * * Note that if none are found, nothing is added - so declaring this augmentation is quite useless in this case. */ public
and
java
elastic__elasticsearch
modules/lang-painless/src/main/java/org/elasticsearch/painless/symbol/IRDecorations.java
{ "start": 7913, "end": 8151 }
class ____ extends IRDecoration<String> { public IRDIterableName(String value) { super(value); } } /** describes a method for a node; which method depends on node type */ public static
IRDIterableName
java
alibaba__nacos
core/src/main/java/com/alibaba/nacos/core/namespace/model/form/NamespaceForm.java
{ "start": 973, "end": 2555 }
class ____ implements NacosForm { private static final long serialVersionUID = -1078976569495343487L; private String namespaceId; private String namespaceName; private String namespaceDesc; public NamespaceForm() { } public NamespaceForm(String namespaceId, String namespaceName, String namespaceDesc) { this.namespaceId = namespaceId; this.namespaceName = namespaceName; this.namespaceDesc = namespaceDesc; } public String getNamespaceId() { return namespaceId; } public void setNamespaceId(String namespaceId) { this.namespaceId = namespaceId; } public String getNamespaceName() { return namespaceName; } public void setNamespaceName(String namespaceName) { this.namespaceName = namespaceName; } public String getNamespaceDesc() { return namespaceDesc; } public void setNamespaceDesc(String namespaceDesc) { this.namespaceDesc = namespaceDesc; } @Override public void validate() throws NacosApiException { if (null == namespaceId) { throw new NacosApiException(HttpStatus.BAD_REQUEST.value(), ErrorCode.PARAMETER_MISSING, "required parameter 'namespaceId' is missing"); } if (null == namespaceName) { throw new NacosApiException(HttpStatus.BAD_REQUEST.value(), ErrorCode.PARAMETER_MISSING, "required parameter 'namespaceName' is missing"); } } }
NamespaceForm
java
netty__netty
codec-http2/src/main/java/io/netty/handler/codec/http2/Http2ConnectionHandler.java
{ "start": 17633, "end": 19003 }
class ____ extends BaseDecoder { @Override public void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception { try { decoder.decodeFrame(ctx, in, out); } catch (Throwable e) { onError(ctx, false, e); } } } @Override public void handlerAdded(ChannelHandlerContext ctx) throws Exception { // Initialize the encoder, decoder, flow controllers, and internal state. encoder.lifecycleManager(this); decoder.lifecycleManager(this); encoder.flowController().channelHandlerContext(ctx); decoder.flowController().channelHandlerContext(ctx); byteDecoder = new PrefaceDecoder(ctx); } @Override protected void handlerRemoved0(ChannelHandlerContext ctx) throws Exception { if (byteDecoder != null) { byteDecoder.handlerRemoved(ctx); byteDecoder = null; } } @Override public void channelActive(ChannelHandlerContext ctx) throws Exception { if (byteDecoder == null) { byteDecoder = new PrefaceDecoder(ctx); } byteDecoder.channelActive(ctx); super.channelActive(ctx); } @Override public void channelInactive(ChannelHandlerContext ctx) throws Exception { // Call super
FrameDecoder
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java
{ "start": 8814, "end": 9446 }
class ____ extends SourceBlockLoader { public DoublesBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup) { super(fetcher, lookup); } @Override public Builder builder(BlockFactory factory, int expectedCount) { return factory.doubles(expectedCount); } @Override public RowStrideReader rowStrideReader(LeafReaderContext context, DocIdSetIterator iter) { return new Doubles(fetcher, iter); } @Override protected String name() { return "Doubles"; } } private static
DoublesBlockLoader
java
mapstruct__mapstruct
processor/src/test/java/org/mapstruct/ap/test/bugs/_1482/SourceTargetMapper.java
{ "start": 313, "end": 709 }
class ____ { public static final SourceTargetMapper INSTANCE = Mappers.getMapper( SourceTargetMapper.class ); @Mapping( target = "bigDecimal", source = "wrapper" ) abstract Target map(Source source); protected String map(Enum<SourceEnum> e) { return e.toString(); } protected <T> T map(ValueWrapper<T> in) { return in.getValue(); } }
SourceTargetMapper
java
spring-projects__spring-boot
module/spring-boot-activemq/src/main/java/org/springframework/boot/activemq/autoconfigure/ActiveMQAutoConfiguration.java
{ "start": 2289, "end": 2604 }
class ____ { @Bean @ConditionalOnMissingBean ActiveMQConnectionDetails activemqConnectionDetails(ActiveMQProperties properties) { return new PropertiesActiveMQConnectionDetails(properties); } /** * Adapts {@link ActiveMQProperties} to {@link ActiveMQConnectionDetails}. */ static
ActiveMQAutoConfiguration
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/ExpensiveLenientFormatStringTest.java
{ "start": 1485, "end": 1924 }
class ____ { void f() { checkNotNull(this, "%s", "hello"); } void g() { checkNotNull(this, "hello"); } void h() { checkNotNull(this, String.format("%d", 42)); } void i() { checkNotNull(this, "%s", "hello"); } } """) .addOutputLines( "Test.java", """ package com.google.devtools.javatools.refactory.refaster.cleanups; import static com.google.common.base.Preconditions.checkNotNull;
Test
java
assertj__assertj-core
assertj-core/src/test/java/org/assertj/core/error/ShouldBeInSameMinuteWindow_create_Test.java
{ "start": 1278, "end": 2150 }
class ____ { @Test void should_create_error_message() { // GIVEN ErrorMessageFactory factory = shouldBeInSameMinuteWindow(parseDatetime("2011-01-01T05:00:00"), parseDatetime("2011-01-01T05:02:01")); // WHEN String message = factory.create(new TextDescription("Test"), new StandardRepresentation()); // THEN then(message).isEqualTo(format("[Test] %n" + "Expecting actual:%n" + " 2011-01-01T05:00:00.000 (java.util.Date)%n" + "to be close to:%n" + " 2011-01-01T05:02:01.000 (java.util.Date)%n" + "by less than one minute (strictly) but difference was: 2m and 1s")); } }
ShouldBeInSameMinuteWindow_create_Test
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/engine/transaction/jta/platform/internal/ResinJtaPlatform.java
{ "start": 419, "end": 876 }
class ____ extends AbstractJtaPlatform { public static final String TM_NAME = "java:comp/TransactionManager"; public static final String UT_NAME = "java:comp/UserTransaction"; @Override protected TransactionManager locateTransactionManager() { return (TransactionManager) jndiService().locate( TM_NAME ); } @Override protected UserTransaction locateUserTransaction() { return (UserTransaction) jndiService().locate( UT_NAME ); } }
ResinJtaPlatform
java
hibernate__hibernate-orm
hibernate-testing/src/main/java/org/hibernate/testing/jdbc/JdbcSpies.java
{ "start": 7059, "end": 8888 }
class ____ implements InvocationHandler, Spy { protected final Statement statement; protected final SpyContext context; protected final Connection connectionProxy; public StatementHandler(Statement statement, SpyContext context, Connection connectionProxy) { this.statement = statement; this.context = context; this.connectionProxy = connectionProxy; } @Override public Object getSpiedInstance() { return statement; } @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { switch ( method.getName() ) { case "getConnection": return context.onCall( proxy, method, args, connectionProxy ); case "toString": return context.onCall( proxy, method, args, "Statement proxy [@" + hashCode() + "]" ); case "hashCode": return context.onCall( proxy, method, args, hashCode() ); case "equals": return context.onCall( proxy, method, args, proxy == args[0] ); case "executeQuery": return context.onCall( proxy, method, args, getResultSetProxy( statement.executeQuery( (String) args[0] ), (Statement) proxy ) ); case "getResultSet": return context.onCall( proxy, method, args, getResultSetProxy( statement.getResultSet(), (Statement) proxy ) ); case "getGeneratedKeys": return context.onCall( proxy, method, args, getResultSetProxy( statement.getGeneratedKeys(), (Statement) proxy ) ); default: return context.call( proxy, statement, method, args ); } } protected ResultSet getResultSetProxy(ResultSet resultSet, Statement statementProxy) throws Throwable { return (ResultSet) Proxy.newProxyInstance( ClassLoader.getSystemClassLoader(), new Class[] {ResultSet.class}, new ResultSetHandler( resultSet, context, statementProxy ) ); } } private static
StatementHandler
java
quarkusio__quarkus
integration-tests/spring-data-jpa/src/main/java/io/quarkus/it/spring/data/jpa/generics/Father.java
{ "start": 277, "end": 907 }
class ____ extends FatherBase<Child> { @Id @GeneratedValue(strategy = GenerationType.IDENTITY) private Long id; @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || Hibernate.getClass(this) != Hibernate.getClass(o)) return false; Father that = (Father) o; return id != null && Objects.equals(id, that.id); } @Override public int hashCode() { return getClass().hashCode(); } public Long getId() { return id; } public void setId(Long id) { this.id = id; } }
Father
java
alibaba__nacos
api/src/main/java/com/alibaba/nacos/api/config/remote/request/ConfigBatchListenRequest.java
{ "start": 2719, "end": 5045 }
class ____ { String group; String md5; String dataId; String tenant; public ConfigListenContext() { } @Override public String toString() { return "ConfigListenContext{" + "group='" + group + '\'' + ", md5='" + md5 + '\'' + ", dataId='" + dataId + '\'' + ", tenant='" + tenant + '\'' + '}'; } /** * Getter method for property <tt>group</tt>. * * @return property value of group */ public String getGroup() { return group; } /** * Setter method for property <tt>groupId</tt>. * * @param group value to be assigned to property groupId */ public void setGroup(String group) { this.group = group; } /** * Getter method for property <tt>md5</tt>. * * @return property value of md5 */ public String getMd5() { return md5; } /** * Setter method for property <tt>md5</tt>. * * @param md5 value to be assigned to property md5 */ public void setMd5(String md5) { this.md5 = md5; } /** * Getter method for property <tt>dataId</tt>. * * @return property value of dataId */ public String getDataId() { return dataId; } /** * Setter method for property <tt>dataId</tt>. * * @param dataId value to be assigned to property dataId */ public void setDataId(String dataId) { this.dataId = dataId; } /** * Getter method for property <tt>tenant</tt>. * * @return property value of tenant */ public String getTenant() { return tenant; } /** * Setter method for property <tt>tenant</tt>. * * @param tenant value to be assigned to property tenant */ public void setTenant(String tenant) { this.tenant = tenant; } } }
ConfigListenContext
java
apache__camel
core/camel-core/src/test/java/org/apache/camel/component/validator/ValidatorResourceResolverFactoryTest.java
{ "start": 6719, "end": 7440 }
class ____ extends DefaultLSResourceResolver { private final Set<String> resolvedResourceUris = new HashSet<>(); CustomResourceResolver(CamelContext camelContext, String resourceUri) { super(camelContext, resourceUri); } public Set<String> getResolvedResourceUris() { return resolvedResourceUris; } @Override public LSInput resolveResource(String type, String namespaceURI, String publicId, String systemId, String baseURI) { LSInput result = super.resolveResource(type, namespaceURI, publicId, systemId, baseURI); resolvedResourceUris.add(systemId); return result; } } }
CustomResourceResolver
java
apache__camel
components/camel-ftp/src/test/java/org/apache/camel/component/file/remote/integration/FromFileToFtpDeleteIT.java
{ "start": 1414, "end": 2621 }
class ____ extends FtpServerTestSupport { @TempDir Path testDirectory; protected String getFtpUrl() { return "ftp://admin@localhost:{{ftp.server.port}}?password=admin"; } @Test public void testFromFileToFtpDelete() throws Exception { NotifyBuilder notify = new NotifyBuilder(context).whenDone(1).create(); MockEndpoint mock = getMockEndpoint("mock:result"); mock.expectedMessageCount(1); template.sendBodyAndHeader(TestSupport.fileUri(testDirectory, "delete"), "Hello World", Exchange.FILE_NAME, "hello.txt"); MockEndpoint.assertIsSatisfied(context); assertTrue(notify.matchesWaitTime()); // file should be deleted assertFileNotExists(testDirectory.resolve("delete/hello.txt")); // file should exists on ftp server assertFileExists(service.ftpFile("hello.txt")); } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { public void configure() { from(TestSupport.fileUri(testDirectory, "delete?delete=true")).to(getFtpUrl()).to("mock:result"); } }; } }
FromFileToFtpDeleteIT
java
quarkusio__quarkus
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/resource/basic/resource/GenericResourceStudentReader.java
{ "start": 554, "end": 1374 }
class ____ implements MessageBodyReader<GenericResourceStudent> { public boolean isReadable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) { return true; } public GenericResourceStudent readFrom(Class<GenericResourceStudent> type, Type genericType, Annotation[] annotations, MediaType mediaType, MultivaluedMap<String, String> httpHeaders, InputStream entityStream) throws IOException, WebApplicationException { BufferedReader br = null; try { br = new BufferedReader(new InputStreamReader(entityStream)); return new GenericResourceStudent(br.readLine()); } catch (Exception e) { throw new RuntimeException("Unable to parse student.", e); } } }
GenericResourceStudentReader
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/NodesListManager.java
{ "start": 22024, "end": 30733 }
class ____ extends TimerTask { @Override public void run() { long currentTime = clock.getTime(); Iterator<Map.Entry<String, CacheEntry>> iterator = cache.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry<String, CacheEntry> entry = iterator.next(); if (currentTime > entry.getValue().resolveTime + CachedResolver.this.expiryIntervalMs) { iterator.remove(); if (LOG.isDebugEnabled()) { LOG.debug("[" + entry.getKey() + ":" + entry.getValue().ip + "] Expired after " + CachedResolver.this.expiryIntervalMs / 1000 + " secs"); } } } } } } public boolean isValidNode(String hostName) { HostDetails hostDetails = hostsReader.getHostDetails(); return isValidNode(hostName, hostDetails.getIncludedHosts(), hostDetails.getExcludedHosts()); } boolean isGracefullyDecommissionableNode(RMNode node) { return gracefulDecommissionableNodes.contains(node); } private boolean isValidNode( String hostName, Set<String> hostsList, Set<String> excludeList) { String ip = resolver.resolve(hostName); return (hostsList.isEmpty() || hostsList.contains(hostName) || hostsList .contains(ip)) && !(excludeList.contains(hostName) || excludeList.contains(ip)); } private void sendRMAppNodeUpdateEventToNonFinalizedApps( RMNode eventNode, RMAppNodeUpdateType appNodeUpdateType) { for(RMApp app : rmContext.getRMApps().values()) { if (!app.isAppFinalStateStored()) { app.handle(new RMAppNodeUpdateEvent(app.getApplicationId(), eventNode, appNodeUpdateType)); } } } @Override public void handle(NodesListManagerEvent event) { RMNode eventNode = event.getNode(); switch (event.getType()) { case NODE_UNUSABLE: LOG.debug("{} reported unusable", eventNode); sendRMAppNodeUpdateEventToNonFinalizedApps(eventNode, RMAppNodeUpdateType.NODE_UNUSABLE); break; case NODE_USABLE: LOG.debug("{} reported usable", eventNode); sendRMAppNodeUpdateEventToNonFinalizedApps(eventNode, RMAppNodeUpdateType.NODE_USABLE); break; case NODE_DECOMMISSIONING: LOG.debug("{} reported decommissioning", eventNode); sendRMAppNodeUpdateEventToNonFinalizedApps( eventNode, RMAppNodeUpdateType.NODE_DECOMMISSIONING); break; default: LOG.error("Ignoring invalid eventtype " + event.getType()); } // remove the cache of normalized hostname if enabled if (resolver instanceof CachedResolver) { ((CachedResolver)resolver).removeFromCache( eventNode.getNodeID().getHost()); } } private void disableHostsFileReader(Exception ex) { LOG.warn("Failed to init hostsReader, disabling", ex); try { this.includesFile = conf.get(YarnConfiguration.DEFAULT_RM_NODES_INCLUDE_FILE_PATH); this.excludesFile = conf.get(YarnConfiguration.DEFAULT_RM_NODES_EXCLUDE_FILE_PATH); this.hostsReader = createHostsFileReader(this.includesFile, this.excludesFile); setDecommissionedNMs(); } catch (IOException ioe2) { // Should *never* happen this.hostsReader = null; throw new YarnRuntimeException(ioe2); } catch (YarnException e) { // Should *never* happen this.hostsReader = null; throw new YarnRuntimeException(e); } } @VisibleForTesting public HostsFileReader getHostsReader() { return this.hostsReader; } private HostsFileReader createHostsFileReader(String includesFile, String excludesFile) throws IOException, YarnException { HostsFileReader hostsReader = new HostsFileReader(includesFile, (includesFile == null || includesFile.isEmpty()) ? null : this.rmContext.getConfigurationProvider() .getConfigurationInputStream(this.conf, includesFile), excludesFile, (excludesFile == null || excludesFile.isEmpty()) ? 
null : this.rmContext.getConfigurationProvider() .getConfigurationInputStream(this.conf, excludesFile)); return hostsReader; } private void updateInactiveNodes() { long now = Time.monotonicNow(); for(Entry<NodeId, RMNode> entry : rmContext.getInactiveRMNodes().entrySet()) { NodeId nodeId = entry.getKey(); RMNode rmNode = entry.getValue(); if (isUntrackedNode(nodeId.getHost()) && rmNode.getUntrackedTimeStamp() == 0) { rmNode.setUntrackedTimeStamp(now); } } } public boolean isUntrackedNode(String hostName) { String ip = resolver.resolve(hostName); HostDetails hostDetails = hostsReader.getHostDetails(); Set<String> hostsList = hostDetails.getIncludedHosts(); Set<String> excludeList = hostDetails.getExcludedHosts(); return (!hostsList.isEmpty() || (enableNodeUntrackedWithoutIncludePath && (hostDetails.getIncludesFile() == null || hostDetails.getIncludesFile().isEmpty()))) && !hostsList.contains(hostName) && !hostsList.contains(ip) && !excludeList.contains(hostName) && !excludeList.contains(ip); } /** * Refresh the nodes gracefully. * * @param yarnConf yarn configuration. * @param timeout decommission timeout, null means default timeout. * @throws IOException io error occur. * @throws YarnException exceptions from yarn servers. */ public void refreshNodesGracefully(Configuration yarnConf, Integer timeout) throws IOException, YarnException { refreshHostsReader(yarnConf, true, timeout); } /** * It checks for any nodes in decommissioning state * * @return decommissioning nodes */ public Set<NodeId> checkForDecommissioningNodes() { Set<NodeId> decommissioningNodes = new HashSet<NodeId>(); for (Entry<NodeId, RMNode> entry : rmContext.getRMNodes().entrySet()) { if (entry.getValue().getState() == NodeState.DECOMMISSIONING) { decommissioningNodes.add(entry.getKey()); } } return decommissioningNodes; } /** * Forcefully decommission the nodes if they are in DECOMMISSIONING state */ public void refreshNodesForcefully() { for (Entry<NodeId, RMNode> entry : rmContext.getRMNodes().entrySet()) { if (entry.getValue().getState() == NodeState.DECOMMISSIONING) { RMNodeEventType nodeEventType = isUntrackedNode(entry.getKey().getHost()) ? RMNodeEventType.SHUTDOWN : RMNodeEventType.DECOMMISSION; this.rmContext.getDispatcher().getEventHandler().handle( new RMNodeEvent(entry.getKey(), nodeEventType)); } } } // Read possible new DECOMMISSIONING_TIMEOUT_KEY from yarn-site.xml. // This enables NodesListManager to pick up new value without // ResourceManager restart. private int readDecommissioningTimeout(Configuration pConf) { try { if (pConf == null) { pConf = new YarnConfiguration(); } int configuredDefaultDecTimeoutSecs = pConf.getInt(YarnConfiguration.RM_NODE_GRACEFUL_DECOMMISSION_TIMEOUT, YarnConfiguration.DEFAULT_RM_NODE_GRACEFUL_DECOMMISSION_TIMEOUT); if (defaultDecTimeoutSecs != configuredDefaultDecTimeoutSecs) { defaultDecTimeoutSecs = configuredDefaultDecTimeoutSecs; LOG.info("Use new decommissioningTimeoutSecs: " + defaultDecTimeoutSecs); } } catch (Exception e) { LOG.warn("Error readDecommissioningTimeout " + e.getMessage()); } return defaultDecTimeoutSecs; } /** * A NodeId instance needed upon startup for populating inactive nodes Map. * It only knows the hostname/ip and marks the port to -1 or invalid. * * @param host host name. * @return node id. */ public static NodeId createUnknownNodeId(String host) { return NodeId.newInstance(host, -1); } /** * Creates a NodeId for a node marked as LOST. 
* * The NodeId combines the hostname with a special port value of -2, indicating * that the node is lost in the cluster. * * @param host The hostname of the lost node. * @return NodeId Unique identifier for the lost node, with the port set to -2. */ public static NodeId createLostNodeId(String host) { // Create a NodeId with the given host and port -2 to signify the node is lost. return NodeId.newInstance(host, -2); } /** * A Node instance needed upon startup for populating inactive nodes Map. * It only knows its hostname/ip. */ private static
ExpireChecker
java
apache__flink
flink-test-utils-parent/flink-test-utils/src/main/java/org/apache/flink/test/util/TestingSecurityContext.java
{ "start": 1523, "end": 1626 }
class ____ used only in integration test code for connectors like Kafka, HDFS etc., */ @Internal public
is
java
apache__flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/BooleanToStringCastRule.java
{ "start": 1815, "end": 2883 }
class ____ extends AbstractExpressionCodeGeneratorCastRule<Boolean, StringData> { static final BooleanToStringCastRule INSTANCE = new BooleanToStringCastRule(); private BooleanToStringCastRule() { super( CastRulePredicate.builder() .input(LogicalTypeRoot.BOOLEAN) .target(STRING_TYPE) .build()); } @Override public String generateExpression( CodeGeneratorCastRule.Context context, String inputTerm, LogicalType inputLogicalType, LogicalType targetLogicalType) { if (context.legacyBehaviour()) { return CastRuleUtils.staticCall( BINARY_STRING_DATA_FROM_STRING(), stringConcat(EMPTY_STR_LITERAL, inputTerm)); } return ternaryOperator( inputTerm, accessStaticField(BinaryStringDataUtil.class, "TRUE_STRING"), accessStaticField(BinaryStringDataUtil.class, "FALSE_STRING")); } }
BooleanToStringCastRule
java
spring-projects__spring-framework
spring-core/src/main/java/org/springframework/asm/ClassVisitor.java
{ "start": 7161, "end": 7246 }
class ____ as * argument. * * @param nestHost the internal name of the host
name
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/JobIdsWithStatusesOverviewHeaders.java
{ "start": 1150, "end": 2521 }
class ____ implements RuntimeMessageHeaders< EmptyRequestBody, JobIdsWithStatusOverview, EmptyMessageParameters> { public static final String CURRENT_JOB_IDS_REST_PATH = "/jobs"; private static final JobIdsWithStatusesOverviewHeaders INSTANCE = new JobIdsWithStatusesOverviewHeaders(); private JobIdsWithStatusesOverviewHeaders() {} @Override public Class<EmptyRequestBody> getRequestClass() { return EmptyRequestBody.class; } @Override public HttpMethodWrapper getHttpMethod() { return HttpMethodWrapper.GET; } @Override public String getTargetRestEndpointURL() { return CURRENT_JOB_IDS_REST_PATH; } @Override public Class<JobIdsWithStatusOverview> getResponseClass() { return JobIdsWithStatusOverview.class; } @Override public HttpResponseStatus getResponseStatusCode() { return HttpResponseStatus.OK; } @Override public EmptyMessageParameters getUnresolvedMessageParameters() { return EmptyMessageParameters.getInstance(); } public static JobIdsWithStatusesOverviewHeaders getInstance() { return INSTANCE; } @Override public String getDescription() { return "Returns an overview over all jobs and their current state."; } }
JobIdsWithStatusesOverviewHeaders
java
elastic__elasticsearch
test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
{ "start": 11934, "end": 22153 }
class ____ extends LuceneTestCase { protected static final List<String> JAVA_TIMEZONE_IDS; protected static final List<String> JAVA_ZONE_IDS; private static final AtomicInteger portGenerator = new AtomicInteger(); private static final Collection<String> loggedLeaks = new ArrayList<>(); private HeaderWarningAppender headerWarningAppender; @AfterClass public static void resetPortCounter() { portGenerator.set(0); } // Allows distinguishing between parallel test processes public static final String TEST_WORKER_VM_ID; public static final String TEST_WORKER_SYS_PROPERTY = "org.gradle.test.worker"; public static final String DEFAULT_TEST_WORKER_ID = "--not-gradle--"; public static final String FIPS_SYSPROP = "tests.fips.enabled"; private static final SetOnce<Boolean> WARN_SECURE_RANDOM_FIPS_NOT_DETERMINISTIC = new SetOnce<>(); private static final String LOWER_ALPHA_CHARACTERS = "abcdefghijklmnopqrstuvwxyz"; private static final String UPPER_ALPHA_CHARACTERS = LOWER_ALPHA_CHARACTERS.toUpperCase(Locale.ROOT); private static final String DIGIT_CHARACTERS = "0123456789"; private static final String ALPHANUMERIC_CHARACTERS = LOWER_ALPHA_CHARACTERS + UPPER_ALPHA_CHARACTERS + DIGIT_CHARACTERS; static { Random random = initTestSeed(); TEST_WORKER_VM_ID = System.getProperty(TEST_WORKER_SYS_PROPERTY, DEFAULT_TEST_WORKER_ID); setTestSysProps(random); // TODO: consolidate logging initialization for tests so it all occurs in logconfigurator LogConfigurator.loadLog4jPlugins(); LogConfigurator.configureESLogging(); MockLog.init(); final List<Appender> testAppenders = new ArrayList<>(3); for (String leakLoggerName : Arrays.asList("io.netty.util.ResourceLeakDetector", LeakTracker.class.getName())) { Logger leakLogger = LogManager.getLogger(leakLoggerName); Appender leakAppender = new AbstractAppender(leakLoggerName, null, PatternLayout.newBuilder().withPattern("%m").build()) { @Override public void append(LogEvent event) { String message = event.getMessage().getFormattedMessage(); if (Level.ERROR.equals(event.getLevel()) && message.contains("LEAK:")) { synchronized (loggedLeaks) { loggedLeaks.add(message); } } } }; leakAppender.start(); Loggers.addAppender(leakLogger, leakAppender); testAppenders.add(leakAppender); } Logger promiseUncaughtLogger = LogManager.getLogger("io.netty.util.concurrent.DefaultPromise"); final Appender uncaughtAppender = new AbstractAppender( promiseUncaughtLogger.getName(), null, PatternLayout.newBuilder().withPattern("%m").build() ) { @Override public void append(LogEvent event) { if (Level.WARN.equals(event.getLevel())) { synchronized (loggedLeaks) { loggedLeaks.add(event.getMessage().getFormattedMessage()); } } } }; uncaughtAppender.start(); Loggers.addAppender(promiseUncaughtLogger, uncaughtAppender); testAppenders.add(uncaughtAppender); // shutdown hook so that when the test JVM exits, logging is shutdown too Runtime.getRuntime().addShutdownHook(new Thread(() -> { for (Appender testAppender : testAppenders) { testAppender.stop(); } LoggerContext context = (LoggerContext) LogManager.getContext(false); Configurator.shutdown(context); })); BootstrapForTesting.ensureInitialized(); /* * We need to exclude time zones not supported by joda (like SystemV* timezones) * because they cannot be converted back to DateTimeZone which we currently * still need to do internally e.g. 
in bwc serialization and in the extract() method * //TODO remove once tests do not send time zone ids back to versions of ES using Joda */ Set<String> unsupportedJodaTZIds = Set.of( "ACT", "AET", "AGT", "ART", "AST", "BET", "BST", "CAT", "CNT", "CST", "CTT", "EAT", "ECT", "EST", "HST", "IET", "IST", "JST", "MIT", "MST", "NET", "NST", "PLT", "PNT", "PRT", "PST", "SST", "VST" ); Predicate<String> unsupportedZoneIdsPredicate = tz -> tz.startsWith("System/") || tz.equals("Eire"); Predicate<String> unsupportedTZIdsPredicate = unsupportedJodaTZIds::contains; JAVA_TIMEZONE_IDS = Arrays.stream(TimeZone.getAvailableIDs()) .filter(unsupportedTZIdsPredicate.negate()) .filter(unsupportedZoneIdsPredicate.negate()) .sorted() .toList(); JAVA_ZONE_IDS = ZoneId.getAvailableZoneIds().stream().filter(unsupportedZoneIdsPredicate.negate()).sorted().toList(); } protected static Random initTestSeed() { String inputSeed = System.getProperty("tests.seed"); long seed; if (inputSeed == null) { // when running tests in intellij, we don't have a seed. Setup the seed early here, before getting to RandomizedRunner, // so that we can use it in ESTestCase static init seed = System.nanoTime(); setTestSeed(Long.toHexString(seed)); } else { String[] seedParts = inputSeed.split("[\\:]"); seed = Long.parseUnsignedLong(seedParts[0], 16); } if (Booleans.parseBoolean(System.getProperty("tests.hackImmutableCollections", "false"))) { forceImmutableCollectionsSeed(seed); } return new Random(seed); } @SuppressForbidden(reason = "set tests.seed for intellij") static void setTestSeed(String seed) { System.setProperty("tests.seed", seed); } private static void forceImmutableCollectionsSeed(long seed) { try { MethodHandles.Lookup lookup = MethodHandles.lookup(); Class<?> collectionsClass = Class.forName("java.util.ImmutableCollections"); var salt32l = lookup.findStaticVarHandle(collectionsClass, "SALT32L", long.class); var reverse = lookup.findStaticVarHandle(collectionsClass, "REVERSE", boolean.class); salt32l.set(seed & 0xFFFF_FFFFL); reverse.set((seed & 1) == 0); } catch (Exception e) { throw new AssertionError(e); } } @SuppressForbidden(reason = "force log4j and netty sysprops") private static void setTestSysProps(Random random) { System.setProperty("log4j.shutdownHookEnabled", "false"); System.setProperty("log4j2.disable.jmx", "true"); // Enable Netty leak detection and monitor logger for logged leak errors System.setProperty("io.netty.leakDetection.level", "paranoid"); if (System.getProperty("es.use_unpooled_allocator") == null) { // unless explicitly forced to unpooled, always test with the pooled allocator to get the best possible coverage from Netty's // leak detection which does not cover simple unpooled heap buffers System.setProperty("es.use_unpooled_allocator", "false"); } // We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each // other if we allow them to set the number of available processors as it's set-once in Netty. System.setProperty("es.set.netty.runtime.available.processors", "false"); } protected final Logger logger = LogManager.getLogger(getClass()); private ThreadContext threadContext; // ----------------------------------------------------------------- // Suite and test case setup/cleanup. 
// ----------------------------------------------------------------- @Rule public RuleChain failureAndSuccessEvents = RuleChain.outerRule(new TestRuleAdapter() { @Override protected void afterIfSuccessful() throws Throwable { ESTestCase.this.afterIfSuccessful(); } @Override protected void afterAlways(List<Throwable> errors) throws Throwable { if (errors != null && errors.isEmpty() == false) { boolean allAssumption = true; for (Throwable error : errors) { if (false == error instanceof AssumptionViolatedException) { allAssumption = false; break; } } if (false == allAssumption) { ESTestCase.this.afterIfFailed(errors); } } super.afterAlways(errors); } }); /** * Generates a new transport address using {@link TransportAddress#META_ADDRESS} with an incrementing port number. * The port number starts at 0 and is reset after each test suite run. */ public static TransportAddress buildNewFakeTransportAddress() { return new TransportAddress(TransportAddress.META_ADDRESS, portGenerator.incrementAndGet()); } /** * Called when a test fails, supplying the errors it generated. Not called when the test fails because assumptions are violated. */ protected void afterIfFailed(List<Throwable> errors) {} /** called after a test is finished, but only if successful */ protected void afterIfSuccessful() throws Exception {} /** * Marks a test suite or a test method that should run without checking for entitlements. */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.TYPE) @Inherited public @
ESTestCase
java
spring-projects__spring-framework
spring-context/src/test/java/org/springframework/context/event/AnnotationDrivenEventListenerTests.java
{ "start": 27922, "end": 28122 }
class ____ extends AbstractTestEventListener { @EventListener public void handleContextEvent(ApplicationContextEvent event) { collectEvent(event); } } @Component static
ContextEventListener
java
quarkusio__quarkus
core/deployment/src/main/java/io/quarkus/deployment/Feature.java
{ "start": 187, "end": 3695 }
enum ____ { AGROAL, AMAZON_LAMBDA, AZURE_FUNCTIONS, APICURIO_REGISTRY_AVRO, APICURIO_REGISTRY_JSON_SCHEMA, AWT, CACHE, CDI, COMPOSE, CONFIG_YAML, CONFLUENT_REGISTRY_AVRO, CONFLUENT_REGISTRY_JSON, ELASTICSEARCH_REST_CLIENT_COMMON, ELASTICSEARCH_REST_CLIENT, ELASTICSEARCH_REST_HIGH_LEVEL_CLIENT, ELASTICSEARCH_JAVA_CLIENT, FLYWAY, GRPC_CLIENT, GRPC_SERVER, HIBERNATE_ORM, HIBERNATE_ENVERS, HIBERNATE_ORM_PANACHE, HIBERNATE_ORM_PANACHE_KOTLIN, HIBERNATE_ORM_REST_DATA_PANACHE, HIBERNATE_REACTIVE, HIBERNATE_REACTIVE_PANACHE, HIBERNATE_REACTIVE_PANACHE_KOTLIN, HIBERNATE_REACTIVE_REST_DATA_PANACHE, HIBERNATE_SEARCH_ELASTICSEARCH, HIBERNATE_SEARCH_STANDALONE_ELASTICSEARCH, HIBERNATE_VALIDATOR, INFINISPAN_CLIENT, INFINISPAN_EMBEDDED, JDBC_DB2, JDBC_H2, JDBC_POSTGRESQL, JDBC_MARIADB, JDBC_MSSQL, JDBC_MYSQL, JDBC_ORACLE, JFR, KAFKA_CLIENT, KAFKA_STREAMS, KEYCLOAK_AUTHORIZATION, KOTLIN, KUBERNETES, KUBERNETES_CLIENT, LIQUIBASE, LIQUIBASE_MONGODB, LOGGING_GELF, MAILER, MICROMETER, MONGODB_CLIENT, MONGODB_PANACHE, MONGODB_PANACHE_KOTLIN, MONGODB_REST_DATA_PANACHE, MUTINY, NARAYANA_JTA, NARAYANA_LRA, NARAYANA_STM, NEO4J, OBSERVABILITY, OIDC, OIDC_CLIENT, OIDC_CLIENT_REGISTRATION, RESTEASY_CLIENT_OIDC_FILTER, REST_CLIENT_OIDC_FILTER, OIDC_CLIENT_GRAPHQL_CLIENT_INTEGRATION, RESTEASY_CLIENT_OIDC_TOKEN_PROPAGATION, REST_CLIENT_OIDC_TOKEN_PROPAGATION, OPENSHIFT_CLIENT, OPENTELEMETRY, OPENTELEMETRY_JAEGER_EXPORTER, OPENTELEMETRY_OTLP_EXPORTER, PICOCLI, QUARTZ, QUTE, REACTIVE_PG_CLIENT, REACTIVE_MYSQL_CLIENT, REACTIVE_MSSQL_CLIENT, REACTIVE_DB2_CLIENT, REACTIVE_ORACLE_CLIENT, REACTIVE_ROUTES, REDIS_CLIENT, RESTEASY, RESTEASY_JACKSON, RESTEASY_JAXB, RESTEASY_JSONB, RESTEASY_MULTIPART, RESTEASY_MUTINY, RESTEASY_QUTE, REST, REST_CSRF, REST_QUTE, REST_JSONB, REST_JAXB, REST_JACKSON, REST_KOTLIN_SERIALIZATION, REST_LINKS, RESTEASY_CLIENT, RESTEASY_CLIENT_JACKSON, RESTEASY_CLIENT_JAXB, RESTEASY_CLIENT_JSONB, RESTEASY_CLIENT_MUTINY, REST_CLIENT, REST_CLIENT_JACKSON, REST_CLIENT_JAXB, REST_CLIENT_JSONB, REST_CLIENT_KOTLIN_SERIALIZATION, SCALA, SCHEDULER, SECURITY, SECURITY_JDBC, SECURITY_LDAP, SECURITY_JPA, SECURITY_JPA_REACTIVE, SECURITY_PROPERTIES_FILE, SECURITY_OAUTH2, SECURITY_WEBAUTHN, SERVLET, SMALLRYE_CONTEXT_PROPAGATION, SMALLRYE_FAULT_TOLERANCE, SMALLRYE_HEALTH, SMALLRYE_JWT, SMALLRYE_METRICS, SMALLRYE_OPENAPI, MESSAGING, MESSAGING_KAFKA, MESSAGING_AMQP, MESSAGING_MQTT, MESSAGING_RABBITMQ, MESSAGING_PULSAR, SMALLRYE_REACTIVE_STREAMS_OPERATORS, SMALLRYE_REACTIVE_TYPE_CONVERTERS, SMALLRYE_GRAPHQL, SMALLRYE_GRAPHQL_CLIENT, SPRING_DI, SPRING_WEB, SPRING_DATA_JPA, SPRING_DATA_REST, SPRING_SECURITY, SPRING_BOOT_PROPERTIES, SPRING_CACHE, SPRING_CLOUD_CONFIG_CLIENT, SPRING_SCHEDULED, SWAGGER_UI, WEBSOCKETS, WEBSOCKETS_CLIENT, VAULT, VERTX, VERTX_GRAPHQL, WEB_DEPENDENCY_LOCATOR; public String getName() { return toString().toLowerCase().replace('_', '-'); } }
Feature
java
elastic__elasticsearch
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/ExpressionQueryList.java
{ "start": 2958, "end": 3352 }
class ____ two types of joins: * 1. Field-based join: The join conditions are based on the equality of fields from the left and right datasets. * It is used for field-based join when the join is on more than one field or there is a preJoinFilter * 2. Expression-based join: The join conditions are based on a complex expression that can involve multiple fields and operators. */ public
supports
java
quarkusio__quarkus
core/deployment/src/main/java/io/quarkus/deployment/configuration/ConfigCompatibility.java
{ "start": 1300, "end": 7528 }
class ____ { private static final Logger log = Logger.getLogger("io.quarkus.deployment.configuration"); /** * When these legacy name patterns are detected on iteration, remove them or replace them with other name(s). */ private static final KeyMap<BiFunction<ConfigSourceInterceptorContext, NameIterator, List<String>>> oldNames = keyMap( entry(List.of("quarkus", "package", "type"), ConfigCompatibility::quarkusPackageType), entry(List.of("quarkus", "package", "create-appcds"), ConfigCompatibility::quarkusPackageCreateAppcds), entry(List.of("quarkus", "package", "appcds-builder-image"), ConfigCompatibility::quarkusPackageAppcdsBuilderImage), entry(List.of("quarkus", "package", "appcds-use-container"), ConfigCompatibility::quarkusPackageAppcdsUseContainer), entry(List.of("quarkus", "package", "compress-jar"), ConfigCompatibility::quarkusPackageCompressJar), entry(List.of("quarkus", "package", "filter-optional-dependencies"), ConfigCompatibility::quarkusFilterOptionalDependencies), entry(List.of("quarkus", "package", "add-runner-suffix"), ConfigCompatibility::quarkusPackageAddRunnerSuffix), entry(List.of("quarkus", "package", "user-configured-ignored-entries"), ConfigCompatibility::quarkusPackageUserConfiguredIgnoredEntries), entry(List.of("quarkus", "package", "user-providers-directory"), ConfigCompatibility::quarkusPackageUserProvidersDirectory), entry(List.of("quarkus", "package", "included-optional-dependencies"), ConfigCompatibility::quarkusPackageIncludedOptionalDependencies), entry(List.of("quarkus", "package", "include-dependency-list"), ConfigCompatibility::quarkusPackageIncludeDependencyList), entry(List.of("quarkus", "package", "decompiler", "version"), ConfigCompatibility::quarkusPackageDecompilerVersion), entry(List.of("quarkus", "package", "decompiler", "enabled"), ConfigCompatibility::quarkusPackageDecompilerEnabled), entry(List.of("quarkus", "package", "decompiler", "jar-directory"), ConfigCompatibility::quarkusPackageDecompilerJarDirectory), entry(List.of("quarkus", "package", "manifest", "attributes", "*"), ConfigCompatibility::quarkusPackageManifestAttributes), entry(List.of("quarkus", "package", "manifest", "sections", "*", "*"), ConfigCompatibility::quarkusPackageManifestSections), entry(List.of("quarkus", "package", "manifest", "add-implementation-entries"), ConfigCompatibility::quarkusPackageManifestAddImplementationEntries)); /** * When these new name patterns are detected on get, see if legacy values are present and if so, * provide a default based on those value(s). 
*/ public static final KeyMap<BiFunction<ConfigSourceInterceptorContext, NameIterator, ConfigValue>> newNames = keyMap( entry(List.of("quarkus", "native", "enabled"), ConfigCompatibility::quarkusNativeEnabled), entry(List.of("quarkus", "native", "sources-only"), ConfigCompatibility::quarkusNativeSourcesOnly), entry(List.of("quarkus", "package", "jar", "enabled"), ConfigCompatibility::quarkusPackageJarEnabled), entry(List.of("quarkus", "package", "jar", "appcds", "enabled"), ConfigCompatibility::quarkusPackageJarAppcdsEnabled), entry(List.of("quarkus", "package", "jar", "appcds", "builder-image"), ConfigCompatibility::quarkusPackageJarAppcdsBuilderImage), entry(List.of("quarkus", "package", "jar", "appcds", "use-container"), ConfigCompatibility::quarkusPackageJarAppcdsUseContainer), entry(List.of("quarkus", "package", "jar", "type"), ConfigCompatibility::quarkusPackageJarType), entry(List.of("quarkus", "package", "jar", "compress"), ConfigCompatibility::quarkusPackageJarCompress), entry(List.of("quarkus", "package", "jar", "filter-optional-dependencies"), ConfigCompatibility::quarkusPackageJarFilterOptionalDependencies), entry(List.of("quarkus", "package", "jar", "add-runner-suffix"), ConfigCompatibility::quarkusPackageJarAddRunnerSuffix), entry(List.of("quarkus", "package", "jar", "user-configured-ignored-entries"), ConfigCompatibility::quarkusPackageJarUserConfiguredIgnoredEntries), entry(List.of("quarkus", "package", "jar", "user-providers-directory"), ConfigCompatibility::quarkusPackageJarUserProvidersDirectory), entry(List.of("quarkus", "package", "jar", "included-optional-dependencies"), ConfigCompatibility::quarkusPackageJarIncludedOptionalDependencies), entry(List.of("quarkus", "package", "jar", "include-dependency-list"), ConfigCompatibility::quarkusPackageJarIncludeDependencyList), entry(List.of("quarkus", "package", "jar", "manifest", "attributes", "*"), ConfigCompatibility::quarkusPackageJarManifestAttributes), entry(List.of("quarkus", "package", "jar", "manifest", "sections", "*", "*"), ConfigCompatibility::quarkusPackageJarManifestSections), entry(List.of("quarkus", "package", "jar", "manifest", "add-implementation-entries"), ConfigCompatibility::quarkusPackageJarManifestAddImplementationEntries), entry(List.of("quarkus", "package", "jar", "decompiler", "enabled"), ConfigCompatibility::quarkusPackageJarDecompilerEnabled), entry(List.of("quarkus", "package", "jar", "decompiler", "jar-directory"), ConfigCompatibility::quarkusPackageJarDecompilerJarDirectory)); /** * The interceptor at the front of the chain which handles hiding deprecated properties from the iterator. */ @Priority(Integer.MAX_VALUE) public static final
ConfigCompatibility
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateAction.java
{ "start": 584, "end": 929 }
class ____ extends ActionType<SimulateIndexTemplateResponse> { public static final SimulateIndexTemplateAction INSTANCE = new SimulateIndexTemplateAction(); public static final String NAME = "indices:admin/index_template/simulate_index"; private SimulateIndexTemplateAction() { super(NAME); } }
SimulateIndexTemplateAction
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/index/cache/request/ShardRequestCache.java
{ "start": 765, "end": 1820 }
class ____ { final CounterMetric evictionsMetric = new CounterMetric(); final CounterMetric totalMetric = new CounterMetric(); final CounterMetric hitCount = new CounterMetric(); final CounterMetric missCount = new CounterMetric(); public RequestCacheStats stats() { return new RequestCacheStats(totalMetric.count(), evictionsMetric.count(), hitCount.count(), missCount.count()); } public void onHit() { hitCount.inc(); } public void onMiss() { missCount.inc(); } public void onCached(Accountable key, BytesReference value) { totalMetric.inc(key.ramBytesUsed() + value.ramBytesUsed()); } public void onRemoval(Accountable key, BytesReference value, boolean evicted) { if (evicted) { evictionsMetric.inc(); } long dec = 0; if (key != null) { dec += key.ramBytesUsed(); } if (value != null) { dec += value.ramBytesUsed(); } totalMetric.dec(dec); } }
ShardRequestCache
java
quarkusio__quarkus
integration-tests/rest-client-reactive/src/main/java/io/quarkus/it/rest/client/main/wronghost/WrongHostRejectedClient.java
{ "start": 360, "end": 467 }
interface ____ { @GET @Produces(MediaType.TEXT_PLAIN) Response invoke(); }
WrongHostRejectedClient