language
stringclasses
1 value
repo
stringclasses
60 values
path
stringlengths
22
294
class_span
dict
source
stringlengths
13
1.16M
target
stringlengths
1
113
java
apache__camel
components/camel-chunk/src/generated/java/org/apache/camel/component/chunk/ChunkEndpointUriFactory.java
{ "start": 515, "end": 2416 }
class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory { private static final String BASE = ":resourceUri"; private static final Set<String> PROPERTY_NAMES; private static final Set<String> SECRET_PROPERTY_NAMES; private static final Map<String, String> MULTI_VALUE_PREFIXES; static { Set<String> props = new HashSet<>(10); props.add("allowContextMapAll"); props.add("allowTemplateFromHeader"); props.add("contentCache"); props.add("encoding"); props.add("extension"); props.add("lazyStartProducer"); props.add("resourceUri"); props.add("themeFolder"); props.add("themeLayer"); props.add("themeSubfolder"); PROPERTY_NAMES = Collections.unmodifiableSet(props); SECRET_PROPERTY_NAMES = Collections.emptySet(); MULTI_VALUE_PREFIXES = Collections.emptyMap(); } @Override public boolean isEnabled(String scheme) { return "chunk".equals(scheme); } @Override public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException { String syntax = scheme + BASE; String uri = syntax; Map<String, Object> copy = new HashMap<>(properties); uri = buildPathParameter(syntax, uri, "resourceUri", null, true, copy); uri = buildQueryParameters(uri, copy, encode); return uri; } @Override public Set<String> propertyNames() { return PROPERTY_NAMES; } @Override public Set<String> secretPropertyNames() { return SECRET_PROPERTY_NAMES; } @Override public Map<String, String> multiValuePrefixes() { return MULTI_VALUE_PREFIXES; } @Override public boolean isLenientProperties() { return false; } }
ChunkEndpointUriFactory
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/annotations/Source.java
{ "start": 1454, "end": 1640 }
interface ____ { /** * The source of timestamps. By default, the * {@linkplain SourceType#VM virtual machine} * is the source. */ SourceType value() default SourceType.VM; }
Source
java
alibaba__nacos
common/src/main/java/com/alibaba/nacos/common/task/AbstractDelayTask.java
{ "start": 766, "end": 1870 }
class ____ implements NacosTask { /** * Task time interval between twice processing, unit is millisecond. */ private long taskInterval; /** * The time which was processed at last time, unit is millisecond. */ private long lastProcessTime; /** * The default time interval, in milliseconds, between tasks. */ protected static final long INTERVAL = 1000L; /** * merge task. * * @param task task */ public abstract void merge(AbstractDelayTask task); public void setTaskInterval(long interval) { this.taskInterval = interval; } public long getTaskInterval() { return this.taskInterval; } public void setLastProcessTime(long lastProcessTime) { this.lastProcessTime = lastProcessTime; } public long getLastProcessTime() { return this.lastProcessTime; } @Override public boolean shouldProcess() { return (System.currentTimeMillis() - this.lastProcessTime >= this.taskInterval); } }
AbstractDelayTask
java
google__guava
android/guava/src/com/google/common/collect/BoundType.java
{ "start": 959, "end": 1367 }
enum ____ { /** The endpoint value <i>is not</i> considered part of the set ("exclusive"). */ OPEN(false), CLOSED(true); final boolean inclusive; BoundType(boolean inclusive) { this.inclusive = inclusive; } /** Returns the bound type corresponding to a boolean value for inclusivity. */ static BoundType forBoolean(boolean inclusive) { return inclusive ? CLOSED : OPEN; } }
BoundType
java
elastic__elasticsearch
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java
{ "start": 13363, "end": 14124 }
class ____ extends OptimizerRule<Aggregate> { @Override protected LogicalPlan rule(Aggregate agg) { List<Expression> groupings = agg.groupings(); List<Expression> prunedGroupings = new ArrayList<>(); for (Expression g : groupings) { if (g.foldable()) { prunedGroupings.add(g); } } if (prunedGroupings.size() > 0) { List<Expression> newGroupings = new ArrayList<>(groupings); newGroupings.removeAll(prunedGroupings); return new Aggregate(agg.source(), agg.child(), newGroupings, agg.aggregates()); } return agg; } } static
PruneLiteralsInGroupBy
java
apache__rocketmq
store/src/main/java/org/apache/rocketmq/store/PutMessageSpinLock.java
{ "start": 992, "end": 1451 }
class ____ implements PutMessageLock { //true: Can lock, false : in lock. private AtomicBoolean putMessageSpinLock = new AtomicBoolean(true); @Override public void lock() { boolean flag; do { flag = this.putMessageSpinLock.compareAndSet(true, false); } while (!flag); } @Override public void unlock() { this.putMessageSpinLock.compareAndSet(false, true); } }
PutMessageSpinLock
java
ReactiveX__RxJava
src/test/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableConcatMapSchedulerTest.java
{ "start": 41072, "end": 44032 }
class ____ extends Flowable<Object> implements Supplier<Object> { final TestSubscriber<Object> ts; EmptyDisposingFlowable(TestSubscriber<Object> ts) { this.ts = ts; } @Override protected void subscribeActual(@NonNull Subscriber<? super @NonNull Object> subscriber) { EmptySubscription.complete(subscriber); } @Override public @NonNull Object get() throws Throwable { ts.cancel(); return null; } } @Test public void scalarInnerEmptyDisposeDelayError() { TestSubscriber<Object> ts = new TestSubscriber<>(); Flowable.just(1) .hide() .concatMapDelayError(v -> new EmptyDisposingFlowable(ts), true, 2, ImmediateThinScheduler.INSTANCE ) .subscribe(ts); ts.assertEmpty(); } @Test public void mainErrorInnerNextIgnoreCancel() { AtomicReference<Subscriber<? super Integer>> ref = new AtomicReference<>(); Flowable.just(1).concatWith(Flowable.<Integer>error(new TestException())) .concatMap(v -> Flowable.<Integer>fromPublisher(ref::set), 2, ImmediateThinScheduler.INSTANCE) .doOnError(e -> { ref.get().onSubscribe(new BooleanSubscription()); ref.get().onNext(1); }) .test() .assertFailure(TestException.class); } @Test public void scalarSupplierMainError() { PublishProcessor<Integer> pp = PublishProcessor.create(); TestSubscriber<Integer> ts = pp.concatMap(v -> Flowable.fromCallable(() -> { pp.onError(new TestException()); return 2; }), 2, ImmediateThinScheduler.INSTANCE) .test() ; pp.onNext(1); ts.assertFailure(TestException.class); } @Test public void mainErrorInnerErrorRace() throws Throwable { TestHelper.withErrorTracking(errors -> { TestException ex1 = new TestException(); TestException ex2 = new TestException(); for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) { AtomicReference<Subscriber<? super Integer>> ref1 = new AtomicReference<>(); AtomicReference<Subscriber<? 
super Integer>> ref2 = new AtomicReference<>(); TestSubscriber<Integer> ts = Flowable.<Integer>fromPublisher(ref1::set) .concatMap(v -> Flowable.<Integer>fromPublisher(ref2::set), 2, ImmediateThinScheduler.INSTANCE) .test(); ref1.get().onSubscribe(new BooleanSubscription()); ref1.get().onNext(1); ref2.get().onSubscribe(new BooleanSubscription()); TestHelper.race(() -> ref1.get().onError(ex1), () -> ref2.get().onError(ex2)); ts.assertError(RuntimeException.class); errors.clear(); } }); } }
EmptyDisposingFlowable
java
alibaba__fastjson
src/test/java/com/alibaba/json/bvt/serializer/TestSpecial4.java
{ "start": 144, "end": 657 }
class ____ extends TestCase { public void test_0() throws Exception { StringBuilder buf = new StringBuilder(); buf.append('\r'); buf.append('\r'); for (int i = 0; i < 1000; ++i) { buf.append('\u2028'); } VO vo = new VO(); vo.setValue(buf.toString()); String text = JSON.toJSONString(vo); VO vo2 = JSON.parseObject(text, VO.class); Assert.assertEquals(vo.value, vo2.value); } public static
TestSpecial4
java
square__retrofit
retrofit/java-test/src/test/java/retrofit2/RequestFactoryTest.java
{ "start": 49627, "end": 50118 }
class ____ { @GET Call<ResponseBody> method(@Url String url) { return null; } } Request request = buildRequest(Example.class, "https://example2.com/foo/bar/"); assertThat(request.method()).isEqualTo("GET"); assertThat(request.headers().size()).isEqualTo(0); assertThat(request.url().toString()).isEqualTo("https://example2.com/foo/bar/"); assertThat(request.body()).isNull(); } @Test public void getWithJavaUriUrlAbsolute() {
Example
java
resilience4j__resilience4j
resilience4j-micronaut/src/main/java/io/github/resilience4j/micronaut/timelimiter/TimeLimiterRegistryFactory.java
{ "start": 2139, "end": 7662 }
class ____ { @Bean @TimeLimiterQualifier public CompositeCustomizer<TimeLimiterConfigCustomizer> compositeTimeLimiterCustomizer(@Nullable List<TimeLimiterConfigCustomizer> configCustomizers) { return new CompositeCustomizer<>(configCustomizers); } @Singleton @Requires(beans = CommonTimeLimiterConfigurationProperties.class) public TimeLimiterRegistry timeLimiterRegistry( CommonTimeLimiterConfigurationProperties timeLimiterConfigurationProperties, @TimeLimiterQualifier EventConsumerRegistry<TimeLimiterEvent> timeLimiterEventConsumerRegistry, @TimeLimiterQualifier RegistryEventConsumer<TimeLimiter> timeLimiterRegistryEventConsumer, @TimeLimiterQualifier CompositeCustomizer<TimeLimiterConfigCustomizer> compositeTimeLimiterCustomizer) { TimeLimiterRegistry timeLimiterRegistry = createTimeLimiterRegistry(timeLimiterConfigurationProperties, timeLimiterRegistryEventConsumer, compositeTimeLimiterCustomizer); registerEventConsumer(timeLimiterRegistry, timeLimiterEventConsumerRegistry, timeLimiterConfigurationProperties); initTimeLimiterRegistry(timeLimiterRegistry, timeLimiterConfigurationProperties, compositeTimeLimiterCustomizer); return timeLimiterRegistry; } @Bean @Primary @TimeLimiterQualifier public RegistryEventConsumer<TimeLimiter> timeLimiterRegistryEventConsumer( Optional<List<RegistryEventConsumer<TimeLimiter>>> optionalRegistryEventConsumers ) { return new CompositeRegistryEventConsumer<>( optionalRegistryEventConsumers.orElseGet(ArrayList::new) ); } @Bean @TimeLimiterQualifier public EventConsumerRegistry<TimeLimiterEvent> timeLimiterEventsConsumerRegistry() { return new DefaultEventConsumerRegistry<>(); } void initTimeLimiterRegistry(TimeLimiterRegistry timeLimiterRegistry, CommonTimeLimiterConfigurationProperties timeLimiterConfigurationProperties, CompositeCustomizer<TimeLimiterConfigCustomizer> compositeTimeLimiterCustomizer) { timeLimiterConfigurationProperties.getInstances().forEach( (name, properties) -> timeLimiterRegistry.timeLimiter(name, 
timeLimiterConfigurationProperties.createTimeLimiterConfig(name, properties, compositeTimeLimiterCustomizer)) ); } /** * Initializes a timeLimiter registry. * * @param timeLimiterConfigurationProperties The timeLimiter configuration properties. * @return a timeLimiterRegistry */ private static TimeLimiterRegistry createTimeLimiterRegistry( CommonTimeLimiterConfigurationProperties timeLimiterConfigurationProperties, RegistryEventConsumer<TimeLimiter> timeLimiterRegistryEventConsumer, CompositeCustomizer<TimeLimiterConfigCustomizer> compositeTimeLimiterCustomizer) { Map<String, TimeLimiterConfig> configs = timeLimiterConfigurationProperties.getConfigs() .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, entry -> timeLimiterConfigurationProperties.createTimeLimiterConfig( entry.getKey(), entry.getValue(), compositeTimeLimiterCustomizer))); return TimeLimiterRegistry.of(configs, timeLimiterRegistryEventConsumer, timeLimiterConfigurationProperties.getTags()); } /** * Registers the post creation consumer function that registers the consumer events to the timeLimiters. * * @param timeLimiterRegistry The timeLimiter registry. * @param eventConsumerRegistry The event consumer registry. 
* @param timeLimiterConfigurationProperties timeLimiter configuration properties */ private static void registerEventConsumer(TimeLimiterRegistry timeLimiterRegistry, EventConsumerRegistry<TimeLimiterEvent> eventConsumerRegistry, CommonTimeLimiterConfigurationProperties timeLimiterConfigurationProperties) { timeLimiterRegistry.getEventPublisher() .onEntryAdded(event -> registerEventConsumer(eventConsumerRegistry, event.getAddedEntry(), timeLimiterConfigurationProperties)) .onEntryReplaced(event -> registerEventConsumer(eventConsumerRegistry, event.getNewEntry(), timeLimiterConfigurationProperties)) .onEntryRemoved(event -> unregisterEventConsumer(eventConsumerRegistry, event.getRemovedEntry())); } private static void unregisterEventConsumer(EventConsumerRegistry<TimeLimiterEvent> eventConsumerRegistry, TimeLimiter timeLimiter) { eventConsumerRegistry.removeEventConsumer(timeLimiter.getName()); } private static void registerEventConsumer(EventConsumerRegistry<TimeLimiterEvent> eventConsumerRegistry, TimeLimiter timeLimiter, CommonTimeLimiterConfigurationProperties timeLimiterConfigurationProperties) { int eventConsumerBufferSize = Optional.ofNullable(timeLimiterConfigurationProperties.getInstanceProperties(timeLimiter.getName())) .map(io.github.resilience4j.common.timelimiter.configuration.CommonTimeLimiterConfigurationProperties.InstanceProperties::getEventConsumerBufferSize) .orElse(100); timeLimiter.getEventPublisher().onEvent(eventConsumerRegistry.createEventConsumer(timeLimiter.getName(), eventConsumerBufferSize)); } }
TimeLimiterRegistryFactory
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java
{ "start": 31317, "end": 31760 }
interface ____ extends Builder { /** * Appends an ordinal to the builder. */ SingletonOrdinalsBuilder appendOrd(int value); /** * Appends a single ord for the next N positions */ SingletonOrdinalsBuilder appendOrds(int ord, int length); SingletonOrdinalsBuilder appendOrds(int[] values, int from, int length, int minOrd, int maxOrd); }
SingletonOrdinalsBuilder
java
apache__camel
components/camel-dropbox/src/main/java/org/apache/camel/component/dropbox/DropboxComponent.java
{ "start": 1359, "end": 3886 }
class ____ extends DefaultComponent { public DropboxComponent() { this(null); } public DropboxComponent(CamelContext context) { super(context); } /** * Create a camel endpoint after passing validation on the incoming url. * * @param uri the full URI of the endpoint * @param remaining the remaining part of the URI without the query parameters or component prefix * @param parameters the optional parameters passed in * @return the camel endpoint * @throws Exception */ @Override protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception { DropboxConfiguration configuration = new DropboxConfiguration(); // set options from component //set auth parameters configuration.setAccessToken((String) parameters.get("accessToken")); if (parameters.get("expireIn") != null) { configuration.setExpireIn(Long.valueOf((String) parameters.get("expireIn"))); } configuration.setRefreshToken((String) parameters.get("refreshToken")); configuration.setApiKey((String) parameters.get("apiKey")); configuration.setApiSecret((String) parameters.get("apiSecret")); configuration.setLocalPath((String) parameters.get("localPath")); configuration.setRemotePath( parameters.get("remotePath") != null ? ((String) parameters.get("remotePath")).replaceAll("\\s", "+") : null); configuration.setNewRemotePath((String) parameters.get("newRemotePath")); configuration.setQuery((String) parameters.get("query")); configuration.setOperation(DropboxOperation.valueOf(remaining)); configuration.setClientIdentifier( parameters.get("clientIdentifier") == null ? 
DropboxPropertyManager.getInstance().getProperty("clientIdentifier") : (String) parameters.get("clientIdentifier")); if (parameters.get("uploadMode") != null) { configuration.setUploadMode(DropboxUploadMode.valueOf((String) parameters.get("uploadMode"))); } DropboxEndpoint endpoint = new DropboxEndpoint(uri, this, configuration); setProperties(endpoint, parameters); //pass validation test DropboxConfigurationValidator.validateCommonProperties(configuration); return endpoint; } }
DropboxComponent
java
quarkusio__quarkus
independent-projects/qute/core/src/main/java/io/quarkus/qute/SetSectionHelper.java
{ "start": 4309, "end": 6955 }
class ____ implements SectionHelperFactory<SetSectionHelper> { public static final String HINT_PREFIX = "<set#"; @Override public List<String> getDefaultAliases() { return ImmutableList.of(SET, LET); } @Override public ParametersInfo getParameters() { return ParametersInfo.EMPTY; } @Override public MissingEndTagStrategy missingEndTagStrategy() { return MissingEndTagStrategy.BIND_TO_PARENT; } @Override public SetSectionHelper initialize(SectionInitContext context) { Map<String, Expression> params = new HashMap<>(); Map<String, Expression> keys = null; for (Entry<String, String> e : context.getParameters().entrySet()) { String key = e.getKey(); if (key.endsWith("?")) { // foo? -> foo key = key.substring(0, key.length() - 1); if (keys == null) { keys = new HashMap<>(); } keys.put(key, context.parseValue(key + "??")); } params.put(key, context.getExpression(key)); } return new SetSectionHelper(params, keys); } @Override public Scope initializeBlock(Scope previousScope, BlockInfo block) { if (block.isMainBlock()) { Scope newScope = new Scope(previousScope); for (Entry<String, String> e : block.getParameters().entrySet()) { String key = e.getKey(); boolean isDefaultValue = key.endsWith("?"); if (isDefaultValue) { // foo? -> foo key = key.substring(0, key.length() - 1); } Expression expr = block.addExpression(key, e.getValue()); if (expr.hasTypeInfo()) { // Do not override the binding for a default value boolean add = !isDefaultValue || previousScope.getBinding(key) == null; if (add) { // item.name becomes item<set#1>.name newScope.putBinding(key, key + HINT_PREFIX + expr.getGeneratedId() + ">"); } } else { newScope.putBinding(key, null); } } return newScope; } else { return previousScope; } } } }
Factory
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java
{ "start": 5556, "end": 36712 }
class ____ extends AbstractService implements NodeStatusUpdater { public static final String YARN_NODEMANAGER_DURATION_TO_TRACK_STOPPED_CONTAINERS = YarnConfiguration.NM_PREFIX + "duration-to-track-stopped-containers"; private static final Logger LOG = LoggerFactory.getLogger(NodeStatusUpdaterImpl.class); private final Object heartbeatMonitor = new Object(); private final Object shutdownMonitor = new Object(); private final Context context; private final Dispatcher dispatcher; private NodeId nodeId; private long nextHeartBeatInterval; private ResourceTracker resourceTracker; private Resource totalResource; private Resource physicalResource; private int httpPort; private String nodeManagerVersionId; private String minimumResourceManagerVersion; private volatile boolean isStopped; private boolean tokenKeepAliveEnabled; private long tokenRemovalDelayMs; /** Keeps track of when the next keep alive request should be sent for an app*/ private Map<ApplicationId, Long> appTokenKeepAliveMap = new HashMap<ApplicationId, Long>(); private Random keepAliveDelayRandom = new Random(); // It will be used to track recently stopped containers on node manager, this // is to avoid the misleading no-such-container exception messages on NM, when // the AM finishes it informs the RM to stop the may-be-already-completed // containers. private final Map<ContainerId, Long> recentlyStoppedContainers; // Save the reported completed containers in case of lost heartbeat responses. // These completed containers will be sent again till a successful response. private final Map<ContainerId, ContainerStatus> pendingCompletedContainers; // Duration for which to track recently stopped container. 
private long durationToTrackStoppedContainers; private boolean logAggregationEnabled; private final List<LogAggregationReport> logAggregationReportForAppsTempList; private final NodeHealthCheckerService healthChecker; private final NodeManagerMetrics metrics; private Runnable statusUpdaterRunnable; private Thread statusUpdater; private boolean failedToConnect = false; private long rmIdentifier = ResourceManagerConstants.RM_INVALID_IDENTIFIER; private boolean registeredWithRM = false; Set<ContainerId> pendingContainersToRemove = new HashSet<ContainerId>(); private NMNodeLabelsHandler nodeLabelsHandler; private NMNodeAttributesHandler nodeAttributesHandler; private NodeLabelsProvider nodeLabelsProvider; private NodeAttributesProvider nodeAttributesProvider; private long tokenSequenceNo; private boolean timelineServiceV2Enabled; public NodeStatusUpdaterImpl(Context context, Dispatcher dispatcher, NodeHealthCheckerService healthChecker, NodeManagerMetrics metrics) { super(NodeStatusUpdaterImpl.class.getName()); this.healthChecker = healthChecker; this.context = context; this.dispatcher = dispatcher; this.metrics = metrics; this.recentlyStoppedContainers = new LinkedHashMap<ContainerId, Long>(); this.pendingCompletedContainers = new HashMap<ContainerId, ContainerStatus>(); this.logAggregationReportForAppsTempList = new ArrayList<LogAggregationReport>(); } @Override public void setNodeAttributesProvider(NodeAttributesProvider provider) { this.nodeAttributesProvider = provider; } @Override public void setNodeLabelsProvider(NodeLabelsProvider provider) { this.nodeLabelsProvider = provider; } @Override protected void serviceInit(Configuration conf) throws Exception { this.totalResource = NodeManagerHardwareUtils.getNodeResources(conf); long memoryMb = totalResource.getMemorySize(); float vMemToPMem = conf.getFloat( YarnConfiguration.NM_VMEM_PMEM_RATIO, YarnConfiguration.DEFAULT_NM_VMEM_PMEM_RATIO); long virtualMemoryMb = (long)Math.ceil(memoryMb * vMemToPMem); int 
virtualCores = totalResource.getVirtualCores(); // Update configured resources via plugins. updateConfiguredResourcesViaPlugins(totalResource); LOG.info("Nodemanager resources is set to: {}.", totalResource); metrics.addResource(totalResource); // Get actual node physical resources long physicalMemoryMb = memoryMb; int physicalCores = virtualCores; ResourceCalculatorPlugin rcp = ResourceCalculatorPlugin.getNodeResourceMonitorPlugin(conf); if (rcp != null) { physicalMemoryMb = rcp.getPhysicalMemorySize() / (1024 * 1024); physicalCores = rcp.getNumProcessors(); } this.physicalResource = Resource.newInstance(physicalMemoryMb, physicalCores); this.tokenKeepAliveEnabled = isTokenKeepAliveEnabled(conf); this.tokenRemovalDelayMs = conf.getInt(YarnConfiguration.RM_NM_EXPIRY_INTERVAL_MS, YarnConfiguration.DEFAULT_RM_NM_EXPIRY_INTERVAL_MS); this.minimumResourceManagerVersion = conf.get( YarnConfiguration.NM_RESOURCEMANAGER_MINIMUM_VERSION, YarnConfiguration.DEFAULT_NM_RESOURCEMANAGER_MINIMUM_VERSION); nodeLabelsHandler = createNMNodeLabelsHandler(nodeLabelsProvider); nodeAttributesHandler = createNMNodeAttributesHandler(nodeAttributesProvider); // Default duration to track stopped containers on nodemanager is 10Min. // This should not be assigned very large value as it will remember all the // containers stopped during that time. 
durationToTrackStoppedContainers = conf.getLong(YARN_NODEMANAGER_DURATION_TO_TRACK_STOPPED_CONTAINERS, 600000); if (durationToTrackStoppedContainers < 0) { String message = "Invalid configuration for " + YARN_NODEMANAGER_DURATION_TO_TRACK_STOPPED_CONTAINERS + " default " + "value is 10Min(600000)."; LOG.error(message); throw new YarnException(message); } LOG.debug("{} :{}", YARN_NODEMANAGER_DURATION_TO_TRACK_STOPPED_CONTAINERS, durationToTrackStoppedContainers); super.serviceInit(conf); LOG.info("Initialized nodemanager with : physical-memory={} virtual-memory={} " + "virtual-cores={}.", memoryMb, virtualMemoryMb, virtualCores); this.logAggregationEnabled = conf.getBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, YarnConfiguration.DEFAULT_LOG_AGGREGATION_ENABLED); this.timelineServiceV2Enabled = YarnConfiguration. timelineServiceV2Enabled(conf); } @Override protected void serviceStart() throws Exception { // NodeManager is the last service to start, so NodeId is available. this.nodeId = this.context.getNodeId(); LOG.info("Node ID assigned is : {}.", this.nodeId); this.httpPort = this.context.getHttpPort(); this.nodeManagerVersionId = YarnVersionInfo.getVersion(); try { // Registration has to be in start so that ContainerManager can get the // perNM tokens needed to authenticate ContainerTokens. this.resourceTracker = getRMClient(); registerWithRM(); super.serviceStart(); startStatusUpdater(); } catch (Exception e) { String errorMessage = "Unexpected error starting NodeStatusUpdater"; LOG.error(errorMessage, e); throw new YarnRuntimeException(e); } } @Override protected void serviceStop() throws Exception { // the isStopped check is for avoiding multiple unregistrations. synchronized(shutdownMonitor) { if (this.registeredWithRM && !this.isStopped && !isNMUnderSupervisionWithRecoveryEnabled() && !context.getDecommissioned() && !failedToConnect) { unRegisterNM(); } // Interrupt the updater. 
this.isStopped = true; stopRMProxy(); super.serviceStop(); } } private boolean isNMUnderSupervisionWithRecoveryEnabled() { Configuration config = getConfig(); return config.getBoolean(YarnConfiguration.NM_RECOVERY_ENABLED, YarnConfiguration.DEFAULT_NM_RECOVERY_ENABLED) && config.getBoolean(YarnConfiguration.NM_RECOVERY_SUPERVISED, YarnConfiguration.DEFAULT_NM_RECOVERY_SUPERVISED); } private void unRegisterNM() { RecordFactory recordFactory = RecordFactoryPBImpl.get(); UnRegisterNodeManagerRequest request = recordFactory .newRecordInstance(UnRegisterNodeManagerRequest.class); request.setNodeId(this.nodeId); try { resourceTracker.unRegisterNodeManager(request); LOG.info("Successfully Unregistered the Node {} with ResourceManager.", this.nodeId); } catch (Exception e) { LOG.warn("Unregistration of the Node {} failed.", this.nodeId, e); } } protected void rebootNodeStatusUpdaterAndRegisterWithRM() { // Interrupt the updater. synchronized(shutdownMonitor) { if(this.isStopped) { LOG.info("Currently being shutdown. 
Aborting reboot"); return; } this.isStopped = true; sendOutofBandHeartBeat(); try { statusUpdater.join(); registerWithRM(); statusUpdater = new SubjectInheritingThread(statusUpdaterRunnable, "Node Status Updater"); this.isStopped = false; statusUpdater.start(); LOG.info("NodeStatusUpdater thread is reRegistered and restarted"); } catch (Exception e) { String errorMessage = "Unexpected error rebooting NodeStatusUpdater"; LOG.error(errorMessage, e); throw new YarnRuntimeException(e); } } } @VisibleForTesting protected void stopRMProxy() { if(this.resourceTracker != null) { RPC.stopProxy(this.resourceTracker); } } @Private protected boolean isTokenKeepAliveEnabled(Configuration conf) { return conf.getBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, YarnConfiguration.DEFAULT_LOG_AGGREGATION_ENABLED) && UserGroupInformation.isSecurityEnabled(); } @VisibleForTesting protected ResourceTracker getRMClient() throws IOException { Configuration conf = getConfig(); return ServerRMProxy.createRMProxy(conf, ResourceTracker.class); } private void updateConfiguredResourcesViaPlugins( Resource configuredResource) throws YarnException { ResourcePluginManager pluginManager = context.getResourcePluginManager(); if (pluginManager != null && pluginManager.getNameToPlugins() != null) { // Update configured resource for (ResourcePlugin resourcePlugin : pluginManager.getNameToPlugins() .values()) { if (resourcePlugin.getNodeResourceHandlerInstance() != null) { resourcePlugin.getNodeResourceHandlerInstance() .updateConfiguredResource(configuredResource); } } } } @VisibleForTesting protected void registerWithRM() throws YarnException, IOException { RegisterNodeManagerResponse regNMResponse; Set<NodeLabel> nodeLabels = nodeLabelsHandler.getNodeLabelsForRegistration(); Set<NodeAttribute> nodeAttributes = nodeAttributesHandler.getNodeAttributesForRegistration(); // Synchronize NM-RM registration with // ContainerManagerImpl#increaseContainersResource and // 
ContainerManagerImpl#startContainers to avoid race condition // during RM recovery synchronized (this.context) { List<NMContainerStatus> containerReports = getNMContainerStatuses(); NodeStatus nodeStatus = getNodeStatus(0); RegisterNodeManagerRequest request = RegisterNodeManagerRequest.newInstance(nodeId, httpPort, totalResource, nodeManagerVersionId, containerReports, getRunningApplications(), nodeLabels, physicalResource, nodeAttributes, nodeStatus); if (containerReports != null && !containerReports.isEmpty()) { LOG.info("Registering with RM using containers.size : {}.", containerReports.size()); } if (logAggregationEnabled) { // pull log aggregation status for application running in this NM List<LogAggregationReport> logAggregationReports = context.getNMLogAggregationStatusTracker() .pullCachedLogAggregationReports(); if (logAggregationReports != null && !logAggregationReports.isEmpty()) { LOG.debug("The cache log aggregation status size:{}", logAggregationReports.size()); request.setLogAggregationReportsForApps(logAggregationReports); } } regNMResponse = resourceTracker.registerNodeManager(request); // Make sure rmIdentifier is set before we release the lock this.rmIdentifier = regNMResponse.getRMIdentifier(); } // if the Resource Manager instructs NM to shutdown. if (NodeAction.SHUTDOWN.equals(regNMResponse.getNodeAction())) { String message = "Message from ResourceManager: " + regNMResponse.getDiagnosticsMessage(); throw new YarnRuntimeException( "Received SHUTDOWN signal from Resourcemanager, Registration of NodeManager failed, " + message); } // if ResourceManager version is too old then shutdown if (!minimumResourceManagerVersion.equals("NONE")){ if (minimumResourceManagerVersion.equals("EqualToNM")){ minimumResourceManagerVersion = nodeManagerVersionId; } String rmVersion = regNMResponse.getRMVersion(); if (rmVersion == null) { String message = "The Resource Manager's did not return a version. 
" + "Valid version cannot be checked."; throw new YarnRuntimeException("Shutting down the Node Manager. " + message); } if (VersionUtil.compareVersions(rmVersion,minimumResourceManagerVersion) < 0) { String message = "The Resource Manager's version (" + rmVersion +") is less than the minimum " + "allowed version " + minimumResourceManagerVersion; throw new YarnRuntimeException("Shutting down the Node Manager on RM " + "version error, " + message); } } this.registeredWithRM = true; MasterKey masterKey = regNMResponse.getContainerTokenMasterKey(); // do this now so that its set before we start heartbeating to RM // It is expected that status updater is started by this point and // RM gives the shared secret in registration during // StatusUpdater#start(). if (masterKey != null) { this.context.getContainerTokenSecretManager().setMasterKey(masterKey); } masterKey = regNMResponse.getNMTokenMasterKey(); if (masterKey != null) { this.context.getNMTokenSecretManager().setMasterKey(masterKey); } StringBuilder successfullRegistrationMsg = new StringBuilder(); successfullRegistrationMsg.append("Registered with ResourceManager as ") .append(this.nodeId); Resource newResource = regNMResponse.getResource(); if (newResource != null) { updateNMResource(newResource); successfullRegistrationMsg.append(" with updated total resource of ") .append(this.totalResource); } else { successfullRegistrationMsg.append(" with total resource of ") .append(this.totalResource); } successfullRegistrationMsg.append(nodeLabelsHandler .verifyRMRegistrationResponseForNodeLabels(regNMResponse)); successfullRegistrationMsg.append(nodeAttributesHandler .verifyRMRegistrationResponseForNodeAttributes(regNMResponse)); LOG.info(successfullRegistrationMsg.toString()); } private List<ApplicationId> createKeepAliveApplicationList() { if (!tokenKeepAliveEnabled) { return Collections.emptyList(); } List<ApplicationId> appList = new ArrayList<ApplicationId>(); for (Iterator<Entry<ApplicationId, Long>> i = 
this.appTokenKeepAliveMap.entrySet().iterator(); i.hasNext();) { Entry<ApplicationId, Long> e = i.next(); ApplicationId appId = e.getKey(); Long nextKeepAlive = e.getValue(); if (!this.context.getApplications().containsKey(appId)) { // Remove if the application has finished. i.remove(); } else if (System.currentTimeMillis() > nextKeepAlive) { // KeepAlive list for the next hearbeat. appList.add(appId); trackAppForKeepAlive(appId); } } return appList; } @VisibleForTesting protected NodeStatus getNodeStatus(int responseId) throws IOException { NodeHealthStatus nodeHealthStatus = this.context.getNodeHealthStatus(); nodeHealthStatus.setHealthReport(healthChecker.getHealthReport()); nodeHealthStatus.setIsNodeHealthy(healthChecker.isHealthy()); nodeHealthStatus.setLastHealthReportTime(healthChecker .getLastHealthReportTime()); LOG.debug("Node's health-status : {}, {}", nodeHealthStatus.getIsNodeHealthy(), nodeHealthStatus.getHealthReport()); List<ContainerStatus> containersStatuses = getContainerStatuses(); ResourceUtilization containersUtilization = getContainersUtilization(); ResourceUtilization nodeUtilization = getNodeUtilization(); List<org.apache.hadoop.yarn.api.records.Container> increasedContainers = getIncreasedContainers(); NodeStatus nodeStatus = NodeStatus.newInstance(nodeId, responseId, containersStatuses, createKeepAliveApplicationList(), nodeHealthStatus, containersUtilization, nodeUtilization, increasedContainers); nodeStatus.setOpportunisticContainersStatus( getOpportunisticContainersStatus()); return nodeStatus; } /** * Get the status of the OPPORTUNISTIC containers. * @return the status of the OPPORTUNISTIC containers. */ private OpportunisticContainersStatus getOpportunisticContainersStatus() { OpportunisticContainersStatus status = this.context.getContainerManager().getOpportunisticContainersStatus(); return status; } /** * Get the aggregated utilization of the containers in this node. * @return Resource utilization of all the containers. 
*/ private ResourceUtilization getContainersUtilization() { ContainersMonitor containersMonitor = this.context.getContainerManager().getContainersMonitor(); return containersMonitor.getContainersUtilization(); } /** * Get the utilization of the node. This includes the containers. * @return Resource utilization of the node. */ private ResourceUtilization getNodeUtilization() { NodeResourceMonitorImpl nodeResourceMonitor = (NodeResourceMonitorImpl) this.context.getNodeResourceMonitor(); return nodeResourceMonitor.getUtilization(); } /* Get the containers whose resource has been increased since last * NM-RM heartbeat. */ private List<org.apache.hadoop.yarn.api.records.Container> getIncreasedContainers() { List<org.apache.hadoop.yarn.api.records.Container> increasedContainers = new ArrayList<>( this.context.getIncreasedContainers().values()); for (org.apache.hadoop.yarn.api.records.Container container : increasedContainers) { this.context.getIncreasedContainers().remove(container.getId()); } return increasedContainers; } // Update NM's Resource. private void updateNMResource(Resource resource) { metrics.addResource(Resources.subtract(resource, totalResource)); this.totalResource = resource; // Update the containers monitor ContainersMonitor containersMonitor = this.context.getContainerManager().getContainersMonitor(); containersMonitor.setAllocatedResourcesForContainers(totalResource); } // Iterate through the NMContext and clone and get all the containers' // statuses. If it's a completed container, add into the // recentlyStoppedContainers collections. 
@VisibleForTesting protected List<ContainerStatus> getContainerStatuses() throws IOException { List<ContainerStatus> containerStatuses = new ArrayList<ContainerStatus>(); for (Container container : this.context.getContainers().values()) { ContainerId containerId = container.getContainerId(); ApplicationId applicationId = containerId.getApplicationAttemptId() .getApplicationId(); org.apache.hadoop.yarn.api.records.ContainerStatus containerStatus = container.cloneAndGetContainerStatus(); if (containerStatus.getState() == ContainerState.COMPLETE) { if (isApplicationStopped(applicationId)) { LOG.debug("{} is completing, remove {} from NM context.", applicationId, containerId); context.getContainers().remove(containerId); pendingCompletedContainers.put(containerId, containerStatus); } else { if (!isContainerRecentlyStopped(containerId)) { pendingCompletedContainers.put(containerId, containerStatus); } } // Adding to finished containers cache. Cache will keep it around at // least for #durationToTrackStoppedContainers duration. In the // subsequent call to stop container it will get removed from cache. addCompletedContainer(containerId); } else { containerStatuses.add(containerStatus); } } containerStatuses.addAll(pendingCompletedContainers.values()); if (!containerStatuses.isEmpty()) { LOG.debug("Sending out {} container statuses: {}", containerStatuses.size(), containerStatuses); } return containerStatuses; } private List<ApplicationId> getRunningApplications() { List<ApplicationId> runningApplications = new ArrayList<ApplicationId>(); for (Entry<ApplicationId, Application> appEntry : this.context .getApplications().entrySet()) { if (ApplicationState.FINISHED != appEntry.getValue() .getApplicationState()) { runningApplications.add(appEntry.getKey()); } } LOG.info("Running Applications Size : {}.", runningApplications.size()); return runningApplications; } // These NMContainerStatus are sent on NM registration and used by YARN only. 
private List<NMContainerStatus> getNMContainerStatuses() throws IOException { List<NMContainerStatus> containerStatuses = new ArrayList<NMContainerStatus>(); for (Container container : this.context.getContainers().values()) { ContainerId containerId = container.getContainerId(); ApplicationId applicationId = containerId.getApplicationAttemptId() .getApplicationId(); if (!this.context.getApplications().containsKey(applicationId)) { context.getContainers().remove(containerId); continue; } NMContainerStatus status = container.getNMContainerStatus(); containerStatuses.add(status); if (status.getContainerState() == ContainerState.COMPLETE) { // Adding to finished containers cache. Cache will keep it around at // least for #durationToTrackStoppedContainers duration. In the // subsequent call to stop container it will get removed from cache. addCompletedContainer(containerId); } } if (!containerStatuses.isEmpty()) { LOG.info("Sending out {} container NM container statuses: {}.", containerStatuses.size(), containerStatuses); } return containerStatuses; } private boolean isApplicationStopped(ApplicationId applicationId) { if (!this.context.getApplications().containsKey(applicationId)) { return true; } ApplicationState applicationState = this.context.getApplications().get( applicationId).getApplicationState(); if (applicationState == ApplicationState.FINISHING_CONTAINERS_WAIT || applicationState == ApplicationState.APPLICATION_RESOURCES_CLEANINGUP || applicationState == ApplicationState.FINISHED) { return true; } else { return false; } } @Override public void addCompletedContainer(ContainerId containerId) { synchronized (recentlyStoppedContainers) { removeVeryOldStoppedContainersFromCache(); if (!recentlyStoppedContainers.containsKey(containerId)) { recentlyStoppedContainers.put(containerId, System.currentTimeMillis() + durationToTrackStoppedContainers); } } } @VisibleForTesting @Private public void removeOrTrackCompletedContainersFromContext( List<ContainerId> containerIds) 
{ Set<ContainerId> removedContainers = new HashSet<ContainerId>(); pendingContainersToRemove.addAll(containerIds); Iterator<ContainerId> iter = pendingContainersToRemove.iterator(); while (iter.hasNext()) { ContainerId containerId = iter.next(); // remove the container only if the container is at DONE state Container nmContainer = context.getContainers().get(containerId); if (nmContainer == null) { iter.remove(); } else if (nmContainer.getContainerState().equals( org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState.DONE)) { context.getContainers().remove(containerId); removedContainers.add(containerId); iter.remove(); } pendingCompletedContainers.remove(containerId); } if (!removedContainers.isEmpty()) { LOG.info("Removed completed containers from NM context: {}.", removedContainers); } } private void trackAppsForKeepAlive(List<ApplicationId> appIds) { if (tokenKeepAliveEnabled && appIds != null && appIds.size() > 0) { for (ApplicationId appId : appIds) { trackAppForKeepAlive(appId); } } } private void trackAppForKeepAlive(ApplicationId appId) { // Next keepAlive request for app between 0.7 & 0.9 of when the token will // likely expire. 
long nextTime = System.currentTimeMillis() + (long) (0.7 * tokenRemovalDelayMs + (0.2 * tokenRemovalDelayMs * keepAliveDelayRandom.nextInt(100))/100); appTokenKeepAliveMap.put(appId, nextTime); } @Override public void sendOutofBandHeartBeat() { synchronized (this.heartbeatMonitor) { this.heartbeatMonitor.notify(); } } @VisibleForTesting Thread.State getStatusUpdaterThreadState() { return statusUpdater.getState(); } public boolean isContainerRecentlyStopped(ContainerId containerId) { synchronized (recentlyStoppedContainers) { return recentlyStoppedContainers.containsKey(containerId); } } @Override public void clearFinishedContainersFromCache() { synchronized (recentlyStoppedContainers) { recentlyStoppedContainers.clear(); } } @Private @VisibleForTesting public void removeVeryOldStoppedContainersFromCache() { synchronized (recentlyStoppedContainers) { long currentTime = System.currentTimeMillis(); Iterator<Entry<ContainerId, Long>> i = recentlyStoppedContainers.entrySet().iterator(); while (i.hasNext()) { Entry<ContainerId, Long> mapEntry = i.next(); ContainerId cid = mapEntry.getKey(); if (mapEntry.getValue() >= currentTime) { break; } if (!context.getContainers().containsKey(cid)) { ApplicationId appId = cid.getApplicationAttemptId().getApplicationId(); if (isApplicationStopped(appId)) { i.remove(); try { context.getNMStateStore().removeContainer(cid); } catch (IOException e) { LOG.error("Unable to remove container {} in store.", cid, e); } } } } } } @Override public long getRMIdentifier() { return this.rmIdentifier; } private static Map<ApplicationId, Credentials> parseCredentials( Map<ApplicationId, ByteBuffer> systemCredentials) throws IOException { Map<ApplicationId, Credentials> map = new HashMap<ApplicationId, Credentials>(); for (Map.Entry<ApplicationId, ByteBuffer> entry : systemCredentials.entrySet()) { Credentials credentials = new Credentials(); DataInputByteBuffer buf = new DataInputByteBuffer(); ByteBuffer buffer = entry.getValue(); buffer.rewind(); 
buf.reset(buffer); credentials.readTokenStorageStream(buf); map.put(entry.getKey(), credentials); } if (LOG.isDebugEnabled()) { for (Map.Entry<ApplicationId, Credentials> entry : map.entrySet()) { LOG.debug("Retrieved credentials from RM for {}: {}", entry.getKey(), entry.getValue().getAllTokens()); } } return map; } protected void startStatusUpdater() { statusUpdaterRunnable = new StatusUpdaterRunnable(); statusUpdater = new SubjectInheritingThread(statusUpdaterRunnable, "Node Status Updater"); statusUpdater.start(); } private boolean handleShutdownOrResyncCommand( NodeHeartbeatResponse response) { if (response.getNodeAction() == NodeAction.SHUTDOWN) { LOG.warn("Received SHUTDOWN signal from Resourcemanager as part of" + " heartbeat, hence shutting down."); LOG.warn("Message from ResourceManager: {}.", response.getDiagnosticsMessage()); context.setDecommissioned(true); dispatcher.getEventHandler().handle( new NodeManagerEvent(NodeManagerEventType.SHUTDOWN)); return true; } if (response.getNodeAction() == NodeAction.RESYNC) { LOG.warn("Node is out of sync with ResourceManager, hence resyncing."); LOG.warn("Message from ResourceManager: {}.", response.getDiagnosticsMessage()); // Invalidate the RMIdentifier while resync NodeStatusUpdaterImpl.this.rmIdentifier = ResourceManagerConstants.RM_INVALID_IDENTIFIER; dispatcher.getEventHandler().handle( new NodeManagerEvent(NodeManagerEventType.RESYNC)); pendingCompletedContainers.clear(); return true; } return false; } @Override public void reportException(Exception ex) { healthChecker.reportException(ex); sendOutofBandHeartBeat(); } private List<LogAggregationReport> getLogAggregationReportsForApps( ConcurrentLinkedQueue<LogAggregationReport> lastestLogAggregationStatus) { LogAggregationReport status; while ((status = lastestLogAggregationStatus.poll()) != null) { this.logAggregationReportForAppsTempList.add(status); } List<LogAggregationReport> reports = new ArrayList<LogAggregationReport>(); 
reports.addAll(logAggregationReportForAppsTempList); return reports; } private NMNodeLabelsHandler createNMNodeLabelsHandler( NodeLabelsProvider nodeLabelsProvider) { if (nodeLabelsProvider == null) { return new NMCentralizedNodeLabelsHandler(); } else { return new NMDistributedNodeLabelsHandler(nodeLabelsProvider, this.getConfig()); } } /** * Returns a handler based on the configured node attributes provider. * returns null if no provider is configured. * @param provider * @return attributes handler */ private NMNodeAttributesHandler createNMNodeAttributesHandler( NodeAttributesProvider provider) { if (provider == null) { return new NMCentralizedNodeAttributesHandler(); } else { return new NMDistributedNodeAttributesHandler(provider, this.getConfig()); } } private static abstract
NodeStatusUpdaterImpl
java
quarkusio__quarkus
extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/ExtendedQuarkusVertxHttpMetrics.java
{ "start": 101, "end": 479 }
interface ____ the metrics exposed for the Vert.x HTTP server. * <p> * The Vert.x HTTP metrics are managed by Vert.x, and are exposed by the Vert.x metrics SPI. * However, some of the metrics are not exposed by the SPI, and are only available through the Vert.x HTTP SPI. * <p> * Thus, we need to extend the Vert.x HTTP metrics SPI to expose these metrics. */ public
extending
java
apache__logging-log4j2
log4j-jakarta-web/src/test/java/org/apache/logging/log4j/web/PropertyTest.java
{ "start": 1164, "end": 1593 }
class ____ { @Test void testShutdownHookDisabled() { assertFalse( ((Log4jContextFactory) LogManager.getFactory()).isShutdownHookEnabled(), "Shutdown hook should be disabled by default in web applications"); } @Test void testIsWebApp() { assertTrue(Constants.IS_WEB_APP, "When servlet classes are available IS_WEB_APP should default to true"); } }
PropertyTest
java
google__dagger
javatests/artifacts/dagger-ksp/java-app/src/main/java/app/AssistedInjectClasses.java
{ "start": 1462, "end": 1689 }
class ____<T1, T2> { T1 t1; T2 assistedT2; @AssistedInject ParameterizedFoo(T1 t1, @Assisted T2 assistedT2) { this.t1 = t1; this.assistedT2 = assistedT2; } } @AssistedFactory
ParameterizedFoo
java
ReactiveX__RxJava
src/test/java/io/reactivex/rxjava3/internal/subscribers/BasicFuseableConditionalSubscriberTest.java
{ "start": 1140, "end": 7486 }
class ____ extends RxJavaTest { @Test public void offerThrows() { ConditionalSubscriber<Integer> cs = new ConditionalSubscriber<Integer>() { @Override public void onSubscribe(Subscription s) { } @Override public void onNext(Integer t) { } @Override public void onError(Throwable t) { } @Override public void onComplete() { } @Override public boolean tryOnNext(Integer t) { return false; } }; BasicFuseableConditionalSubscriber<Integer, Integer> fcs = new BasicFuseableConditionalSubscriber<Integer, Integer>(cs) { @Override public boolean tryOnNext(Integer t) { return false; } @Override public void onNext(Integer t) { } @Override public int requestFusion(int mode) { return 0; } @Nullable @Override public Integer poll() throws Exception { return null; } }; fcs.onSubscribe(new ScalarSubscription<>(fcs, 1)); TestHelper.assertNoOffer(fcs); assertFalse(fcs.isEmpty()); fcs.clear(); assertTrue(fcs.isEmpty()); } @Test public void implementationStopsOnSubscribe() { @SuppressWarnings("unchecked") ConditionalSubscriber<Integer> ts = mock(ConditionalSubscriber.class); BasicFuseableConditionalSubscriber<Integer, Integer> bfs = new BasicFuseableConditionalSubscriber<Integer, Integer>(ts) { @Override protected boolean beforeDownstream() { return false; } @Override public void onNext(@NonNull Integer t) { ts.onNext(t); } @Override public int requestFusion(int mode) { // TODO Auto-generated method stub return 0; } @Override public boolean tryOnNext(@NonNull Integer t) { // TODO Auto-generated method stub return false; } @Override public @Nullable Integer poll() throws Throwable { return null; } }; bfs.onSubscribe(new BooleanSubscription()); verify(ts, never()).onSubscribe(any()); } @Test public void doubleOnSubscribe() { TestHelper.checkDoubleOnSubscribeFlowable(f -> f .map(v -> v) .filter(v -> true) ); } @Test public void transitiveBoundaryFusionNone() { @SuppressWarnings("unchecked") ConditionalSubscriber<Integer> ts = mock(ConditionalSubscriber.class); 
BasicFuseableConditionalSubscriber<Integer, Integer> bfs = new BasicFuseableConditionalSubscriber<Integer, Integer>(ts) { @Override protected boolean beforeDownstream() { return false; } @Override public void onNext(@NonNull Integer t) { ts.onNext(t); } @Override public int requestFusion(int mode) { // TODO Auto-generated method stub return 0; } @Override public boolean tryOnNext(@NonNull Integer t) { // TODO Auto-generated method stub return false; } @Override public @Nullable Integer poll() throws Throwable { return null; } }; bfs.onSubscribe(new BooleanSubscription()); assertEquals(QueueFuseable.NONE, bfs.transitiveBoundaryFusion(QueueFuseable.ANY)); } @Test public void transitiveBoundaryFusionAsync() { @SuppressWarnings("unchecked") ConditionalSubscriber<Integer> ts = mock(ConditionalSubscriber.class); BasicFuseableConditionalSubscriber<Integer, Integer> bfs = new BasicFuseableConditionalSubscriber<Integer, Integer>(ts) { @Override protected boolean beforeDownstream() { return false; } @Override public void onNext(@NonNull Integer t) { ts.onNext(t); } @Override public int requestFusion(int mode) { // TODO Auto-generated method stub return 0; } @Override public boolean tryOnNext(@NonNull Integer t) { // TODO Auto-generated method stub return false; } @Override public @Nullable Integer poll() throws Throwable { return null; } }; bfs.onSubscribe(EmptySubscription.INSTANCE); assertEquals(QueueFuseable.ASYNC, bfs.transitiveBoundaryFusion(QueueFuseable.ANY)); } @Test public void transitiveBoundaryFusionAsyncBoundary() { @SuppressWarnings("unchecked") ConditionalSubscriber<Integer> ts = mock(ConditionalSubscriber.class); BasicFuseableConditionalSubscriber<Integer, Integer> bfs = new BasicFuseableConditionalSubscriber<Integer, Integer>(ts) { @Override protected boolean beforeDownstream() { return false; } @Override public void onNext(@NonNull Integer t) { ts.onNext(t); } @Override public int requestFusion(int mode) { // TODO Auto-generated method stub return 0; } 
@Override public boolean tryOnNext(@NonNull Integer t) { // TODO Auto-generated method stub return false; } @Override public @Nullable Integer poll() throws Throwable { return null; } }; bfs.onSubscribe(EmptySubscription.INSTANCE); assertEquals(QueueFuseable.NONE, bfs.transitiveBoundaryFusion(QueueFuseable.ANY | QueueFuseable.BOUNDARY)); } }
BasicFuseableConditionalSubscriberTest
java
apache__hadoop
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java
{ "start": 6516, "end": 6774 }
class ____ extends HadoopIllegalArgumentException { private static final long serialVersionUID = 1L; public BadAclFormatException(String message) { super(message); } } @InterfaceAudience.Private public static
BadAclFormatException
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/plugins/DiscoveryPlugin.java
{ "start": 1005, "end": 1110 }
interface ____ implement the {@link #getCustomNameResolver(Settings)} method: * * <pre>{@code * public
and
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/entitygraph/FindGraphCollectionOrderByTest.java
{ "start": 1504, "end": 3799 }
class ____ { @Test public void testLoadGraphFind(SessionFactoryScope scope) { executeTest( scope, AvailableHints.HINT_SPEC_LOAD_GRAPH, true ); } @Test public void testLoadGraphQuery(SessionFactoryScope scope) { executeTest( scope, AvailableHints.HINT_SPEC_LOAD_GRAPH, false ); } @Test public void testFetchGraphFind(SessionFactoryScope scope) { executeTest( scope, AvailableHints.HINT_SPEC_FETCH_GRAPH, true ); } @Test public void testFetchGraphQuery(SessionFactoryScope scope) { executeTest( scope, AvailableHints.HINT_SPEC_FETCH_GRAPH, false ); } private void executeTest(SessionFactoryScope scope, String hint, boolean find) { scope.inTransaction( session -> { final RootGraphImplementor<?> graph = session.getEntityGraph( "level1_loadAll" ); final Level1 root = find ? session.find( Level1.class, 1L, Map.of( hint, graph ) ) : session.createQuery( "from Level1 where id = :id", Level1.class ) .setParameter( "id", 1L ) .setHint( hint, graph ) .getSingleResult(); assertThat( root.getChildren() ).matches( Hibernate::isInitialized ).hasSize( 3 ); long i = 1; for ( final Level2 child : root.getChildren() ) { if ( i == 2 ) { assertThat( child.getChildren() ).matches( Hibernate::isInitialized ).hasSize( 1 ); } assertThat( child.getId() ).as( "Children not in expected order" ).isEqualTo( i++ ); } } ); } @BeforeAll public void setUp(SessionFactoryScope scope) { scope.inTransaction( session -> { final Level1 root = new Level1( 1L ); new Level2( root, 1L ); final Level2 child2 = new Level2( root, 2L ); new Level2( root, 3L ); new Level3( child2, 1L ); session.persist( root ); } ); } @AfterAll public void tearDown(SessionFactoryScope scope) { scope.getSessionFactory().getSchemaManager().truncateMappedObjects(); } @Entity( name = "Level1" ) @NamedEntityGraphs( { @NamedEntityGraph( name = "level1_loadAll", attributeNodes = { @NamedAttributeNode( value = "children", subgraph = "subgraph.children" ) }, subgraphs = { @NamedSubgraph( name = "subgraph.children", attributeNodes = { 
@NamedAttributeNode( value = "children" ) } ) } ) } ) static
FindGraphCollectionOrderByTest
java
spring-projects__spring-security
config/src/test/java/org/springframework/security/config/web/server/OAuth2ResourceServerSpecTests.java
{ "start": 37605, "end": 38315 }
class ____ { @Bean SecurityWebFilterChain springSecurity(ServerHttpSecurity http) { // @formatter:off http .authorizeExchange((authorize) -> authorize .pathMatchers("/authenticated").authenticated() .pathMatchers("/unobtainable").hasAuthority("unobtainable")) .oauth2ResourceServer((server) -> server .accessDeniedHandler(new HttpStatusServerAccessDeniedHandler(HttpStatus.BANDWIDTH_LIMIT_EXCEEDED)) .authenticationEntryPoint(new HttpStatusServerEntryPoint(HttpStatus.I_AM_A_TEAPOT)) .jwt((jwt) -> jwt.publicKey(publicKey()))); // @formatter:on return http.build(); } } @Configuration @EnableWebFlux @EnableWebFluxSecurity static
CustomErrorHandlingConfig
java
elastic__elasticsearch
x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble.java
{ "start": 769, "end": 9301 }
class ____ { static final RoundTo.Build BUILD = (source, field, points) -> { double[] f = points.stream().mapToDouble(p -> ((Number) p).doubleValue()).toArray(); return switch (f.length) { // TODO should be a consistent way to do the 0 version - is CASE(MV_COUNT(f) == 1, f[0]) case 1 -> new RoundToDouble1Evaluator.Factory(source, field, f[0]); /* * These hand-unrolled implementations are even faster than the linear scan implementations. */ case 2 -> new RoundToDouble2Evaluator.Factory(source, field, f[0], f[1]); case 3 -> new RoundToDouble3Evaluator.Factory(source, field, f[0], f[1], f[2]); case 4 -> new RoundToDouble4Evaluator.Factory(source, field, f[0], f[1], f[2], f[3]); case 5 -> new RoundToDouble5Evaluator.Factory(source, field, f[0], f[1], f[2], f[3], f[4]); case 6 -> new RoundToDouble6Evaluator.Factory(source, field, f[0], f[1], f[2], f[3], f[4], f[5]); case 7 -> new RoundToDouble7Evaluator.Factory(source, field, f[0], f[1], f[2], f[3], f[4], f[5], f[6]); case 8 -> new RoundToDouble8Evaluator.Factory(source, field, f[0], f[1], f[2], f[3], f[4], f[5], f[6], f[7]); case 9 -> new RoundToDouble9Evaluator.Factory(source, field, f[0], f[1], f[2], f[3], f[4], f[5], f[6], f[7], f[8]); case 10 -> new RoundToDouble10Evaluator.Factory(source, field, f[0], f[1], f[2], f[3], f[4], f[5], f[6], f[7], f[8], f[9]); /* * Break point of 10 experimentally derived on Nik's laptop (13th Gen Intel(R) Core(TM) i7-1370P) * on 2025-05-22. */ default -> new RoundToDoubleBinarySearchEvaluator.Factory(source, field, f); }; }; @Evaluator(extraName = "BinarySearch") static double process(double field, @Fixed(includeInToString = false) double[] points) { int idx = Arrays.binarySearch(points, field); return points[idx >= 0 ? 
idx : Math.max(0, -idx - 2)]; } @Evaluator(extraName = "1") static double process(double field, @Fixed double p0) { return p0; } @Evaluator(extraName = "2") static double process(double field, @Fixed double p0, @Fixed double p1) { if (field < p1) { return p0; } return p1; } @Evaluator(extraName = "3") static double process(double field, @Fixed double p0, @Fixed double p1, @Fixed double p2) { if (field < p1) { return p0; } if (field < p2) { return p1; } return p2; } @Evaluator(extraName = "4") static double process(double field, @Fixed double p0, @Fixed double p1, @Fixed double p2, @Fixed double p3) { if (field < p1) { return p0; } if (field < p2) { return p1; } if (field < p3) { return p2; } return p3; } /* * Manual binary search for 5 rounding points, it is faster than linear search or array style binary search. */ @Evaluator(extraName = "5") static double process(double field, @Fixed double p0, @Fixed double p1, @Fixed double p2, @Fixed double p3, @Fixed double p4) { if (field < p2) { if (field < p1) { return p0; } return p1; } if (field < p3) { return p2; } if (field < p4) { return p3; } return p4; } /* * Manual binary search for 6 rounding points, it is faster than linear search or array style binary search. */ @Evaluator(extraName = "6") static double process( double field, // hack to keep the formatter happy. @Fixed double p0, // int is so short this should be on one line but double is not. @Fixed double p1, // That's not compatible with the templates. @Fixed double p2, // So we comment to make the formatter not try to change the line. @Fixed double p3, @Fixed double p4, @Fixed double p5 ) { if (field < p2) { if (field < p1) { return p0; } return p1; } if (field < p4) { if (field < p3) { return p2; } return p3; } if (field < p5) { return p4; } return p5; } /* * Manual binary search for 7 rounding points, it is faster than linear search or array style binary search. 
*/ @Evaluator(extraName = "7") static double process( double field, // hack to keep the formatter happy. @Fixed double p0, // int is so short this should be on one line but double is not. @Fixed double p1, // That's not compatible with the templates. @Fixed double p2, // So we comment to make the formatter not try to change the line. @Fixed double p3, @Fixed double p4, @Fixed double p5, @Fixed double p6 ) { if (field < p3) { if (field < p1) { return p0; } if (field < p2) { return p1; } return p2; } if (field < p5) { if (field < p4) { return p3; } return p4; } if (field < p6) { return p5; } return p6; } /* * Manual binary search for 8 rounding points, it is faster than linear search or array style binary search. */ @Evaluator(extraName = "8") static double process( double field, @Fixed double p0, @Fixed double p1, @Fixed double p2, @Fixed double p3, @Fixed double p4, @Fixed double p5, @Fixed double p6, @Fixed double p7 ) { if (field < p3) { if (field < p1) { return p0; } if (field < p2) { return p1; } return p2; } if (field < p5) { if (field < p4) { return p3; } return p4; } if (field < p6) { return p5; } if (field < p7) { return p6; } return p7; } /* * Manual binary search for 9 rounding points, it is faster than linear search or array style binary search. */ @Evaluator(extraName = "9") static double process( double field, @Fixed double p0, @Fixed double p1, @Fixed double p2, @Fixed double p3, @Fixed double p4, @Fixed double p5, @Fixed double p6, @Fixed double p7, @Fixed double p8 ) { if (field < p4) { if (field < p1) { return p0; } if (field < p2) { return p1; } if (field < p3) { return p2; } return p3; } if (field < p6) { if (field < p5) { return p4; } return p5; } if (field < p7) { return p6; } if (field < p8) { return p7; } return p8; } /* * Manual binary search for 10 rounding points, it is faster than linear search or array style binary search. 
*/ @Evaluator(extraName = "10") static double process( double field, @Fixed double p0, @Fixed double p1, @Fixed double p2, @Fixed double p3, @Fixed double p4, @Fixed double p5, @Fixed double p6, @Fixed double p7, @Fixed double p8, @Fixed double p9 ) { if (field < p4) { if (field < p1) { return p0; } if (field < p2) { return p1; } if (field < p3) { return p2; } return p3; } if (field < p7) { if (field < p5) { return p4; } if (field < p6) { return p5; } return p6; } if (field < p8) { return p7; } if (field < p9) { return p8; } return p9; } }
RoundToDouble
java
google__guava
android/guava/src/com/google/common/util/concurrent/AtomicLongMap.java
{ "start": 1884, "end": 2016 }
class ____ be used by multiple threads concurrently. All operations are * atomic unless otherwise noted. * * <p>Instances of this
may
java
google__dagger
javatests/dagger/internal/codegen/MissingBindingValidationTest.java
{ "start": 34456, "end": 34731 }
interface ____ {", " Child child();", "}"); Source child = CompilerTests.javaSource( "Child", "import dagger.Subcomponent;", "", "@Subcomponent(modules = ChildModule.class)", "
Parent
java
elastic__elasticsearch
x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/message/Action.java
{ "start": 792, "end": 3354 }
class ____ implements MessageElement { static final ObjectParser<Template, Void> ACTION_PARSER = new ObjectParser<>("action", Template::new); static { ACTION_PARSER.declareField(Template::setType, (p, c) -> new TextTemplate(p.text()), new ParseField("type"), ValueType.STRING); ACTION_PARSER.declareField(Template::setUrl, (p, c) -> new TextTemplate(p.text()), new ParseField("url"), ValueType.STRING); ACTION_PARSER.declareField(Template::setText, (p, c) -> new TextTemplate(p.text()), new ParseField("text"), ValueType.STRING); ACTION_PARSER.declareField(Template::setStyle, (p, c) -> new TextTemplate(p.text()), new ParseField("style"), ValueType.STRING); ACTION_PARSER.declareField(Template::setName, (p, c) -> new TextTemplate(p.text()), new ParseField("name"), ValueType.STRING); } private static final ParseField URL = new ParseField("url"); private static final ParseField TYPE = new ParseField("type"); private static final ParseField TEXT = new ParseField("text"); private static final ParseField STYLE = new ParseField("style"); private static final ParseField NAME = new ParseField("name"); private String style; private String name; private String type; private String text; private String url; public Action() {} public Action(String style, String name, String type, String text, String url) { this.style = style; this.name = name; this.type = type; this.text = text; this.url = url; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Action template = (Action) o; return Objects.equals(style, template.style) && Objects.equals(type, template.type) && Objects.equals(url, template.url) && Objects.equals(text, template.text) && Objects.equals(name, template.name); } @Override public int hashCode() { return Objects.hash(style, type, url, name, text); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { return builder.startObject() 
.field(NAME.getPreferredName(), name) .field(STYLE.getPreferredName(), style) .field(TYPE.getPreferredName(), type) .field(TEXT.getPreferredName(), text) .field(URL.getPreferredName(), url) .endObject(); } static
Action
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/batchfetch/BatchFetchStrategyLoadingTests.java
{ "start": 979, "end": 3182 }
class ____ { @Test @DomainModel( annotatedClasses = { BatchFetchStrategyLoadingTests.Thing1.class, BatchFetchStrategyLoadingTests.Thing2.class }) @SessionFactory( useCollectingStatementInspector = true ) public void testIt(SessionFactoryScope scope) { final SQLStatementInspector statementInspector = scope.getCollectingStatementInspector(); statementInspector.clear(); scope.inTransaction( (session) -> { final Thing2 thing21 = session.getReference( Thing2.class, 1 ); final Thing2 thing22 = session.getReference( Thing2.class, 2 ); final Thing2 thing23 = session.getReference( Thing2.class, 3 ); assertThat( statementInspector.getSqlQueries() ).isEmpty(); assertThat( Hibernate.isInitialized( thing21 ) ).isFalse(); assertThat( Hibernate.isInitialized( thing22 ) ).isFalse(); assertThat( Hibernate.isInitialized( thing23 ) ).isFalse(); final String name = thing21.getName(); assertThat( name ).isEqualTo( "thing-2.1" ); assertThat( statementInspector.getSqlQueries() ).hasSize( 1 ); if ( MultiKeyLoadHelper.supportsSqlArrayType( scope.getSessionFactory().getJdbcServices().getDialect() ) ) { assertThat( StringHelper.count( statementInspector.getSqlQueries().get( 0 ), "?" ) ).isEqualTo( 1 ); } else { assertThat( StringHelper.count( statementInspector.getSqlQueries().get( 0 ), "?" 
) ).isEqualTo( 64 ); } assertThat( Hibernate.isInitialized( thing21 ) ).isTrue(); assertThat( Hibernate.isInitialized( thing22 ) ).isTrue(); assertThat( Hibernate.isInitialized( thing23 ) ).isTrue(); } ); } @BeforeEach public void createTestData(SessionFactoryScope scope) { scope.inTransaction( (session) -> { final Thing2 thing2 = new Thing2( 1, "thing-2.1" ); final Thing1 thing1 = new Thing1( 1, "thing-1", thing2 ); session.persist( thing2 ); session.persist( thing1 ); session.persist( new Thing2( 2, "thing-2.2" ) ); session.persist( new Thing2( 3, "thing-2.3" ) ); } ); } @AfterEach public void dropTestData(SessionFactoryScope scope) { scope.getSessionFactory().getSchemaManager().truncate(); } @Entity( name = "Thing1" ) @Table( name = "Thing1" ) public static
BatchFetchStrategyLoadingTests
java
resilience4j__resilience4j
resilience4j-feign/src/main/java/io/github/resilience4j/feign/FallbackHandler.java
{ "start": 850, "end": 2187 }
interface ____<T> { CheckedFunction<Object[], Object> decorate(CheckedFunction<Object[], Object> invocationCall, Method method, Predicate<Exception> filter); default Method validateAndGetFallbackMethod(T fallback, Method method) { validateFallback(fallback, method); return getFallbackMethod(fallback, method); } default void validateFallback(T fallback, Method method) { if (fallback.getClass().isAssignableFrom(method.getDeclaringClass())) { throw new IllegalArgumentException("Cannot use the fallback [" + fallback.getClass() + "] for [" + method.getDeclaringClass() + "]!"); } } default Method getFallbackMethod(T fallbackInstance, Method method) { Method fallbackMethod; try { fallbackMethod = fallbackInstance.getClass() .getMethod(method.getName(), method.getParameterTypes()); } catch (NoSuchMethodException | SecurityException e) { throw new IllegalArgumentException("Cannot use the fallback [" + fallbackInstance.getClass() + "] for [" + method.getDeclaringClass() + "]", e); } fallbackMethod.setAccessible(true); return fallbackMethod; } }
FallbackHandler
java
google__error-prone
core/src/test/java/com/google/errorprone/fixes/SuggestedFixesTest.java
{ "start": 47042, "end": 47493 }
class ____ { int BEST = 42; } """) .doTest(TestMode.AST_MATCH); } @Test public void removeSuppressWarnings_withValueInit_retainsValue() { BugCheckerRefactoringTestHelper refactorTestHelper = BugCheckerRefactoringTestHelper.newInstance(RemoveSuppressFromMe.class, getClass()); refactorTestHelper .addInputLines( "in/Test.java", """ public
Test
java
quarkusio__quarkus
extensions/redis-client/runtime/src/test/java/io/quarkus/redis/datasource/TransactionalCountMinCommandsTest.java
{ "start": 751, "end": 3252 }
class ____ extends DatasourceTestBase { private RedisDataSource blocking; private ReactiveRedisDataSource reactive; @BeforeEach void initialize() { blocking = new BlockingRedisDataSourceImpl(vertx, redis, api, Duration.ofSeconds(60)); reactive = new ReactiveRedisDataSourceImpl(vertx, redis, api); } @AfterEach public void clear() { blocking.flushall(); } @Test public void countMinBlocking() { TransactionResult result = blocking.withTransaction(tx -> { TransactionalCountMinCommands<String, String> cm = tx.countmin(String.class); assertThat(cm.getDataSource()).isEqualTo(tx); cm.cmsInitByDim(key, 10, 10); cm.cmsIncrBy(key, Map.of("a", 5L, "b", 2L, "c", 4L)); // 1 -> [5,2,4] cm.cmsIncrBy(key, "a", 2); // 2 -> 7 cm.cmsQuery(key, "a"); // 3 -> 7 cm.cmsQuery(key, "b", "c"); // 4 -> [2, 4] }); assertThat(result.size()).isEqualTo(5); assertThat(result.discarded()).isFalse(); assertThat((Void) result.get(0)).isNull(); assertThat((List<Long>) result.get(1)).containsExactlyInAnyOrder(5L, 2L, 4L); assertThat((Long) result.get(2)).isEqualTo(7); assertThat((Long) result.get(3)).isEqualTo(7); assertThat((List<Long>) result.get(4)).containsExactly(2L, 4L); } @Test public void countMinReactive() { TransactionResult result = reactive.withTransaction(tx -> { ReactiveTransactionalCountMinCommands<String, String> cm = tx.countmin(String.class); assertThat(cm.getDataSource()).isEqualTo(tx); return cm.cmsInitByDim(key, 10, 10) .chain(() -> cm.cmsIncrBy(key, Map.of("a", 5L, "b", 2L, "c", 4L))) .chain(() -> cm.cmsIncrBy(key, "a", 2)) .chain(() -> cm.cmsQuery(key, "a")) .chain(() -> cm.cmsQuery(key, "b", "c")) .replaceWithVoid(); }).await().indefinitely(); assertThat(result.size()).isEqualTo(5); assertThat(result.discarded()).isFalse(); assertThat((Void) result.get(0)).isNull(); assertThat((List<Long>) result.get(1)).containsExactlyInAnyOrder(5L, 2L, 4L); assertThat((Long) result.get(2)).isEqualTo(7); assertThat((Long) result.get(3)).isEqualTo(7); assertThat((List<Long>) 
result.get(4)).containsExactly(2L, 4L); } }
TransactionalCountMinCommandsTest
java
netty__netty
codec-http/src/main/java/io/netty/handler/codec/http/websocketx/WebSocket00FrameEncoder.java
{ "start": 1268, "end": 4120 }
class ____ extends MessageToMessageEncoder<WebSocketFrame> implements WebSocketFrameEncoder { private static final ByteBuf _0X00 = LeakPresenceDetector.staticInitializer(() -> Unpooled.unreleasableBuffer( Unpooled.directBuffer(1, 1).writeByte(0x00)).asReadOnly()); private static final ByteBuf _0XFF = LeakPresenceDetector.staticInitializer(() -> Unpooled.unreleasableBuffer( Unpooled.directBuffer(1, 1).writeByte((byte) 0xFF)).asReadOnly()); private static final ByteBuf _0XFF_0X00 = LeakPresenceDetector.staticInitializer(() -> Unpooled.unreleasableBuffer( Unpooled.directBuffer(2, 2).writeByte((byte) 0xFF).writeByte(0x00)).asReadOnly()); public WebSocket00FrameEncoder() { super(WebSocketFrame.class); } @Override protected void encode(ChannelHandlerContext ctx, WebSocketFrame msg, List<Object> out) throws Exception { if (msg instanceof TextWebSocketFrame) { // Text frame ByteBuf data = msg.content(); out.add(_0X00.duplicate()); out.add(data.retain()); out.add(_0XFF.duplicate()); } else if (msg instanceof CloseWebSocketFrame) { // Close frame, needs to call duplicate to allow multiple writes. // See https://github.com/netty/netty/issues/2768 out.add(_0XFF_0X00.duplicate()); } else { // Binary frame ByteBuf data = msg.content(); int dataLen = data.readableBytes(); ByteBuf buf = ctx.alloc().buffer(5); boolean release = true; try { // Encode type. buf.writeByte((byte) 0x80); // Encode length. int b1 = dataLen >>> 28 & 0x7F; int b2 = dataLen >>> 14 & 0x7F; int b3 = dataLen >>> 7 & 0x7F; int b4 = dataLen & 0x7F; if (b1 == 0) { if (b2 == 0) { if (b3 != 0) { buf.writeByte(b3 | 0x80); } buf.writeByte(b4); } else { buf.writeByte(b2 | 0x80); buf.writeByte(b3 | 0x80); buf.writeByte(b4); } } else { buf.writeByte(b1 | 0x80); buf.writeByte(b2 | 0x80); buf.writeByte(b3 | 0x80); buf.writeByte(b4); } // Encode binary data. out.add(buf); out.add(data.retain()); release = false; } finally { if (release) { buf.release(); } } } } }
WebSocket00FrameEncoder
java
apache__camel
components/camel-wordpress/src/main/java/org/apache/camel/component/wordpress/api/model/PublishableSearchCriteria.java
{ "start": 923, "end": 2548 }
class ____ extends SearchCriteria { private static final long serialVersionUID = 9178680514805178843L; private Date after; private Date before; private List<Integer> author; private List<Integer> authorExclude; private List<Integer> offset; private List<String> slug; private PublishableStatus status; private Context context; public Date getAfter() { return after; } public void setAfter(Date after) { this.after = after; } public Date getBefore() { return before; } public void setBefore(Date before) { this.before = before; } public List<Integer> getAuthor() { return author; } public void setAuthor(List<Integer> author) { this.author = author; } public List<Integer> getAuthorExclude() { return authorExclude; } public void setAuthorExclude(List<Integer> authorExclude) { this.authorExclude = authorExclude; } public List<Integer> getOffset() { return offset; } public void setOffset(List<Integer> offset) { this.offset = offset; } public List<String> getSlug() { return slug; } public void setSlug(List<String> slug) { this.slug = slug; } public PublishableStatus getStatus() { return status; } public void setStatus(PublishableStatus status) { this.status = status; } public Context getContext() { return context; } public void setContext(Context context) { this.context = context; } }
PublishableSearchCriteria
java
apache__flink
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/testutils/RestoreTestCompleteness.java
{ "start": 2111, "end": 6167 }
class ____ { private static final Set<Class<? extends ExecNode<?>>> SKIP_EXEC_NODES = new HashSet<Class<? extends ExecNode<?>>>() { { /** Ignoring python based exec nodes temporarily. */ add(StreamExecPythonCalc.class); add(StreamExecPythonCorrelate.class); add(StreamExecPythonOverAggregate.class); add(StreamExecPythonGroupAggregate.class); add(StreamExecPythonGroupTableAggregate.class); add(StreamExecPythonGroupWindowAggregate.class); } }; private Class<? extends ExecNode<?>> getExecNode(Class<?> restoreTest) throws NoSuchMethodException, InvocationTargetException, InstantiationException, IllegalAccessException { Method getExecNodeMethod = restoreTest.getMethod("getExecNode"); Class<? extends ExecNode<?>> execNode = (Class<? extends ExecNode<?>>) getExecNodeMethod.invoke( restoreTest.getDeclaredConstructor().newInstance()); return execNode; } private List<Class<? extends ExecNode<?>>> getChildExecNodes(Class<?> restoreTest) throws NoSuchMethodException, InvocationTargetException, InstantiationException, IllegalAccessException { Method getChildExecNodesMethod = restoreTest.getMethod("getChildExecNodes"); List<Class<? extends ExecNode<?>>> childExecNodes = (List<Class<? extends ExecNode<?>>>) getChildExecNodesMethod.invoke( restoreTest.getDeclaredConstructor().newInstance()); return childExecNodes; } @Test public void testMissingRestoreTest() throws IOException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException { Map<ExecNodeNameVersion, Class<? extends ExecNode<?>>> versionedExecNodes = ExecNodeMetadataUtil.getVersionedExecNodes(); Set<ClassPath.ClassInfo> classesInPackage = ClassPath.from(this.getClass().getClassLoader()) .getTopLevelClassesRecursive( "org.apache.flink.table.planner.plan.nodes.exec.stream") .stream() .filter(x -> RestoreTestBase.class.isAssignableFrom(x.load())) .collect(Collectors.toSet()); Set<Class<? 
extends ExecNode<?>>> execNodesWithRestoreTests = new HashSet<>(); for (ClassPath.ClassInfo classInfo : classesInPackage) { Class<?> restoreTest = classInfo.load(); Class<? extends ExecNode<?>> execNode = getExecNode(restoreTest); execNodesWithRestoreTests.add(execNode); List<Class<? extends ExecNode<?>>> childExecNodes = getChildExecNodes(restoreTest); for (Class<? extends ExecNode<?>> childExecNode : childExecNodes) { execNodesWithRestoreTests.add(childExecNode); } } for (Map.Entry<ExecNodeNameVersion, Class<? extends ExecNode<?>>> entry : versionedExecNodes.entrySet()) { ExecNodeNameVersion execNodeNameVersion = entry.getKey(); Class<? extends ExecNode<?>> execNode = entry.getValue(); if (!SKIP_EXEC_NODES.contains(execNode)) { final String msg = "Missing restore test for " + execNodeNameVersion + "\nPlease add a restore test for " + execNode.toString(); Assertions.assertTrue(execNodesWithRestoreTests.contains(execNode), msg); } } } }
RestoreTestCompleteness
java
apache__dubbo
dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/mapping/meta/BeanMeta.java
{ "start": 12962, "end": 16732 }
class ____ extends NestableParameterMeta { private final Field field; private final Method getMethod; private final Method setMethod; private final Parameter parameter; private final int visibility; PropertyMeta(RestToolKit toolKit, Field f, Method gm, Method sm, String prefix, String name, int visibility) { super(toolKit, prefix, name); this.visibility = visibility; field = f; getMethod = gm; setMethod = sm; parameter = setMethod == null ? null : setMethod.getParameters()[0]; initNestedMeta(); } public int getVisibility() { return visibility; } public Field getField() { return field; } public Method getGetMethod() { return getMethod; } public Method getSetMethod() { return setMethod; } public Parameter getParameter() { return parameter; } @Override public Class<?> getType() { if (field != null) { return field.getType(); } if (parameter != null) { return parameter.getType(); } return getMethod.getReturnType(); } @Override public Type getGenericType() { if (field != null) { return field.getGenericType(); } if (parameter != null) { return parameter.getParameterizedType(); } return getMethod.getGenericReturnType(); } @Override protected AnnotatedElement getAnnotatedElement() { if (field != null) { return field; } if (parameter != null) { return parameter; } return getMethod; } @Override public List<? 
extends AnnotatedElement> getAnnotatedElements() { List<AnnotatedElement> elements = new ArrayList<>(3); if (field != null) { elements.add(field); } if (parameter != null) { elements.add(parameter); } if (getMethod != null) { elements.add(getMethod); } return elements; } public Object getValue(Object bean) { if (getMethod != null) { try { return getMethod.invoke(bean); } catch (Throwable t) { throw ExceptionUtils.wrap(t); } } else if (field != null) { try { return field.get(bean); } catch (Throwable t) { throw ExceptionUtils.wrap(t); } } return null; } public void setValue(Object bean, Object value) { if (setMethod != null) { try { setMethod.invoke(bean, value); } catch (Throwable t) { throw ExceptionUtils.wrap(t); } } else if (field != null) { try { field.set(bean, value); } catch (Throwable t) { throw ExceptionUtils.wrap(t); } } } @Override public String getDescription() { return "PropertyMeta{" + (field == null ? (parameter == null ? getMethod : parameter) : field) + '}'; } public boolean canSetValue() { return setMethod != null || field != null; } } private static final
PropertyMeta
java
ReactiveX__RxJava
src/test/java/io/reactivex/rxjava3/flowable/FlowableSubscriberTest.java
{ "start": 1378, "end": 22757 }
class ____ { /** * Should request n for whatever the final Subscriber asks for. */ @Test public void requestFromFinalSubscribeWithRequestValue() { TestSubscriber<String> s = new TestSubscriber<>(0L); s.request(10); final AtomicLong r = new AtomicLong(); s.onSubscribe(new Subscription() { @Override public void request(long n) { r.set(n); } @Override public void cancel() { } }); assertEquals(10, r.get()); } /** * Should request -1 for infinite. */ @Test public void requestFromFinalSubscribeWithoutRequestValue() { TestSubscriber<String> s = new TestSubscriber<>(); final AtomicLong r = new AtomicLong(); s.onSubscribe(new Subscription() { @Override public void request(long n) { r.set(n); } @Override public void cancel() { } }); assertEquals(Long.MAX_VALUE, r.get()); } @Test public void requestFromChainedOperator() throws Throwable { TestSubscriber<String> s = new TestSubscriber<>(10L); FlowableOperator<String, String> o = new FlowableOperator<String, String>() { @Override public Subscriber<? super String> apply(final Subscriber<? super String> s1) { return new FlowableSubscriber<String>() { @Override public void onSubscribe(Subscription a) { s1.onSubscribe(a); } @Override public void onComplete() { } @Override public void onError(Throwable e) { } @Override public void onNext(String t) { } }; } }; Subscriber<? super String> ns = o.apply(s); final AtomicLong r = new AtomicLong(); // set set the producer at the top of the chain (ns) and it should flow through the operator to the (s) subscriber // and then it should request up with the value set on the final Subscriber (s) ns.onSubscribe(new Subscription() { @Override public void request(long n) { r.set(n); } @Override public void cancel() { } }); assertEquals(10, r.get()); } @Test public void requestFromDecoupledOperator() throws Throwable { TestSubscriber<String> s = new TestSubscriber<>(0L); FlowableOperator<String, String> o = new FlowableOperator<String, String>() { @Override public Subscriber<? 
super String> apply(final Subscriber<? super String> s1) { return new FlowableSubscriber<String>() { @Override public void onSubscribe(Subscription a) { s1.onSubscribe(a); } @Override public void onComplete() { } @Override public void onError(Throwable e) { } @Override public void onNext(String t) { } }; } }; s.request(10); Subscriber<? super String> ns = o.apply(s); final AtomicLong r = new AtomicLong(); // set set the producer at the top of the chain (ns) and it should flow through the operator to the (s) subscriber // and then it should request up with the value set on the final Subscriber (s) ns.onSubscribe(new Subscription() { @Override public void request(long n) { r.set(n); } @Override public void cancel() { } }); assertEquals(10, r.get()); } @Test public void requestFromDecoupledOperatorThatRequestsN() throws Throwable { TestSubscriber<String> s = new TestSubscriber<>(10L); final AtomicLong innerR = new AtomicLong(); FlowableOperator<String, String> o = new FlowableOperator<String, String>() { @Override public Subscriber<? super String> apply(Subscriber<? super String> child) { // we want to decouple the chain so set our own Producer on the child instead of it coming from the parent child.onSubscribe(new Subscription() { @Override public void request(long n) { innerR.set(n); } @Override public void cancel() { } }); ResourceSubscriber<String> as = new ResourceSubscriber<String>() { @Override protected void onStart() { // we request 99 up to the parent request(99); } @Override public void onComplete() { } @Override public void onError(Throwable e) { } @Override public void onNext(String t) { } }; return as; } }; Subscriber<? 
super String> ns = o.apply(s); final AtomicLong r = new AtomicLong(); // set set the producer at the top of the chain (ns) and it should flow through the operator to the (s) subscriber // and then it should request up with the value set on the final Subscriber (s) ns.onSubscribe(new Subscription() { @Override public void request(long n) { r.set(n); } @Override public void cancel() { } }); assertEquals(99, r.get()); assertEquals(10, innerR.get()); } @Test public void requestToFlowable() { TestSubscriber<Integer> ts = new TestSubscriber<>(3L); final AtomicLong requested = new AtomicLong(); Flowable.<Integer>unsafeCreate(new Publisher<Integer>() { @Override public void subscribe(Subscriber<? super Integer> s) { s.onSubscribe(new Subscription() { @Override public void request(long n) { requested.set(n); } @Override public void cancel() { } }); } }).subscribe(ts); assertEquals(3, requested.get()); } @Test public void requestThroughMap() { TestSubscriber<Integer> ts = new TestSubscriber<>(0L); ts.request(3); final AtomicLong requested = new AtomicLong(); Flowable.<Integer>unsafeCreate(new Publisher<Integer>() { @Override public void subscribe(Subscriber<? super Integer> s) { s.onSubscribe(new Subscription() { @Override public void request(long n) { requested.set(n); } @Override public void cancel() { } }); } }).map(Functions.<Integer>identity()).subscribe(ts); assertEquals(3, requested.get()); } @Test public void requestThroughTakeThatReducesRequest() { TestSubscriber<Integer> ts = new TestSubscriber<>(0L); ts.request(3); final AtomicLong requested = new AtomicLong(); Flowable.<Integer>unsafeCreate(new Publisher<Integer>() { @Override public void subscribe(Subscriber<? 
super Integer> s) { s.onSubscribe(new Subscription() { @Override public void request(long n) { requested.set(n); } @Override public void cancel() { } }); } }).take(2).subscribe(ts); assertEquals(2, requested.get()); } @Test public void requestThroughTakeWhereRequestIsSmallerThanTake() { TestSubscriber<Integer> ts = new TestSubscriber<>(0L); ts.request(3); final AtomicLong requested = new AtomicLong(); Flowable.<Integer>unsafeCreate(new Publisher<Integer>() { @Override public void subscribe(Subscriber<? super Integer> s) { s.onSubscribe(new Subscription() { @Override public void request(long n) { requested.set(n); } @Override public void cancel() { } }); } }).take(10).subscribe(ts); assertEquals(3, requested.get()); } @Test public void onStartCalledOnceViaSubscribe() { final AtomicInteger c = new AtomicInteger(); Flowable.just(1, 2, 3, 4).take(2).subscribe(new DefaultSubscriber<Integer>() { @Override public void onStart() { c.incrementAndGet(); request(1); } @Override public void onComplete() { } @Override public void onError(Throwable e) { } @Override public void onNext(Integer t) { request(1); } }); assertEquals(1, c.get()); } @Test public void onStartCalledOnceViaUnsafeSubscribe() { final AtomicInteger c = new AtomicInteger(); Flowable.just(1, 2, 3, 4).take(2).subscribe(new DefaultSubscriber<Integer>() { @Override public void onStart() { c.incrementAndGet(); request(1); } @Override public void onComplete() { } @Override public void onError(Throwable e) { } @Override public void onNext(Integer t) { request(1); } }); assertEquals(1, c.get()); } @Test public void onStartCalledOnceViaLift() { final AtomicInteger c = new AtomicInteger(); Flowable.just(1, 2, 3, 4).lift(new FlowableOperator<Integer, Integer>() { @Override public Subscriber<? super Integer> apply(final Subscriber<? 
super Integer> child) { return new DefaultSubscriber<Integer>() { @Override public void onStart() { c.incrementAndGet(); request(1); } @Override public void onComplete() { child.onComplete(); } @Override public void onError(Throwable e) { child.onError(e); } @Override public void onNext(Integer t) { child.onNext(t); request(1); } }; } }).subscribe(); assertEquals(1, c.get()); } @Test public void onStartRequestsAreAdditive() { final List<Integer> list = new ArrayList<>(); Flowable.just(1, 2, 3, 4, 5) .subscribe(new DefaultSubscriber<Integer>() { @Override public void onStart() { request(3); request(2); } @Override public void onComplete() { } @Override public void onError(Throwable e) { } @Override public void onNext(Integer t) { list.add(t); }}); assertEquals(Arrays.asList(1, 2, 3, 4, 5), list); } @Test public void onStartRequestsAreAdditiveAndOverflowBecomesMaxValue() { final List<Integer> list = new ArrayList<>(); Flowable.just(1, 2, 3, 4, 5).subscribe(new DefaultSubscriber<Integer>() { @Override public void onStart() { request(2); request(Long.MAX_VALUE - 1); } @Override public void onComplete() { } @Override public void onError(Throwable e) { } @Override public void onNext(Integer t) { list.add(t); }}); assertEquals(Arrays.asList(1, 2, 3, 4, 5), list); } @Test public void forEachWhile() { PublishProcessor<Integer> pp = PublishProcessor.create(); final List<Integer> list = new ArrayList<>(); Disposable d = pp.forEachWhile(new Predicate<Integer>() { @Override public boolean test(Integer v) throws Exception { list.add(v); return v < 3; } }); assertFalse(d.isDisposed()); pp.onNext(1); pp.onNext(2); pp.onNext(3); assertFalse(pp.hasSubscribers()); assertEquals(Arrays.asList(1, 2, 3), list); } @Test public void doubleSubscribe() { ForEachWhileSubscriber<Integer> s = new ForEachWhileSubscriber<>(new Predicate<Integer>() { @Override public boolean test(Integer v) throws Exception { return true; } }, Functions.<Throwable>emptyConsumer(), Functions.EMPTY_ACTION); 
List<Throwable> list = TestHelper.trackPluginErrors(); try { s.onSubscribe(new BooleanSubscription()); BooleanSubscription bs = new BooleanSubscription(); s.onSubscribe(bs); assertTrue(bs.isCancelled()); TestHelper.assertError(list, 0, IllegalStateException.class, "Subscription already set!"); } finally { RxJavaPlugins.reset(); } } @Test public void suppressAfterCompleteEvents() { List<Throwable> errors = TestHelper.trackPluginErrors(); try { final TestSubscriber<Integer> ts = new TestSubscriber<>(); ts.onSubscribe(new BooleanSubscription()); ForEachWhileSubscriber<Integer> s = new ForEachWhileSubscriber<>(new Predicate<Integer>() { @Override public boolean test(Integer v) throws Exception { ts.onNext(v); return true; } }, new Consumer<Throwable>() { @Override public void accept(Throwable e) throws Exception { ts.onError(e); } }, new Action() { @Override public void run() throws Exception { ts.onComplete(); } }); s.onComplete(); s.onNext(1); s.onError(new TestException()); s.onComplete(); ts.assertResult(); TestHelper.assertUndeliverable(errors, 0, TestException.class); } finally { RxJavaPlugins.reset(); } } @Test public void onNextCrashes() { final TestSubscriber<Integer> ts = new TestSubscriber<>(); ts.onSubscribe(new BooleanSubscription()); ForEachWhileSubscriber<Integer> s = new ForEachWhileSubscriber<>(new Predicate<Integer>() { @Override public boolean test(Integer v) throws Exception { throw new TestException(); } }, new Consumer<Throwable>() { @Override public void accept(Throwable e) throws Exception { ts.onError(e); } }, new Action() { @Override public void run() throws Exception { ts.onComplete(); } }); BooleanSubscription b = new BooleanSubscription(); s.onSubscribe(b); s.onNext(1); assertTrue(b.isCancelled()); ts.assertFailure(TestException.class); } @Test public void onErrorThrows() { ForEachWhileSubscriber<Integer> s = new ForEachWhileSubscriber<>(new Predicate<Integer>() { @Override public boolean test(Integer v) throws Exception { return true; } }, 
new Consumer<Throwable>() { @Override public void accept(Throwable e) throws Exception { throw new TestException("Inner"); } }, new Action() { @Override public void run() throws Exception { } }); List<Throwable> list = TestHelper.trackPluginErrors(); try { s.onSubscribe(new BooleanSubscription()); s.onError(new TestException("Outer")); TestHelper.assertError(list, 0, CompositeException.class); List<Throwable> cel = TestHelper.compositeList(list.get(0)); TestHelper.assertError(cel, 0, TestException.class, "Outer"); TestHelper.assertError(cel, 1, TestException.class, "Inner"); } finally { RxJavaPlugins.reset(); } } @Test public void onCompleteThrows() { ForEachWhileSubscriber<Integer> s = new ForEachWhileSubscriber<>(new Predicate<Integer>() { @Override public boolean test(Integer v) throws Exception { return true; } }, new Consumer<Throwable>() { @Override public void accept(Throwable e) throws Exception { } }, new Action() { @Override public void run() throws Exception { throw new TestException("Inner"); } }); List<Throwable> list = TestHelper.trackPluginErrors(); try { s.onSubscribe(new BooleanSubscription()); s.onComplete(); TestHelper.assertUndeliverable(list, 0, TestException.class, "Inner"); } finally { RxJavaPlugins.reset(); } } @Test public void subscribeConsumerConsumerWithError() { final List<Integer> list = new ArrayList<>(); Flowable.<Integer>error(new TestException()).subscribe(new Consumer<Integer>() { @Override public void accept(Integer v) throws Exception { list.add(v); } }, new Consumer<Throwable>() { @Override public void accept(Throwable e) throws Exception { list.add(100); } }); assertEquals(Arrays.asList(100), list); } @Test public void methodTestCancelled() { PublishProcessor<Integer> pp = PublishProcessor.create(); pp.test(Long.MAX_VALUE, true); assertFalse(pp.hasSubscribers()); } @Test public void safeSubscriberAlreadySafe() { TestSubscriber<Integer> ts = new TestSubscriber<>(); Flowable.just(1).safeSubscribe(new SafeSubscriber<>(ts)); 
ts.assertResult(1); } @Test public void methodTestNoCancel() { PublishProcessor<Integer> pp = PublishProcessor.create(); pp.test(Long.MAX_VALUE, false); assertTrue(pp.hasSubscribers()); } @Test public void subscribeConsumerConsumer() { final List<Integer> list = new ArrayList<>(); Flowable.just(1).subscribe(new Consumer<Integer>() { @Override public void accept(Integer v) throws Exception { list.add(v); } }, new Consumer<Throwable>() { @Override public void accept(Throwable e) throws Exception { list.add(100); } }); assertEquals(Arrays.asList(1), list); } @SuppressWarnings("rawtypes") @Test public void pluginNull() { RxJavaPlugins.setOnFlowableSubscribe(new BiFunction<Flowable, Subscriber, Subscriber>() { @Override public Subscriber apply(Flowable a, Subscriber b) throws Exception { return null; } }); try { try { Flowable.just(1).test(); fail("Should have thrown"); } catch (NullPointerException ex) { assertEquals("The RxJavaPlugins.onSubscribe hook returned a null FlowableSubscriber. Please check the handler provided to RxJavaPlugins.setOnFlowableSubscribe for invalid null returns. Further reading: https://github.com/ReactiveX/RxJava/wiki/Plugins", ex.getMessage()); } } finally { RxJavaPlugins.reset(); } } static final
FlowableSubscriberTest
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converted/converter/elementCollection/CollectionCompositeElementConversionTest.java
{ "start": 2263, "end": 2514 }
class ____ { public ColorType eyeColor; public ColorType hairColor; public Traits() { } public Traits( ColorType eyeColor, ColorType hairColor) { this.eyeColor = eyeColor; this.hairColor = hairColor; } } public static
Traits
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/hybrid/tiered/tier/memory/MemoryTierSubpartitionProducerAgent.java
{ "start": 1291, "end": 3472 }
class ____ { private final int subpartitionId; /** * The {@link NettyConnectionWriter} is used to write buffers to the netty connection. * * <p>Note that this field can be null before the netty connection is established. */ @Nullable private volatile NettyConnectionWriter nettyConnectionWriter; private int finishedBufferIndex; MemoryTierSubpartitionProducerAgent(int subpartitionId) { this.subpartitionId = subpartitionId; } // ------------------------------------------------------------------------ // Called by MemoryTierProducerAgent // ------------------------------------------------------------------------ void connectionEstablished(NettyConnectionWriter nettyConnectionWriter) { this.nettyConnectionWriter = nettyConnectionWriter; } void addFinishedBuffer(Buffer buffer) { NettyPayload toAddBuffer = NettyPayload.newBuffer(buffer, finishedBufferIndex, subpartitionId); addFinishedBuffer(toAddBuffer); } void updateSegmentId(int segmentId) { NettyPayload segmentNettyPayload = NettyPayload.newSegment(segmentId); addFinishedBuffer(segmentNettyPayload); } int numQueuedBuffers() { return nettyConnectionWriter == null ? 0 : checkNotNull(nettyConnectionWriter).numQueuedBufferPayloads(); } void release() { if (nettyConnectionWriter != null) { checkNotNull(nettyConnectionWriter).close(null); } } // ------------------------------------------------------------------------ // Internal Methods // ------------------------------------------------------------------------ private void addFinishedBuffer(NettyPayload nettyPayload) { finishedBufferIndex++; checkNotNull(nettyConnectionWriter).writeNettyPayload(nettyPayload); if (checkNotNull(nettyConnectionWriter).numQueuedPayloads() <= 1 || checkNotNull(nettyConnectionWriter).numQueuedBufferPayloads() <= 1) { checkNotNull(nettyConnectionWriter).notifyAvailable(); } } }
MemoryTierSubpartitionProducerAgent
java
google__guava
android/guava/src/com/google/common/collect/MinMaxPriorityQueue.java
{ "start": 17605, "end": 19079 }
class ____<E> { final E toTrickle; final E replaced; MoveDesc(E toTrickle, E replaced) { this.toTrickle = toTrickle; this.replaced = replaced; } } /** Removes and returns the value at {@code index}. */ private E removeAndGet(int index) { E value = elementData(index); removeAt(index); return value; } private Heap heapForIndex(int i) { return isEvenLevel(i) ? minHeap : maxHeap; } private static final int EVEN_POWERS_OF_TWO = 0x55555555; private static final int ODD_POWERS_OF_TWO = 0xaaaaaaaa; @VisibleForTesting static boolean isEvenLevel(int index) { int oneBased = ~~(index + 1); // for GWT checkState(oneBased > 0, "negative index"); return (oneBased & EVEN_POWERS_OF_TWO) > (oneBased & ODD_POWERS_OF_TWO); } /** * Returns {@code true} if the MinMax heap structure holds. This is only used in testing. * * <p>TODO(kevinb): move to the test class? */ @VisibleForTesting boolean isIntact() { for (int i = 1; i < size; i++) { if (!heapForIndex(i).verifyIndex(i)) { return false; } } return true; } /** * Each instance of MinMaxPriorityQueue encapsulates two instances of Heap: a min-heap and a * max-heap. Conceptually, these might each have their own array for storage, but for efficiency's * sake they are stored interleaved on alternate heap levels in the same array (MMPQ.queue). */ @WeakOuter private final
MoveDesc
java
assertj__assertj-core
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/Assertions_assertThat_with_Stream_Test.java
{ "start": 1032, "end": 4067 }
class ____ { private StringStream stringStream = new StringStream(); @Test void should_create_Assert() { Object assertions = assertThat(Stream.of("Luke", "Leia")); assertThat(assertions).isNotNull(); } @Test void isEqualTo_should_honor_comparing_the_same_mocked_stream() { Stream<?> stream = mock(); assertThat(stream).isEqualTo(stream); } @Test void stream_can_be_asserted_twice() { Stream<String> names = Stream.of("Luke", "Leia"); assertThat(names).containsExactly("Luke", "Leia") .containsExactly("Luke", "Leia"); } @Test void should_not_consume_stream_when_asserting_non_null() { Stream<?> stream = mock(); assertThat(stream).isNotNull(); verifyNoInteractions(stream); } @Test void isInstanceOf_should_check_the_original_stream_without_consuming_it() { Stream<?> stream = mock(); assertThat(stream).isInstanceOf(Stream.class); verifyNoInteractions(stream); } @Test void isInstanceOfAny_should_check_the_original_stream_without_consuming_it() { Stream<?> stream = mock(); assertThat(stream).isInstanceOfAny(Stream.class, String.class); verifyNoInteractions(stream); } @Test void isOfAnyClassIn_should_check_the_original_stream_without_consuming_it() { assertThat(stringStream).isOfAnyClassIn(Double.class, StringStream.class); } @Test void isExactlyInstanceOf_should_check_the_original_stream() { assertThat(new StringStream()).isExactlyInstanceOf(StringStream.class); } @Test void isNotExactlyInstanceOf_should_check_the_original_stream() { assertThat(stringStream).isNotExactlyInstanceOf(Stream.class); Throwable error = catchThrowable(() -> assertThat(stringStream).isNotExactlyInstanceOf(StringStream.class)); assertThat(error).isInstanceOf(AssertionError.class); } @Test void isNotInstanceOf_should_check_the_original_stream() { assertThat(stringStream).isNotInstanceOf(Long.class); } @Test void isNotInstanceOfAny_should_check_the_original_stream() { assertThat(stringStream).isNotInstanceOfAny(Long.class, String.class); } @Test void isNotOfAnyClassIn_should_check_the_original_stream() { 
assertThat(stringStream).isNotOfAnyClassIn(Long.class, String.class); } @Test void isSameAs_should_check_the_original_stream_without_consuming_it() { Stream<?> stream = mock(); assertThat(stream).isSameAs(stream); verifyNoInteractions(stream); } @Test void isNotSameAs_should_check_the_original_stream_without_consuming_it() { Stream<?> stream = mock(); try { assertThat(stream).isNotSameAs(stream); } catch (AssertionError e) { verifyNoInteractions(stream); return; } Assertions.fail("Expected assertionError, because assert notSame on same stream."); } @Test void stream_with_upper_bound_assertions() { // GIVEN Stream<? extends Foo> foos = Stream.of(); // THEN assertThat(foos).hasSize(0); } public static
Assertions_assertThat_with_Stream_Test
java
quarkusio__quarkus
core/deployment/src/main/java/io/quarkus/deployment/builditem/GeneratedClassBuildItem.java
{ "start": 899, "end": 3037 }
class ____ extends MultiBuildItem { final boolean applicationClass; final String name; String binaryName; String internalName; final byte[] classData; final String source; public GeneratedClassBuildItem(boolean applicationClass, String name, byte[] classData) { this(applicationClass, name, classData, null); } public GeneratedClassBuildItem(boolean applicationClass, String name, byte[] classData, String source) { if (name.startsWith("/")) { throw new IllegalArgumentException("Name cannot start with '/':" + name); } this.applicationClass = applicationClass; this.name = name; this.classData = classData; this.source = source; } public boolean isApplicationClass() { return applicationClass; } /** * {@return a name for this class} * * @deprecated This method may return the binary name, the internal name, or a hybrid thereof and should not be * used. Use {@link #binaryName()} or {@link #internalName()} instead. */ @Deprecated(forRemoval = true) public String getName() { return name; } /** * {@return the <em>binary name</em> of the class, which is delimited by <code>.</code> characters} */ public String binaryName() { String binaryName = this.binaryName; if (binaryName == null) { binaryName = this.binaryName = name.replace('/', '.'); } return binaryName; } /** * {@return the <em>internal name</em> of the class, which is delimited by <code>/</code> characters} */ public String internalName() { String internalName = this.internalName; if (internalName == null) { internalName = this.internalName = name.replace('.', '/'); } return internalName; } public byte[] getClassData() { return classData; } public String getSource() { return source; } public String toString() { return "GeneratedClassBuildItem[" + binaryName() + "]"; } }
GeneratedClassBuildItem
java
apache__flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableEnvironment.java
{ "start": 62086, "end": 62910 }
class ____ for the format of the path. * @param ignoreIfNotExists If false exception will be thrown if the view to drop does not * exist. * @return true if view existed in the given path and was dropped, false if view didn't exist in * the given path and ignoreIfNotExists was true. */ boolean dropView(String path, boolean ignoreIfNotExists); /** * Drops a model registered in the given path. * * <p>This method can only drop permanent objects. Temporary objects can shadow permanent ones. * If a temporary object exists in a given path, make sure to drop the temporary object first * using {@link #dropTemporaryModel}. * * @param path The given path under which the model will be dropped. See also the {@link * TableEnvironment}
description
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/query/sqm/tree/expression/SqmXmlTableFunction.java
{ "start": 2160, "end": 8392 }
class ____<T> extends SelfRenderingSqmSetReturningFunction<T> implements JpaXmlTableFunction { private final Columns columns; public SqmXmlTableFunction( SqmSetReturningFunctionDescriptor descriptor, SetReturningFunctionRenderer renderer, @Nullable ArgumentsValidator argumentsValidator, SetReturningFunctionTypeResolver setReturningTypeResolver, NodeBuilder nodeBuilder, SqmExpression<String> xpath, SqmExpression<?> document) { this( descriptor, renderer, createArgumentsList( xpath, document ), argumentsValidator, setReturningTypeResolver, nodeBuilder, new ArrayList<>() ); } // Need to suppress some Checker Framework errors, because passing the `this` reference is unsafe, // though we make it safe by not calling any methods on it until initialization finishes @SuppressWarnings({"uninitialized", "assignment", "argument"}) private SqmXmlTableFunction( SqmSetReturningFunctionDescriptor descriptor, SetReturningFunctionRenderer renderer, List<SqmTypedNode<?>> arguments, @Nullable ArgumentsValidator argumentsValidator, SetReturningFunctionTypeResolver setReturningTypeResolver, NodeBuilder nodeBuilder, ArrayList<ColumnDefinition> columnDefinitions) { super( descriptor, renderer, arguments, argumentsValidator, setReturningTypeResolver, nodeBuilder, "xmltable" ); this.columns = new Columns( this, columnDefinitions ); arguments.set( arguments.size() - 1, this.columns ); } private static List<SqmTypedNode<?>> createArgumentsList(SqmExpression<String> xpath, SqmExpression<?> document) { // Since the last argument is the Columns object, though that needs the `this` reference, // we need to construct an array with a null slot at the end, where the Columns instance is put into. 
// Suppress nullness checks as this will eventually turn non-nullable @SuppressWarnings("nullness") final SqmTypedNode<?>[] array = new SqmTypedNode[] {xpath, document, null}; return Arrays.asList( array ); } @Override public SqmXmlTableFunction<T> copy(SqmCopyContext context) { final SqmXmlTableFunction<T> existing = context.getCopy( this ); if ( existing != null ) { return existing; } final List<? extends SqmTypedNode<?>> arguments = getArguments(); final List<SqmTypedNode<?>> argumentsCopy = new ArrayList<>( arguments.size() ); for ( int i = 0; i < arguments.size() - 1; i++ ) { argumentsCopy.add( arguments.get( i ).copy( context ) ); } final SqmXmlTableFunction<T> tableFunction = new SqmXmlTableFunction<>( getFunctionDescriptor(), getFunctionRenderer(), argumentsCopy, getArgumentsValidator(), getSetReturningTypeResolver(), nodeBuilder(), columns.columnDefinitions ); context.registerCopy( this, tableFunction ); tableFunction.columns.columnDefinitions.ensureCapacity( columns.columnDefinitions.size() ); for ( ColumnDefinition columnDefinition : columns.columnDefinitions ) { tableFunction.columns.columnDefinitions.add( columnDefinition.copy( context ) ); } return tableFunction; } @Override protected List<SqlAstNode> resolveSqlAstArguments(List<? 
extends SqmTypedNode<?>> sqmArguments, SqmToSqlAstConverter walker) { // The last argument is the SqmXmlTableFunction.Columns which will convert to null, so remove that final List<SqlAstNode> sqlAstNodes = super.resolveSqlAstArguments( sqmArguments, 0, sqmArguments.size() - 1, walker ); final List<XmlTableColumnDefinition> definitions = new ArrayList<>( columns.columnDefinitions.size() ); for ( ColumnDefinition columnDefinition : columns.columnDefinitions ) { definitions.add( columnDefinition.convertToSqlAst( walker ) ); } sqlAstNodes.add( new XmlTableColumnsClause( definitions ) ); return sqlAstNodes; } @Override public JpaXmlTableColumnNode<String> queryColumn(String columnName) { return queryColumn( columnName, null ); } @Override public JpaXmlTableColumnNode<String> queryColumn(String columnName, @Nullable String xpath) { final QueryColumnDefinition definition = new QueryColumnDefinition( this, columnName, nodeBuilder().getTypeConfiguration().getBasicTypeRegistry().resolve( String.class, SqlTypes.SQLXML ), xpath ); columns.addColumn( definition ); return definition; } @Override public <X> JpaXmlTableColumnNode<X> valueColumn(String columnName, Class<X> type) { return valueColumn( columnName, type, null ); } @Override public <X> JpaXmlTableColumnNode<X> valueColumn(String columnName, JpaCastTarget<X> castTarget) { return valueColumn( columnName, castTarget, null ); } @Override public <X> JpaXmlTableColumnNode<X> valueColumn(String columnName, Class<X> type, @Nullable String xpath) { return valueColumn( columnName, nodeBuilder().castTarget( type ), xpath ); } @Override public <X> JpaXmlTableColumnNode<X> valueColumn(String columnName, JpaCastTarget<X> castTarget, @Nullable String xpath) { final ValueColumnDefinition<X> definition = new ValueColumnDefinition<>( this, columnName, (SqmCastTarget<X>) castTarget, xpath ); columns.addColumn( definition ); return definition; } @Override public SqmXmlTableFunction<T> ordinalityColumn(String columnName) { 
columns.addColumn( new OrdinalityColumnDefinition( columnName, nodeBuilder().getLongType() ) ); return this; } @Override public void appendHqlString(StringBuilder hql, SqmRenderContext context) { hql.append( "xmltable(" ); getArguments().get( 0 ).appendHqlString( hql, context ); hql.append( " passing " ); getArguments().get( 1 ).appendHqlString( hql, context ); columns.appendHqlString( hql, context ); hql.append( ')' ); } private void checkTypeResolved() { if ( isTypeResolved() ) { throw new IllegalStateException( "Type for xmltable function is already resolved. Mutation is not allowed anymore" ); } } @Override public boolean isCompatible(Object object) { return object instanceof SqmXmlTableFunction<?> that && super.isCompatible( object ) && columns.isCompatible( that.columns ); } @Override public int cacheHashCode() { int result = super.cacheHashCode(); result = 31 * result + columns.cacheHashCode(); return result; } sealed
SqmXmlTableFunction
java
elastic__elasticsearch
x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/cache/common/CacheFileTests.java
{ "start": 23462, "end": 26089 }
class ____ implements CacheFile.ModificationListener { private final Set<CacheFile> updates = new HashSet<>(); private final Set<CacheFile> deletes = new HashSet<>(); @Override public synchronized void onCacheFileNeedsFsync(CacheFile cacheFile) { assertTrue(updates.add(cacheFile)); } synchronized boolean containsUpdate(CacheFile cacheFile) { return updates.contains(cacheFile); } @Override public synchronized void onCacheFileDelete(CacheFile cacheFile) { assertTrue(deletes.add(cacheFile)); } synchronized boolean containsDelete(CacheFile cacheFile) { return deletes.contains(cacheFile); } synchronized void reset() { updates.clear(); deletes.clear(); } } public static void assertNumberOfFSyncs(final Path path, final Matcher<Integer> matcher) { final BlobCacheTestUtils.FSyncTrackingFileSystemProvider provider = (BlobCacheTestUtils.FSyncTrackingFileSystemProvider) path .getFileSystem() .provider(); final Integer fsyncCount = provider.getNumberOfFSyncs(path); assertThat("File [" + path + "] was never fsynced", fsyncCount, notNullValue()); assertThat("Mismatching number of fsync for [" + path + "]", fsyncCount, matcher); } private static BlobCacheTestUtils.FSyncTrackingFileSystemProvider setupFSyncCountingFileSystem() { final FileSystem defaultFileSystem = PathUtils.getDefaultFileSystem(); final BlobCacheTestUtils.FSyncTrackingFileSystemProvider provider = new BlobCacheTestUtils.FSyncTrackingFileSystemProvider( defaultFileSystem, createTempDir() ); PathUtilsForTesting.installMock(provider.getFileSystem(null)); return provider; } private static void fill(FileChannel fileChannel, int from, int to) { final byte[] buffer = new byte[Math.min(Math.max(0, to - from), 1024)]; Arrays.fill(buffer, (byte) 0xff); assert fileChannel.isOpen(); try { int written = 0; int remaining = to - from; while (remaining > 0) { final int len = Math.min(remaining, buffer.length); fileChannel.write(ByteBuffer.wrap(buffer, 0, len), from + written); remaining -= len; written += len; } assert written == 
to - from; } catch (IOException e) { throw new AssertionError(e); } } }
TestCacheFileModificationListener
java
apache__avro
lang/java/avro/src/main/java/org/apache/avro/specific/SpecificData.java
{ "start": 20194, "end": 21296 }
class ____ c = (Class) type; String fullName = c.getName(); Schema schema = names.get(fullName); if (schema == null) try { schema = (Schema) (c.getDeclaredField("SCHEMA$").get(null)); if (!fullName.equals(getClassName(schema))) // HACK: schema mismatches class. maven shade plugin? try replacing. schema = new Schema.Parser() .parse(schema.toString().replace(schema.getNamespace(), c.getPackage().getName())); } catch (NoSuchFieldException e) { throw new AvroRuntimeException("Not a Specific class: " + c); } catch (IllegalAccessException e) { throw new AvroRuntimeException(e); } names.put(fullName, schema); return schema; } throw new AvroTypeException("Unknown type: " + type); } @Override protected String getSchemaName(Object datum) { if (datum != null) { Class c = datum.getClass(); if (isStringable(c)) return Schema.Type.STRING.getName(); } return super.getSchemaName(datum); } /** True if a
Class
java
spring-projects__spring-framework
spring-aop/src/test/java/org/springframework/aop/support/PointcutsTests.java
{ "start": 1040, "end": 2094 }
class ____ { public static Method TEST_BEAN_SET_AGE; public static Method TEST_BEAN_GET_AGE; public static Method TEST_BEAN_GET_NAME; public static Method TEST_BEAN_ABSQUATULATE; static { try { TEST_BEAN_SET_AGE = TestBean.class.getMethod("setAge", int.class); TEST_BEAN_GET_AGE = TestBean.class.getMethod("getAge"); TEST_BEAN_GET_NAME = TestBean.class.getMethod("getName"); TEST_BEAN_ABSQUATULATE = TestBean.class.getMethod("absquatulate"); } catch (Exception ex) { throw new RuntimeException("Shouldn't happen: error in test suite"); } } /** * Matches only TestBean class, not subclasses */ public static Pointcut allTestBeanMethodsPointcut = new StaticMethodMatcherPointcut() { @Override public ClassFilter getClassFilter() { return type -> type.equals(TestBean.class); } @Override public boolean matches(Method m, @Nullable Class<?> targetClass) { return true; } }; public static Pointcut allClassSetterPointcut = Pointcuts.SETTERS; // Subclass used for matching public static
PointcutsTests
java
apache__dubbo
dubbo-common/src/test/java/org/apache/dubbo/common/extension/ext5/impl/Ext5Impl1.java
{ "start": 967, "end": 1101 }
class ____ implements NoAdaptiveMethodExt { public String echo(URL url, String s) { return "Ext5Impl1-echo"; } }
Ext5Impl1
java
mapstruct__mapstruct
processor/src/test/java/org/mapstruct/ap/test/abstractclass/SourceTargetMapper.java
{ "start": 409, "end": 825 }
class ____ extends AbstractBaseMapper { public static final SourceTargetMapper INSTANCE = Mappers.getMapper( SourceTargetMapper.class ); public abstract Target sourceToTarget(Source source); protected String calendarToString(Calendar calendar) { DateFormat format = new SimpleDateFormat( "dd.MM.yyyy" ); return "Birthday: " + format.format( calendar.getTime() ); } }
SourceTargetMapper
java
spring-projects__spring-security
web/src/main/java/org/springframework/security/web/server/ObservationWebFilterChainDecorator.java
{ "start": 18121, "end": 19525 }
class ____ implements ObservationConvention<WebFilterChainObservationContext> { static final String CHAIN_OBSERVATION_NAME = "spring.security.filterchains"; private static final String CHAIN_POSITION_NAME = "spring.security.filterchain.position"; private static final String CHAIN_SIZE_NAME = "spring.security.filterchain.size"; private static final String FILTER_SECTION_NAME = "spring.security.reached.filter.section"; private static final String FILTER_NAME = "spring.security.reached.filter.name"; @Override public String getName() { return CHAIN_OBSERVATION_NAME; } @Override public String getContextualName(WebFilterChainObservationContext context) { return "security filterchain " + context.getFilterSection(); } @Override public KeyValues getLowCardinalityKeyValues(WebFilterChainObservationContext context) { return KeyValues.of(CHAIN_SIZE_NAME, String.valueOf(context.getChainSize())) .and(CHAIN_POSITION_NAME, String.valueOf(context.getChainPosition())) .and(FILTER_SECTION_NAME, context.getFilterSection()) .and(FILTER_NAME, (StringUtils.hasText(context.getFilterName())) ? context.getFilterName() : KeyValue.NONE_VALUE); } @Override public boolean supportsContext(Observation.Context context) { return context instanceof WebFilterChainObservationContext; } } private static final
WebFilterChainObservationConvention
java
quarkusio__quarkus
integration-tests/maven/src/test/java/io/quarkus/maven/it/CreateExtensionMojoIT.java
{ "start": 1026, "end": 9769 }
class ____ extends QuarkusPlatformAwareMojoTestBase { private Invoker invoker; private File testDir; @Test public void testCreateCoreExtension(TestInfo testInfo) throws Throwable { testDir = initProject("projects/create-extension-quarkus-core", "output/create-extension-quarkus-core"); assertThat(testDir).isDirectory(); invoker = initInvoker(testDir); Properties properties = new Properties(); properties.put("extensionId", "my-ext"); InvocationResult result = setup(properties); assertThat(result.getExitCode()).isZero(); final Path testDirPath = testDir.toPath(); assertThatDirectoryTreeMatchSnapshots(testInfo, testDirPath) .contains( "extensions/my-ext/pom.xml", "extensions/my-ext/runtime/src/main/resources/META-INF/quarkus-extension.yaml", "extensions/my-ext/deployment/src/main/java/org/acme/my/ext/deployment/MyExtProcessor.java", "integration-tests/my-ext/pom.xml", "integration-tests/my-ext/src/test/java/org/acme/my/ext/it/MyExtResourceTest.java"); assertThatMatchSnapshot(testInfo, testDirPath, "extensions/my-ext/pom.xml"); assertThatMatchSnapshot(testInfo, testDirPath, "extensions/my-ext/runtime/src/main/resources/META-INF/quarkus-extension.yaml"); assertThatMatchSnapshot(testInfo, testDirPath, "bom/application/pom.xml"); assertThatMatchSnapshot(testInfo, testDirPath, "integration-tests/pom.xml"); assertThatMatchSnapshot(testInfo, testDirPath, "extensions/pom.xml"); } @Test public void testCreateCoreExtensionFromExtensionsDir(TestInfo testInfo) throws Throwable { testDir = initProject("projects/create-extension-quarkus-core", "output/create-extension-quarkus-core-extensions-dir"); assertThat(testDir).isDirectory(); invoker = initInvoker(testDir.toPath().resolve("extensions/").toFile()); Properties properties = new Properties(); properties.put("extensionId", "quarkus-my-ext"); InvocationResult result = setup(properties); assertThat(result.getExitCode()).isZero(); final Path testDirPath = testDir.toPath(); assertThatDirectoryTreeMatchSnapshots(testInfo, testDirPath) 
.contains( "extensions/my-ext/pom.xml", "extensions/my-ext/deployment/src/main/java/org/acme/my/ext/deployment/MyExtProcessor.java", "integration-tests/my-ext/pom.xml", "integration-tests/my-ext/src/test/java/org/acme/my/ext/it/MyExtResourceTest.java"); assertThatMatchSnapshot(testInfo, testDirPath, "extensions/my-ext/pom.xml"); assertThatMatchSnapshot(testInfo, testDirPath, "extensions/my-ext/runtime/src/main/resources/META-INF/quarkus-extension.yaml"); assertThatMatchSnapshot(testInfo, testDirPath, "bom/application/pom.xml"); assertThatMatchSnapshot(testInfo, testDirPath, "integration-tests/pom.xml"); assertThatMatchSnapshot(testInfo, testDirPath, "extensions/pom.xml"); } @Test public void testCreateQuarkiverseExtension(TestInfo testInfo) throws Throwable { testDir = initEmptyProject("output/create-quarkiverse-extension"); assertThat(testDir).isDirectory(); invoker = initInvoker(testDir); Properties properties = new Properties(); properties.put("groupId", "io.quarkiverse.my-quarkiverse-ext"); properties.put("extensionId", "my-quarkiverse-ext"); properties.put("quarkusVersion", "3.14.0"); properties.put("extensionName", "My Quarkiverse extension"); properties.put("extensionDescription", "My Quarkiverse extension description"); properties.put("withCodestart", "true"); InvocationResult result = setup(properties); assertThat(result.getExitCode()).isZero(); final Path testDirPath = testDir.toPath(); assertThatDirectoryTreeMatchSnapshots(testInfo, testDirPath) .contains( "quarkus-my-quarkiverse-ext/pom.xml", "quarkus-my-quarkiverse-ext/runtime/src/main/codestarts/quarkus/my-quarkiverse-ext-codestart/codestart.yml", "quarkus-my-quarkiverse-ext/deployment/src/main/java/io/quarkiverse/my/quarkiverse/ext/deployment/MyQuarkiverseExtProcessor.java", "quarkus-my-quarkiverse-ext/integration-tests/pom.xml", "quarkus-my-quarkiverse-ext/integration-tests/src/test/java/io/quarkiverse/my/quarkiverse/ext/it/MyQuarkiverseExtResourceTest.java"); assertThatMatchSnapshot(testInfo, 
testDirPath, "quarkus-my-quarkiverse-ext/pom.xml"); assertThatMatchSnapshot(testInfo, testDirPath, "quarkus-my-quarkiverse-ext/runtime/pom.xml"); assertThatMatchSnapshot(testInfo, testDirPath, "quarkus-my-quarkiverse-ext/runtime/src/main/resources/META-INF/quarkus-extension.yaml"); assertThatMatchSnapshot(testInfo, testDirPath, "quarkus-my-quarkiverse-ext/LICENSE"); assertThatMatchSnapshot(testInfo, testDirPath, "quarkus-my-quarkiverse-ext/README.md"); assertThatMatchSnapshot(testInfo, testDirPath, "quarkus-my-quarkiverse-ext/.github/workflows/build.yml"); assertThatMatchSnapshot(testInfo, testDirPath, "quarkus-my-quarkiverse-ext/.github/workflows/pre-release.yml"); assertThatMatchSnapshot(testInfo, testDirPath, "quarkus-my-quarkiverse-ext/.github/workflows/quarkus-snapshot.yaml"); assertThatMatchSnapshot(testInfo, testDirPath, "quarkus-my-quarkiverse-ext/.github/workflows/release-perform.yml"); assertThatMatchSnapshot(testInfo, testDirPath, "quarkus-my-quarkiverse-ext/.github/workflows/release-prepare.yml"); assertThatMatchSnapshot(testInfo, testDirPath, "quarkus-my-quarkiverse-ext/docs/pom.xml"); assertThatMatchSnapshot(testInfo, testDirPath, "quarkus-my-quarkiverse-ext/docs/antora.yml"); assertThatMatchSnapshot(testInfo, testDirPath, "quarkus-my-quarkiverse-ext/docs/modules/ROOT/nav.adoc"); assertThatMatchSnapshot(testInfo, testDirPath, "quarkus-my-quarkiverse-ext/docs/modules/ROOT/pages/index.adoc"); assertThatMatchSnapshot(testInfo, testDirPath, "quarkus-my-quarkiverse-ext/integration-tests/pom.xml"); } @Test public void testCreateStandaloneExtension(TestInfo testInfo) throws Throwable { testDir = initEmptyProject("output/create-standalone-extension"); assertThat(testDir).isDirectory(); invoker = initInvoker(testDir); Properties properties = new Properties(); properties.put("groupId", "io.standalone"); properties.put("extensionId", "my-own-ext"); properties.put("namespaceId", "my-org-"); properties.put("quarkusVersion", "3.14.0"); InvocationResult result = 
setup(properties); assertThat(result.getExitCode()).isZero(); final Path testDirPath = testDir.toPath(); assertThatDirectoryTreeMatchSnapshots(testInfo, testDirPath) .contains( "my-org-my-own-ext/pom.xml", "my-org-my-own-ext/deployment/src/main/java/io/standalone/my/own/ext/deployment/MyOwnExtProcessor.java", "my-org-my-own-ext/integration-tests/pom.xml", "my-org-my-own-ext/integration-tests/src/test/java/io/standalone/my/own/ext/it/MyOwnExtResourceTest.java"); assertThatMatchSnapshot(testInfo, testDirPath, "my-org-my-own-ext/pom.xml"); assertThatMatchSnapshot(testInfo, testDirPath, "my-org-my-own-ext/runtime/pom.xml"); } private InvocationResult setup(Properties params) throws MavenInvocationException, FileNotFoundException, UnsupportedEncodingException { InvocationRequest request = new DefaultInvocationRequest(); request.setBatchMode(true); request.setGoals(Collections.singletonList( getMavenPluginGroupId() + ":" + getMavenPluginArtifactId() + ":" + getMavenPluginVersion() + ":create-extension")); request.setDebug(false); request.setShowErrors(true); request.setProperties(params); File log = new File(testDir.getParent(), "build-create-extension-" + testDir.getName() + ".log"); PrintStreamLogger logger = new PrintStreamLogger(new PrintStream(new FileOutputStream(log), false, "UTF-8"), InvokerLogger.DEBUG); invoker.setLogger(logger); return invoker.execute(request); } }
CreateExtensionMojoIT
java
apache__dubbo
dubbo-remoting/dubbo-remoting-http12/src/main/java/org/apache/dubbo/remoting/http12/message/codec/HtmlCodecFactory.java
{ "start": 1295, "end": 1890 }
class ____ implements HttpMessageEncoderFactory, HttpMessageDecoderFactory { @Override public HttpMessageCodec createCodec(URL url, FrameworkModel frameworkModel, String mediaType) { return frameworkModel == FrameworkModel.defaultModel() ? HtmlCodec.INSTANCE : new HtmlCodec(frameworkModel); } @Override public MediaType mediaType() { return MediaType.TEXT_HTML; } @Override public boolean supports(String mediaType) { return mediaType.startsWith(mediaType().getName()) || mediaType.startsWith("application/xhtml"); } }
HtmlCodecFactory
java
spring-projects__spring-boot
module/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/endpoint/annotation/DiscovererEndpointFilterTests.java
{ "start": 1500, "end": 2684 }
class ____ { @Test @SuppressWarnings("NullAway") // Test null check void createWhenDiscovererIsNullShouldThrowException() { assertThatIllegalArgumentException().isThrownBy(() -> new TestDiscovererEndpointFilter(null)) .withMessageContaining("'discoverer' must not be null"); } @Test void matchWhenDiscoveredByDiscovererShouldReturnTrue() { DiscovererEndpointFilter filter = new TestDiscovererEndpointFilter(TestDiscovererA.class); DiscoveredEndpoint<?> endpoint = mockDiscoveredEndpoint(TestDiscovererA.class); assertThat(filter.match(endpoint)).isTrue(); } @Test void matchWhenNotDiscoveredByDiscovererShouldReturnFalse() { DiscovererEndpointFilter filter = new TestDiscovererEndpointFilter(TestDiscovererA.class); DiscoveredEndpoint<?> endpoint = mockDiscoveredEndpoint(TestDiscovererB.class); assertThat(filter.match(endpoint)).isFalse(); } @SuppressWarnings({ "rawtypes", "unchecked" }) private DiscoveredEndpoint<?> mockDiscoveredEndpoint(Class<?> discoverer) { DiscoveredEndpoint endpoint = mock(DiscoveredEndpoint.class); given(endpoint.wasDiscoveredBy(discoverer)).willReturn(true); return endpoint; } static
DiscovererEndpointFilterTests
java
quarkusio__quarkus
extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/xml/orm/OtherNonAnnotatedEntity.java
{ "start": 50, "end": 505 }
class ____ { private long id; private String name; public OtherNonAnnotatedEntity() { } public OtherNonAnnotatedEntity(String name) { this.name = name; } public long getId() { return id; } public void setId(long id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } }
OtherNonAnnotatedEntity
java
apache__flink
flink-queryable-state/flink-queryable-state-runtime/src/test/java/org/apache/flink/queryablestate/network/AbstractServerTest.java
{ "start": 7086, "end": 8833 }
class ____ extends AbstractServerBase<TestMessage, TestMessage> implements AutoCloseable { private final KvStateRequestStats requestStats; TestServer(String name, KvStateRequestStats stats, Iterator<Integer> bindPort) throws UnknownHostException { super(name, InetAddress.getLocalHost().getHostName(), bindPort, 1, 1); this.requestStats = stats; } @Override public AbstractServerHandler<TestMessage, TestMessage> initializeHandler() { return new AbstractServerHandler<TestMessage, TestMessage>( this, new MessageSerializer<>( new TestMessage.TestMessageDeserializer(), new TestMessage.TestMessageDeserializer()), requestStats) { @Override public CompletableFuture<TestMessage> handleRequest( long requestId, TestMessage request) { TestMessage response = new TestMessage(getServerName() + '-' + request.getMessage()); return CompletableFuture.completedFuture(response); } @Override public CompletableFuture<Void> shutdown() { return CompletableFuture.completedFuture(null); } }; } @Override public void close() throws Exception { shutdownServer().get(); assertThat(getQueryExecutor().isTerminated()).isTrue(); assertThat(isEventGroupShutdown()).isTrue(); } } /** Message with a string as payload. */ private static
TestServer
java
elastic__elasticsearch
qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java
{ "start": 5371, "end": 42360 }
class ____ extends PackagingTestCase { private Path tempDir; private static final String PASSWORD = "nothunter2"; private static final String EXAMPLE_PLUGIN_SYSPROP = "tests.example-plugin"; private static final String EXAMPLE_PLUGIN_PATH = System.getProperty(EXAMPLE_PLUGIN_SYSPROP); @BeforeClass public static void filterDistros() { assumeTrue("only Docker", distribution().isDocker()); } @Before public void setupTest() throws IOException { installation = runContainer(distribution(), builder().envVar("ELASTIC_PASSWORD", PASSWORD)); tempDir = createTempDir(DockerTests.class.getSimpleName()); } @After public void teardownTest() { removeContainer(); rm(tempDir); } @Override protected void dumpDebug() { final Result containerLogs = getContainerLogs(); logger.warn("Elasticsearch log stdout:\n" + containerLogs.stdout()); logger.warn("Elasticsearch log stderr:\n" + containerLogs.stderr()); } /** * Checks that the Docker image can be run, and that it passes various checks. */ public void test010Install() throws Exception { verifyContainerInstallation(installation); verifySecurityAutoConfigured(installation); } /** * Check that security is enabled */ public void test011SecurityEnabledStatus() throws Exception { waitForElasticsearch(installation, "elastic", PASSWORD); final int statusCode = makeRequestAsElastic("wrong_password"); assertThat(statusCode, equalTo(401)); } /** * Check that security can be disabled */ public void test012SecurityCanBeDisabled() throws Exception { // restart container with security disabled // We need to set discovery to single-node as with security disabled, autoconfiguration won't run and we won't set // cluster.initial_master_nodes runContainer(distribution(), builder().envVar("xpack.security.enabled", "false").envVar("discovery.type", "single-node")); waitForElasticsearch(installation); final int unauthStatusCode = ServerUtils.makeRequestAndGetStatus(Request.Get("http://localhost:9200"), null, null, null); assertThat(unauthStatusCode, 
equalTo(200)); } /** * Checks that no plugins are initially active. */ public void test020PluginsListWithNoPlugins() { assumeTrue("Only applies to non-Cloud images", distribution().packaging != Packaging.DOCKER_CLOUD_ESS); final Installation.Executables bin = installation.executables(); final Result r = sh.run(bin.pluginTool + " list"); assertThat("Expected no plugins to be listed", r.stdout(), emptyString()); } /** * Check that a plugin can be installed without special permissions. */ public void test021InstallPlugin() { runContainer( distribution(), builder().envVar("ELASTIC_PASSWORD", PASSWORD).volume(Path.of(EXAMPLE_PLUGIN_PATH), "/analysis-icu.zip") ); final String plugin = "analysis-icu"; assertThat("Expected " + plugin + " to not be installed", listPlugins(), not(hasItems(plugin))); final Installation.Executables bin = installation.executables(); sh.run(bin.pluginTool + " install file:///analysis-icu.zip"); assertThat("Expected installed plugins to be listed", listPlugins(), equalTo(List.of("analysis-icu"))); } /** * Checks that ESS images can install plugins from the local archive. 
*/ public void test022InstallPluginsFromLocalArchive() { assumeTrue("Only ESS images have a local archive", distribution().packaging == Packaging.DOCKER_CLOUD_ESS); final String plugin = "analysis-icu"; final Installation.Executables bin = installation.executables(); listPluginArchive().forEach(System.out::println); assertThat("Expected " + plugin + " to not be installed", listPlugins(), not(hasItems(plugin))); assertThat("Expected " + plugin + " available in archive", listPluginArchive(), hasItems(containsString(plugin))); // Stuff the proxy settings with garbage, so any attempt to go out to the internet would fail sh.getEnv() .put( "CLI_JAVA_OPTS", "-Dhttp.proxyHost=example.org -Dhttp.proxyPort=9999 -Dhttps.proxyHost=example.org -Dhttps.proxyPort=9999" ); sh.run(bin.pluginTool + " install --batch analysis-icu"); assertThat("Expected " + plugin + " to be installed", listPlugins(), hasItems(plugin)); } /** * Checks that plugins can be installed by deploying a plugins config file. */ public void test023InstallPluginUsingConfigFile() { final String filename = "elasticsearch-plugins.yml"; append(tempDir.resolve(filename), """ plugins: - id: analysis-icu location: file:///analysis-icu.zip """); // Restart the container. This will sync the plugins automatically. Also // stuff the proxy settings with garbage, so any attempt to go out to the internet would fail. The // command should instead use the bundled plugin archive. 
runContainer( distribution(), builder().volume(tempDir.resolve(filename), installation.config.resolve(filename)) .volume(Path.of(EXAMPLE_PLUGIN_PATH), "/analysis-icu.zip") .envVar("ELASTIC_PASSWORD", PASSWORD) .envVar( "CLI_JAVA_OPTS", "-Dhttp.proxyHost=example.org -Dhttp.proxyPort=9999 -Dhttps.proxyHost=example.org -Dhttps.proxyPort=9999" ) ); // Since ES is doing the installing, give it a chance to complete waitForElasticsearch(installation, "elastic", PASSWORD); assertThat("List of installed plugins is incorrect", listPlugins(), hasItems("analysis-icu")); } /** * Checks that ESS images can manage plugins from the local archive by deploying a plugins config file. */ public void test024InstallPluginFromArchiveUsingConfigFile() { assumeTrue("Only ESS image has a plugin archive", distribution().packaging == Packaging.DOCKER_CLOUD_ESS); final String filename = "elasticsearch-plugins.yml"; append(tempDir.resolve(filename), """ plugins: - id: analysis-icu - id: analysis-phonetic """); // Restart the container. This will sync the plugins automatically. Also // stuff the proxy settings with garbage, so any attempt to go out to the internet would fail. The // command should instead use the bundled plugin archive. runContainer( distribution(), builder().volume(tempDir.resolve(filename), installation.config.resolve(filename)) .envVar("ELASTIC_PASSWORD", PASSWORD) .envVar( "CLI_JAVA_OPTS", "-Dhttp.proxyHost=example.org -Dhttp.proxyPort=9999 -Dhttps.proxyHost=example.org -Dhttps.proxyPort=9999" ) ); // Since ES is doing the installing, give it a chance to complete waitForElasticsearch(installation, "elastic", PASSWORD); assertThat("List of installed plugins is incorrect", listPlugins(), containsInAnyOrder("analysis-icu", "analysis-phonetic")); } /** * Check that when using Elasticsearch's plugins sync capability, it will use a proxy when configured to do so. * This could either be in the plugins config file, or via the standard Java system properties. 
*/ public void test025SyncPluginsUsingProxy() { MockServer.withMockServer(mockServer -> { for (boolean useConfigFile : List.of(true, false)) { mockServer.clearExpectations(); final String config = String.format(Locale.ROOT, """ plugins: # This is the new plugin to install. We don't use an official plugin because then Elasticsearch # will attempt an SSL connection and that just makes everything more complicated. - id: my-plugin location: http://example.com/my-plugin.zip %s """, useConfigFile ? "proxy: mockserver:" + mockServer.getPort() : ""); final String filename = "elasticsearch-plugins.yml"; final Path pluginsConfigPath = tempDir.resolve(filename); deleteIfExists(pluginsConfigPath); append(pluginsConfigPath, config); final DockerRun builder = builder().volume(pluginsConfigPath, installation.config.resolve(filename)) .extraArgs("--link " + mockServer.getContainerId() + ":mockserver"); if (useConfigFile == false) { builder.envVar("CLI_JAVA_OPTS", "-Dhttp.proxyHost=mockserver -Dhttp.proxyPort=" + mockServer.getPort()); } // Restart the container. 
This will sync plugins automatically, which will fail because // ES will be unable to install `my-plugin` final Result result = runContainerExpectingFailure(distribution(), builder); final List<Map<String, String>> interactions = mockServer.getInteractions(); assertThat(result.stderr(), containsString("FileNotFoundException: http://example.com/my-plugin.zip")); // Now check that Elasticsearch did use the proxy server assertThat(interactions, hasSize(1)); final Map<String, String> interaction = interactions.get(0); assertThat(interaction, hasEntry("httpRequest.headers.Host[0]", "example.com")); assertThat(interaction, hasEntry("httpRequest.headers.User-Agent[0]", "elasticsearch-plugin-installer")); assertThat(interaction, hasEntry("httpRequest.method", "GET")); assertThat(interaction, hasEntry("httpRequest.path", "/my-plugin.zip")); } }); } /** * Check that attempting to install the repository plugins that have been migrated to modules succeeds, but does nothing. */ public void test026InstallBundledRepositoryPlugins() { assertThat("Expected no plugins to be installed", listPlugins(), is(empty())); installation.executables().pluginTool.run("install repository-azure repository-gcs repository-s3"); assertThat("Still expected no plugins to be installed", listPlugins(), is(empty())); // Removal should also succeed installation.executables().pluginTool.run("remove repository-azure repository-gcs repository-s3"); } /** * Check that attempting to install the repository plugins that have been migrated to modules succeeds * when using a plugins config file but does nothing. */ public void test026InstallBundledRepositoryPluginsViaConfigFile() { final String filename = "elasticsearch-plugins.yml"; append(tempDir.resolve(filename), """ plugins: - id: repository-azure - id: repository-gcs - id: repository-s3 """); // Restart the container. This will sync the plugins automatically. Also // stuff the proxy settings with garbage, so any attempt to go out to the internet would fail. 
The // sync shouldn't be doing anything anyway. runContainer( distribution(), builder().volume(tempDir.resolve(filename), installation.config.resolve(filename)) .envVar("ELASTIC_PASSWORD", PASSWORD) .envVar( "CLI_JAVA_OPTS", "-Dhttp.proxyHost=example.org -Dhttp.proxyPort=9999 -Dhttps.proxyHost=example.org -Dhttps.proxyPort=9999" ) ); // Since ES is doing the installing, give it a chance to complete waitForElasticsearch(installation, "elastic", PASSWORD); assertThat("Expected no plugins to be installed", listPlugins(), is(empty())); } /** * Check that the JDK's `cacerts` file is a symlink to the copy provided by the operating system. */ public void test040JavaUsesTheOsProvidedKeystore() { final String path = sh.run("realpath jdk/lib/security/cacerts").stdout(); if (distribution.packaging == Packaging.DOCKER || distribution.packaging == Packaging.DOCKER_IRON_BANK) { // In these images, the `cacerts` file ought to be a symlink here assertThat(path, equalTo("/etc/pki/ca-trust/extracted/java/cacerts")); } else if (distribution.packaging == Packaging.DOCKER_WOLFI || distribution.packaging == Packaging.DOCKER_CLOUD_ESS) { // In these images, the `cacerts` file ought to be a symlink here assertThat(path, equalTo("/etc/ssl/certs/java/cacerts")); } else { fail("Unknown distribution: " + distribution.packaging); } } /** * Checks that there are Amazon trusted certificates in the cacaerts keystore. */ public void test041AmazonCaCertsAreInTheKeystore() { final boolean matches = sh.run("jdk/bin/keytool -cacerts -storepass changeit -list | grep trustedCertEntry") .stdout() .lines() .anyMatch(line -> line.contains("amazonrootca")); assertTrue("Expected Amazon trusted cert in cacerts", matches); } /** * Check that when the keystore is created on startup, it is created with the correct permissions. 
*/ public void test042KeystorePermissionsAreCorrect() { waitForElasticsearch(installation, "elastic", PASSWORD); assertThat(installation.config("elasticsearch.keystore"), file(p660)); } /** * Send some basic index, count and delete requests, in order to check that the installation * is minimally functional. */ public void test050BasicApiTests() throws Exception { waitForElasticsearch(installation, "elastic", PASSWORD); assertTrue(existsInContainer(installation.logs.resolve("gc.log"))); runElasticsearchTestsAsElastic(PASSWORD); } /** * Check that the default config can be overridden using a bind mount, and that env vars are respected */ public void test070BindMountCustomPathConfAndJvmOptions() throws Exception { copyFromContainer(installation.config("elasticsearch.yml"), tempDir.resolve("elasticsearch.yml")); copyFromContainer(installation.config("elasticsearch.keystore"), tempDir.resolve("elasticsearch.keystore")); copyFromContainer(installation.config("log4j2.properties"), tempDir.resolve("log4j2.properties")); final Path autoConfigurationDir = findInContainer(installation.config, "d", "\"certs\""); final String autoConfigurationDirName = autoConfigurationDir.getFileName().toString(); copyFromContainer(autoConfigurationDir, tempDir.resolve(autoConfigurationDirName)); // we have to disable Log4j from using JMX lest it will hit a security // manager exception before we have configured logging; this will fail // startup since we detect usages of logging before it is configured final String jvmOptions = "-Xms512m\n-Xmx512m\n-Dlog4j2.disable.jmx=true\n"; append(tempDir.resolve("jvm.options"), jvmOptions); // Make the temp directory and contents accessible when bind-mounted. 
Files.setPosixFilePermissions(tempDir, fromString("rwxrwxrwx")); // These permissions are necessary to run the tests under Vagrant Files.setPosixFilePermissions(tempDir.resolve("elasticsearch.yml"), p644); Files.setPosixFilePermissions(tempDir.resolve("elasticsearch.keystore"), p644); Files.setPosixFilePermissions(tempDir.resolve("log4j2.properties"), p644); Files.setPosixFilePermissions(tempDir.resolve(autoConfigurationDirName), p750); // Restart the container // We need to set discovery to single-node as autoconfiguration has already run when the node started the first time // cluster.initial_master_nodes is set to the name of the original docker container ServerUtils.removeSettingFromExistingConfiguration(tempDir, "cluster.initial_master_nodes"); runContainer( distribution(), builder().volume(tempDir, "/usr/share/elasticsearch/config") .envVar("ES_JAVA_OPTS", "-XX:-UseCompressedOops") .envVar("ELASTIC_PASSWORD", PASSWORD) .envVar("discovery.type", "single-node") ); waitForElasticsearch(installation, "elastic", PASSWORD); final JsonNode nodes = getJson("/_nodes", "elastic", PASSWORD, ServerUtils.getCaCert(installation)).get("nodes"); final String nodeId = nodes.fieldNames().next(); final int heapSize = nodes.at("/" + nodeId + "/jvm/mem/heap_init_in_bytes").intValue(); final boolean usingCompressedPointers = nodes.at("/" + nodeId + "/jvm/using_compressed_ordinary_object_pointers").asBoolean(); logger.warn(nodes.at("/" + nodeId + "/jvm/mem/heap_init_in_bytes")); assertThat("heap_init_in_bytes", heapSize, equalTo(536870912)); assertThat("using_compressed_ordinary_object_pointers", usingCompressedPointers, equalTo(false)); } /** * Check that the default config can be overridden using a bind mount, and that env vars are respected. 
*/ public void test071BindMountCustomPathWithDifferentUID() throws Exception { Platforms.onLinux(() -> { final Path tempEsDataDir = tempDir.resolve("esDataDir"); // Make the local directory and contents accessible when bind-mounted mkDirWithPrivilegeEscalation(tempEsDataDir, 1500, 0); // Restart the container runContainer( distribution(), builder().volume(tempEsDataDir.toAbsolutePath(), installation.data).envVar("ELASTIC_PASSWORD", PASSWORD) ); waitForElasticsearch(installation, "elastic", PASSWORD); final JsonNode nodes = getJson("/_nodes", "elastic", PASSWORD, ServerUtils.getCaCert(installation)); assertThat(nodes.at("/_nodes/total").intValue(), equalTo(1)); assertThat(nodes.at("/_nodes/successful").intValue(), equalTo(1)); assertThat(nodes.at("/_nodes/failed").intValue(), equalTo(0)); // Ensure container is stopped before we remove tempEsDataDir, so nothing // is using the directory. removeContainer(); rmDirWithPrivilegeEscalation(tempEsDataDir); }); } /** * Check that it is possible to run Elasticsearch under a different user and group to the default. * Note that while the default configuration files are world-readable, when we execute Elasticsearch * it will attempt to create a keystore under the `config` directory. This will fail unless * we also bind-mount the config dir. 
*/ public void test072RunEsAsDifferentUserAndGroup() throws Exception { assumeFalse(Platforms.WINDOWS); final Path tempEsDataDir = tempDir.resolve("esDataDir"); final Path tempEsConfigDir = tempDir.resolve("esConfDir"); final Path tempEsLogsDir = tempDir.resolve("esLogsDir"); Files.createDirectory(tempEsConfigDir); Files.createDirectory(tempEsConfigDir.resolve("jvm.options.d")); Files.createDirectory(tempEsDataDir); Files.createDirectory(tempEsLogsDir); copyFromContainer(installation.config("elasticsearch.yml"), tempEsConfigDir); copyFromContainer(installation.config("jvm.options"), tempEsConfigDir); copyFromContainer(installation.config("elasticsearch.keystore"), tempEsConfigDir); copyFromContainer(installation.config("log4j2.properties"), tempEsConfigDir); final Path autoConfigurationDir = findInContainer(installation.config, "d", "\"certs\""); assertThat(autoConfigurationDir, notNullValue()); final String autoConfigurationDirName = autoConfigurationDir.getFileName().toString(); copyFromContainer(autoConfigurationDir, tempEsConfigDir.resolve(autoConfigurationDirName)); chownWithPrivilegeEscalation(tempEsConfigDir, "501:501"); chownWithPrivilegeEscalation(tempEsDataDir, "501:501"); chownWithPrivilegeEscalation(tempEsLogsDir, "501:501"); try { // Restart the container // We need to set discovery to single-node as autoconfiguration has already run when the node started the first time // cluster.initial_master_nodes is set to the name of the original docker container ServerUtils.removeSettingFromExistingConfiguration(tempEsConfigDir, "cluster.initial_master_nodes"); runContainer( distribution(), builder().envVar("ELASTIC_PASSWORD", PASSWORD) .uid(501, 501) .volume(tempEsDataDir.toAbsolutePath(), installation.data) .volume(tempEsConfigDir.toAbsolutePath(), installation.config) .volume(tempEsLogsDir.toAbsolutePath(), installation.logs) .envVar("discovery.type", "single-node") ); waitForElasticsearch(installation, "elastic", PASSWORD); removeContainer(); } finally { 
rmDirWithPrivilegeEscalation(tempEsConfigDir); rmDirWithPrivilegeEscalation(tempEsDataDir); rmDirWithPrivilegeEscalation(tempEsLogsDir); } } /** * Check that it is possible to run Elasticsearch under a different user and group to the default, * without bind-mounting any directories, provided the container user is added to the `root` group. */ public void test073RunEsAsDifferentUserAndGroupWithoutBindMounting() { // Restart the container // We need to set discovery to single-node as autoconfiguration won't run, and we won't set // cluster.initial_master_nodes runContainer( distribution(), builder().extraArgs("--group-add 0").uid(501, 501).envVar("ELASTIC_PASSWORD", PASSWORD).envVar("discovery.type", "single-node") ); waitForElasticsearch(installation, "elastic", PASSWORD); } /** * Check that the elastic user's password can be configured via a file and the ELASTIC_PASSWORD_FILE environment variable. */ public void test080ConfigurePasswordThroughEnvironmentVariableFile() throws Exception { final String xpackPassword = "hunter2"; final String passwordFilename = "password.txt"; // ELASTIC_PASSWORD_FILE Files.writeString(tempDir.resolve(passwordFilename), xpackPassword + "\n"); // File permissions need to be secured in order for the ES wrapper to accept // them for populating env var values Files.setPosixFilePermissions(tempDir.resolve(passwordFilename), p600); // But when running in Vagrant, also ensure ES can actually access the file chownWithPrivilegeEscalation(tempDir.resolve(passwordFilename), "1000:0"); // Restart the container runContainer( distribution(), builder().volume(tempDir, "/run/secrets").envVar("ELASTIC_PASSWORD_FILE", "/run/secrets/" + passwordFilename) ); // If we configured security correctly, then this call will only work if we specify the correct credentials. try { waitForElasticsearch(installation, "elastic", "hunter2"); } catch (Exception e) { throw new AssertionError( "Failed to check whether Elasticsearch had started. 
This could be because " + "authentication isn't working properly. Check the container logs", e ); } // Also check that an unauthenticated call fails final int statusCode = ServerUtils.makeRequestAndGetStatus( Request.Get("https://localhost:9200"), null, null, ServerUtils.getCaCert(installation) ); assertThat("Expected server to require authentication", statusCode, equalTo(401)); } /** * Check that when verifying the file permissions of _FILE environment variables, symlinks * are followed. */ public void test081SymlinksAreFollowedWithEnvironmentVariableFiles() throws Exception { // Test relies on symlinks assumeFalse(Platforms.WINDOWS); final String xpackPassword = "hunter2"; final String passwordFilename = "password.txt"; final String symlinkFilename = "password_symlink"; // ELASTIC_PASSWORD_FILE Files.writeString(tempDir.resolve(passwordFilename), xpackPassword + "\n"); // Link to the password file. We can't use an absolute path for the target, because // it won't resolve inside the container. Files.createSymbolicLink(tempDir.resolve(symlinkFilename), Path.of(passwordFilename)); // File permissions need to be secured in order for the ES wrapper to accept // them for populating env var values. The wrapper will resolve the symlink // and check the target's permissions. Files.setPosixFilePermissions(tempDir.resolve(passwordFilename), p600); // Restart the container - this will check that Elasticsearch started correctly, // and didn't fail to follow the symlink and check the file permissions runContainer( distribution(), builder().volume(tempDir, "/run/secrets").envVar("ELASTIC_PASSWORD_FILE", "/run/secrets/" + symlinkFilename) ); } /** * Check that environment variables cannot be used with _FILE environment variables. 
*/ public void test082CannotUseEnvVarsAndFiles() throws Exception { final String passwordFilename = "password.txt"; Files.writeString(tempDir.resolve(passwordFilename), "other_hunter2\n"); // File permissions need to be secured in order for the ES wrapper to accept // them for populating env var values Files.setPosixFilePermissions(tempDir.resolve(passwordFilename), p600); final Result dockerLogs = runContainerExpectingFailure( distribution, builder().volume(tempDir, "/run/secrets") .envVar("ELASTIC_PASSWORD", "hunter2") .envVar("ELASTIC_PASSWORD_FILE", "/run/secrets/" + passwordFilename) ); assertThat( dockerLogs.stderr(), containsString("ERROR: Both ELASTIC_PASSWORD_FILE and ELASTIC_PASSWORD are set. These are mutually exclusive.") ); } /** * Check that when populating environment variables by setting variables with the suffix "_FILE", * the files' permissions are checked. */ public void test083EnvironmentVariablesUsingFilesHaveCorrectPermissions() throws Exception { final String passwordFilename = "password.txt"; Files.writeString(tempDir.resolve(passwordFilename), "hunter2\n"); // Set invalid file permissions Files.setPosixFilePermissions(tempDir.resolve(passwordFilename), p660); // Restart the container final Result dockerLogs = runContainerExpectingFailure( distribution(), builder().volume(tempDir, "/run/secrets").envVar("ELASTIC_PASSWORD_FILE", "/run/secrets/" + passwordFilename) ); assertThat( dockerLogs.stderr(), containsString( "ERROR: File /run/secrets/" + passwordFilename + " from ELASTIC_PASSWORD_FILE must have file permissions 400 or 600" ) ); } /** * Check that when verifying the file permissions of _FILE environment variables, symlinks * are followed, and that invalid target permissions are detected. 
*/ public void test084SymlinkToFileWithInvalidPermissionsIsRejected() throws Exception { // Test relies on symlinks assumeFalse(Platforms.WINDOWS); final String xpackPassword = "hunter2"; final String passwordFilename = "password.txt"; final String symlinkFilename = "password_symlink"; // ELASTIC_PASSWORD_FILE Files.writeString(tempDir.resolve(passwordFilename), xpackPassword + "\n"); // Link to the password file. We can't use an absolute path for the target, because // it won't resolve inside the container. Files.createSymbolicLink(tempDir.resolve(symlinkFilename), Path.of(passwordFilename)); // Set invalid permissions on the file that the symlink targets Files.setPosixFilePermissions(tempDir.resolve(passwordFilename), p775); // Restart the container final Result dockerLogs = runContainerExpectingFailure( distribution(), builder().volume(tempDir, "/run/secrets").envVar("ELASTIC_PASSWORD_FILE", "/run/secrets/" + symlinkFilename) ); assertThat( dockerLogs.stderr(), containsString( "ERROR: File " + passwordFilename + " (target of symlink /run/secrets/" + symlinkFilename + " from ELASTIC_PASSWORD_FILE) must have file permissions 400 or 600, but actually has: 775" ) ); } /** * Check that environment variables are translated to -E options even for commands invoked under * `docker exec`, where the Docker image's entrypoint is not executed. */ public void test085EnvironmentVariablesAreRespectedUnderDockerExec() { installation = runContainer(distribution(), builder().envVar("ELASTIC_PASSWORD", "hunter2")); // The tool below requires a keystore, so ensure that ES is fully initialised before proceeding. 
waitForElasticsearch(installation, "elastic", "hunter2"); sh.getEnv().put("http.host", "this.is.not.valid"); // This will fail because of the extra env var final Result result = sh.runIgnoreExitCode("bash -c 'echo y | elasticsearch-setup-passwords auto'"); assertFalse("elasticsearch-setup-passwords command should have failed", result.isSuccess()); assertThat(result.stdout(), containsString("java.net.UnknownHostException: this.is.not.valid")); } /** * Check whether the elasticsearch-certutil tool has been shipped correctly, * and if present then it can execute. */ public void test090SecurityCliPackaging() { final Installation.Executables bin = installation.executables(); final Path securityCli = installation.lib.resolve("tools").resolve("security-cli"); assertTrue(existsInContainer(securityCli)); Result result = sh.run(bin.certutilTool + " --help"); assertThat(result.stdout(), containsString("Simplifies certificate creation for use with the Elastic Stack")); // Ensure that the exit code from the java command is passed back up through the shell script result = sh.runIgnoreExitCode(bin.certutilTool + " invalid-command"); assertThat(result.isSuccess(), is(false)); assertThat(result.stdout(), containsString("Unknown command [invalid-command]")); } /** * Check that the elasticsearch-shard tool is shipped in the Docker image and is executable. */ public void test091ElasticsearchShardCliPackaging() { final Installation.Executables bin = installation.executables(); final Result result = sh.run(bin.shardTool + " -h"); assertThat(result.stdout(), containsString("A CLI tool to remove corrupted parts of unrecoverable shards")); } /** * Check that the elasticsearch-node tool is shipped in the Docker image and is executable. 
*/ public void test092ElasticsearchNodeCliPackaging() { final Installation.Executables bin = installation.executables(); final Result result = sh.run(bin.nodeTool + " -h"); assertThat( "Failed to find expected message about the elasticsearch-node CLI tool", result.stdout(), containsString("A CLI tool to do unsafe cluster and index manipulations on current node") ); } /** * Check that no core dumps have been accidentally included in the Docker image. */ public void test100NoCoreFilesInImage() { assertFalse("Unexpected core dump found in Docker image", existsInContainer("/core*")); } /** * Check that there are no files with a GID other than 0. */ public void test101AllFilesAreGroupZero() { // Run a `find` command in a new container without Elasticsearch running, so // that the results aren't subject to sporadic failures from files appearing / // disappearing while `find` is traversing the filesystem. // // We also create a file under `data/` to ensure that files are created with the // expected group. final Shell localSh = new Shell(); final String findResults = localSh.run( "docker run --rm --tty " + DockerRun.getImageName(distribution) + " bash -c ' touch data/test && find . \\! -group 0 ' " ).stdout(); assertThat("Found some files whose GID != 0", findResults, is(emptyString())); } /** * Check that the Docker image has the expected "Label Schema" labels. 
* @see <a href="http://label-schema.org/">Label Schema website</a> */ public void test110OrgLabelSchemaLabels() throws Exception { assumeTrue(distribution.packaging != Packaging.DOCKER_IRON_BANK); final Map<String, String> labels = getImageLabels(distribution); final Map<String, String> staticLabels = new HashMap<>(); staticLabels.put("name", "Elasticsearch"); staticLabels.put("schema-version", "1.0"); staticLabels.put("url", "https://www.elastic.co/products/elasticsearch"); staticLabels.put("usage", "https://www.elastic.co/guide/en/elasticsearch/reference/index.html"); staticLabels.put("vcs-url", "https://github.com/elastic/elasticsearch"); staticLabels.put("vendor", "Elastic"); staticLabels.put("license", "Elastic-License-2.0"); // TODO: we should check the actual version value final Set<String> dynamicLabels = Set.of("build-date", "vcs-ref", "version"); final String prefix = "org.label-schema"; staticLabels.forEach((suffix, value) -> { String key = prefix + "." + suffix; assertThat(labels, hasKey(key)); assertThat(labels.get(key), equalTo(value)); }); dynamicLabels.forEach(label -> { String key = prefix + "." + label; assertThat(labels, hasKey(key)); }); } /** * Check that the Docker image has the expected "Open Containers Annotations" labels. 
* @see <a href="https://github.com/opencontainers/image-spec/blob/master/annotations.md">Open Containers Annotations</a> */ public void test110OrgOpencontainersLabels() throws Exception { assumeTrue(distribution.packaging != Packaging.DOCKER_IRON_BANK); final Map<String, String> labels = getImageLabels(distribution); final Map<String, String> staticLabels = new HashMap<>(); staticLabels.put("title", "Elasticsearch"); staticLabels.put("url", "https://www.elastic.co/products/elasticsearch"); staticLabels.put("documentation", "https://www.elastic.co/guide/en/elasticsearch/reference/index.html"); staticLabels.put("source", "https://github.com/elastic/elasticsearch"); staticLabels.put("vendor", "Elastic"); staticLabels.put("licenses", "Elastic-License-2.0"); // TODO: we should check the actual version value final Set<String> dynamicLabels = Set.of("created", "revision", "version"); final String prefix = "org.opencontainers.image"; staticLabels.forEach((suffix, value) -> { String key = prefix + "." + suffix; assertThat(labels, hasKey(key)); assertThat(labels.get(key), equalTo(value)); }); dynamicLabels.forEach(label -> { String key = prefix + "." + label; assertThat(labels, hasKey(key)); }); } /** * Check that the container logs contain the expected content for Elasticsearch itself. */ public void test120DockerLogsIncludeElasticsearchLogs() { waitForElasticsearch(installation, "elastic", PASSWORD); final Result containerLogs = getContainerLogs(); assertThat("Container logs should contain full
DockerTests
java
apache__hadoop
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java
{ "start": 3406, "end": 4020 }
class ____ implements Deserializer<T> { private DatumReader<T> reader; private BinaryDecoder decoder; private InputStream inStream; AvroDeserializer(Class<T> clazz) { this.reader = getReader(clazz); } @Override public void close() throws IOException { inStream.close(); } @Override public T deserialize(T t) throws IOException { return reader.read(t, decoder); } @Override public void open(InputStream in) throws IOException { inStream = in; decoder = DecoderFactory.get().binaryDecoder(in, decoder); } } }
AvroDeserializer
java
elastic__elasticsearch
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorFeaturesAction.java
{ "start": 817, "end": 1914 }
class ____ extends HandledTransportAction< UpdateConnectorFeaturesAction.Request, ConnectorUpdateActionResponse> { protected final ConnectorIndexService connectorIndexService; @Inject public TransportUpdateConnectorFeaturesAction(TransportService transportService, ActionFilters actionFilters, Client client) { super( UpdateConnectorFeaturesAction.NAME, transportService, actionFilters, UpdateConnectorFeaturesAction.Request::new, EsExecutors.DIRECT_EXECUTOR_SERVICE ); this.connectorIndexService = new ConnectorIndexService(client); } @Override protected void doExecute( Task task, UpdateConnectorFeaturesAction.Request request, ActionListener<ConnectorUpdateActionResponse> listener ) { connectorIndexService.updateConnectorFeatures( request.getConnectorId(), request.getFeatures(), listener.map(r -> new ConnectorUpdateActionResponse(r.getResult())) ); } }
TransportUpdateConnectorFeaturesAction
java
jhy__jsoup
src/test/java/org/jsoup/internal/ReaderTest.java
{ "start": 477, "end": 2077 }
class ____ { @Test void readerOfStringAndFile() throws IOException { // make sure that reading from a String and from a File produce the same bytes Path path = getPath("/fuzztests/garble.html"); byte[] bytes = Files.readAllBytes(path); String fromBytes = new String(bytes, StandardCharsets.UTF_8); SimpleStreamReader streamReader = getReader(path); String fromStream = getString(streamReader); assertEquals(fromBytes, fromStream); SimpleStreamReader reader2 = getReader(path); CharacterReader cr = new CharacterReader(reader2); String fullRead = cr.consumeTo('X'); // does not exist in input assertEquals(fromBytes, fullRead); } private static String getString(SimpleStreamReader streamReader) throws IOException { // read streamreader to a string: StringBuilder builder = new StringBuilder(); char[] cbuffer = new char[1024]; int read; while ((read = streamReader.read(cbuffer)) != -1) { builder.append(cbuffer, 0, read); } return builder.toString(); } private static SimpleStreamReader getReader(Path path) throws IOException { // set up a chain as in when we parse: simplebufferedinput -> controllableinputstream -> simplestreamreader -> characterreader SimpleBufferedInput input = new SimpleBufferedInput(Files.newInputStream(path)); ControllableInputStream stream = ControllableInputStream.wrap(input, 0); return new SimpleStreamReader(stream, StandardCharsets.UTF_8); } }
ReaderTest
java
elastic__elasticsearch
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/completion/CohereCompletionServiceSettings.java
{ "start": 2101, "end": 7752 }
class ____ extends FilteredXContentObject implements ServiceSettings, CohereRateLimitServiceSettings { public static final String NAME = "cohere_completion_service_settings"; private static final TransportVersion ML_INFERENCE_COHERE_API_VERSION = TransportVersion.fromName("ml_inference_cohere_api_version"); // Production key rate limits for all endpoints: https://docs.cohere.com/docs/going-live#production-key-specifications // 10K requests per minute private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(10_000); public static CohereCompletionServiceSettings fromMap(Map<String, Object> map, ConfigurationParseContext context) { ValidationException validationException = new ValidationException(); String url = extractOptionalString(map, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); URI uri = convertToUri(url, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); RateLimitSettings rateLimitSettings = RateLimitSettings.of( map, DEFAULT_RATE_LIMIT_SETTINGS, validationException, CohereService.NAME, context ); String modelId = extractOptionalString(map, MODEL_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); var apiVersion = apiVersionFromMap(map, context, validationException); if (apiVersion == CohereServiceSettings.CohereApiVersion.V2) { if (modelId == null) { validationException.addValidationError(MODEL_REQUIRED_FOR_V2_API); } } if (validationException.validationErrors().isEmpty() == false) { throw validationException; } return new CohereCompletionServiceSettings(uri, modelId, rateLimitSettings, apiVersion); } private final URI uri; private final String modelId; private final RateLimitSettings rateLimitSettings; private final CohereServiceSettings.CohereApiVersion apiVersion; public CohereCompletionServiceSettings( @Nullable URI uri, @Nullable String modelId, @Nullable RateLimitSettings rateLimitSettings, CohereServiceSettings.CohereApiVersion apiVersion ) { this.uri = uri; this.modelId = 
modelId; this.rateLimitSettings = Objects.requireNonNullElse(rateLimitSettings, DEFAULT_RATE_LIMIT_SETTINGS); this.apiVersion = apiVersion; } public CohereCompletionServiceSettings( @Nullable String url, @Nullable String modelId, @Nullable RateLimitSettings rateLimitSettings, CohereServiceSettings.CohereApiVersion apiVersion ) { this(createOptionalUri(url), modelId, rateLimitSettings, apiVersion); } public CohereCompletionServiceSettings(StreamInput in) throws IOException { uri = createOptionalUri(in.readOptionalString()); modelId = in.readOptionalString(); rateLimitSettings = new RateLimitSettings(in); if (in.getTransportVersion().supports(ML_INFERENCE_COHERE_API_VERSION)) { this.apiVersion = in.readEnum(CohereServiceSettings.CohereApiVersion.class); } else { this.apiVersion = CohereServiceSettings.CohereApiVersion.V1; } } @Override public RateLimitSettings rateLimitSettings() { return rateLimitSettings; } @Override public CohereServiceSettings.CohereApiVersion apiVersion() { return apiVersion; } public URI uri() { return uri; } public String modelId() { return modelId; } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); toXContentFragmentOfExposedFields(builder, params); builder.field(API_VERSION, apiVersion); // API version is persisted but not exposed to the user builder.endObject(); return builder; } @Override public String getWriteableName() { return NAME; } @Override public TransportVersion getMinimalSupportedVersion() { return TransportVersions.V_8_15_0; } @Override public void writeTo(StreamOutput out) throws IOException { var uriToWrite = uri != null ? 
uri.toString() : null; out.writeOptionalString(uriToWrite); out.writeOptionalString(modelId); rateLimitSettings.writeTo(out); if (out.getTransportVersion().supports(ML_INFERENCE_COHERE_API_VERSION)) { out.writeEnum(apiVersion); } } @Override protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { if (uri != null) { builder.field(URL, uri.toString()); } if (modelId != null) { builder.field(MODEL_ID, modelId); } rateLimitSettings.toXContent(builder, params); return builder; } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; CohereCompletionServiceSettings that = (CohereCompletionServiceSettings) object; return Objects.equals(uri, that.uri) && Objects.equals(modelId, that.modelId) && Objects.equals(rateLimitSettings, that.rateLimitSettings) && apiVersion == that.apiVersion; } @Override public int hashCode() { return Objects.hash(uri, modelId, rateLimitSettings, apiVersion); } }
CohereCompletionServiceSettings
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/hql/ClassificationType.java
{ "start": 655, "end": 805 }
enum ____ in JPA. Here we are * using the approach of storing the ordinal values, rather than the names. * * @author Steve Ebersole */ public
support
java
spring-projects__spring-framework
spring-context/src/main/java/org/springframework/scheduling/config/Task.java
{ "start": 862, "end": 1128 }
class ____ a {@code Runnable} to be executed as a task, typically at a * scheduled time or interval. See subclass hierarchy for various scheduling approaches. * * @author Chris Beams * @author Juergen Hoeller * @author Brian Clozel * @since 3.2 */ public
defining
java
apache__flink
flink-kubernetes/src/test/java/org/apache/flink/kubernetes/kubeclient/decorators/InitJobManagerDecoratorWithPodTemplateTest.java
{ "start": 1541, "end": 3578 }
class ____ extends DecoratorWithPodTemplateTestBase { @Override protected void setupFlinkConfig() { super.setupFlinkConfig(); this.flinkConfig.set(KubernetesConfigOptions.JOB_MANAGER_ANNOTATIONS, ANNOTATIONS); this.flinkConfig.setString( KubernetesConfigOptions.JOB_MANAGER_TOLERATIONS.key(), TOLERATION_STRING); this.customizedEnvs.forEach( (k, v) -> this.flinkConfig.setString( ResourceManagerOptions.CONTAINERIZED_MASTER_ENV_PREFIX + k, v)); this.flinkConfig.set(KubernetesConfigOptions.JOB_MANAGER_LABELS, userLabels); this.flinkConfig.set(KubernetesConfigOptions.JOB_MANAGER_NODE_SELECTOR, nodeSelector); } @Override public FlinkPod getResultPod(FlinkPod podTemplate) { final KubernetesJobManagerParameters kubernetesJobManagerParameters = new KubernetesJobManagerParameters( flinkConfig, new KubernetesClusterClientFactory().getClusterSpecification(flinkConfig)); final InitJobManagerDecorator initJobManagerDecorator = new InitJobManagerDecorator(kubernetesJobManagerParameters); return initJobManagerDecorator.decorateFlinkPod(podTemplate); } @Test void testJobManagerManagerMainContainerPortsMerging() { final List<String> expectedContainerPorts = new ArrayList<>(); expectedContainerPorts.add(Constants.REST_PORT_NAME); expectedContainerPorts.add(Constants.JOB_MANAGER_RPC_PORT_NAME); expectedContainerPorts.add(Constants.BLOB_SERVER_PORT_NAME); // Add port from pod template expectedContainerPorts.add("testing-port"); assertThat( this.resultPod.getMainContainer().getPorts().stream() .map(ContainerPort::getName)) .containsExactlyInAnyOrderElementsOf(expectedContainerPorts); } }
InitJobManagerDecoratorWithPodTemplateTest
java
apache__camel
catalog/camel-route-parser/src/main/java/org/apache/camel/parser/model/CamelSimpleExpressionDetails.java
{ "start": 924, "end": 1242 }
class ____ extends LanguageExpressionDetails { private String simple; public String getSimple() { return simple; } public void setSimple(String simple) { this.simple = simple; } @Override public String toString() { return simple; } }
CamelSimpleExpressionDetails
java
mybatis__mybatis-3
src/test/java/org/apache/ibatis/domain/jpetstore/Account.java
{ "start": 732, "end": 3865 }
class ____ implements Serializable { private static final long serialVersionUID = 1L; private String username; private String password; private String email; private String firstName; private String lastName; private String status; private String address1; private String address2; private String city; private String state; private String zip; private String country; private String phone; private String favouriteCategoryId; private String languagePreference; private boolean listOption; private boolean bannerOption; private String bannerName; public String getUsername() { return username; } public void setUsername(String username) { this.username = username; } public String getPassword() { return password; } public void setPassword(String password) { this.password = password; } public String getEmail() { return email; } public void setEmail(String email) { this.email = email; } public String getFirstName() { return firstName; } public void setFirstName(String firstName) { this.firstName = firstName; } public String getLastName() { return lastName; } public void setLastName(String lastName) { this.lastName = lastName; } public String getStatus() { return status; } public void setStatus(String status) { this.status = status; } public String getAddress1() { return address1; } public void setAddress1(String address1) { this.address1 = address1; } public String getAddress2() { return address2; } public void setAddress2(String address2) { this.address2 = address2; } public String getCity() { return city; } public void setCity(String city) { this.city = city; } public String getState() { return state; } public void setState(String state) { this.state = state; } public String getZip() { return zip; } public void setZip(String zip) { this.zip = zip; } public String getCountry() { return country; } public void setCountry(String country) { this.country = country; } public String getPhone() { return phone; } public void setPhone(String phone) { this.phone = phone; } public 
String getFavouriteCategoryId() { return favouriteCategoryId; } public void setFavouriteCategoryId(String favouriteCategoryId) { this.favouriteCategoryId = favouriteCategoryId; } public String getLanguagePreference() { return languagePreference; } public void setLanguagePreference(String languagePreference) { this.languagePreference = languagePreference; } public boolean isListOption() { return listOption; } public void setListOption(boolean listOption) { this.listOption = listOption; } public boolean isBannerOption() { return bannerOption; } public void setBannerOption(boolean bannerOption) { this.bannerOption = bannerOption; } public String getBannerName() { return bannerName; } public void setBannerName(String bannerName) { this.bannerName = bannerName; } }
Account
java
apache__camel
components/camel-braintree/src/test/java/org/apache/camel/component/braintree/DiscountGatewayIT.java
{ "start": 1460, "end": 2306 }
class ____ extends AbstractBraintreeTestSupport { private static final Logger LOG = LoggerFactory.getLogger(DiscountGatewayIT.class); private static final String PATH_PREFIX = BraintreeApiCollection.getCollection().getApiName(DiscountGatewayApiMethod.class).getName(); @Disabled @Test public void testAll() { final List<Discount> result = requestBody("direct://ALL", null, List.class); assertNotNull(result, "all result"); LOG.debug("all: {}", result); } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { public void configure() { // test route for all from("direct://ALL") .to("braintree://" + PATH_PREFIX + "/all"); } }; } }
DiscountGatewayIT
java
alibaba__nacos
naming/src/main/java/com/alibaba/nacos/naming/core/v2/metadata/ClusterMetadata.java
{ "start": 983, "end": 2571 }
class ____ implements Serializable { private static final long serialVersionUID = -80030989533083615L; private int healthyCheckPort = 80; private String healthyCheckType = Tcp.TYPE; private AbstractHealthChecker healthChecker = new Tcp(); /** * Whether or not use instance port to do health check. */ private boolean useInstancePortForCheck = true; private Map<String, String> extendData = new ConcurrentHashMap<>(1); public int getHealthyCheckPort() { return healthyCheckPort; } public void setHealthyCheckPort(int healthyCheckPort) { this.healthyCheckPort = healthyCheckPort; } public String getHealthyCheckType() { return healthyCheckType; } public void setHealthyCheckType(String healthyCheckType) { this.healthyCheckType = healthyCheckType; } public AbstractHealthChecker getHealthChecker() { return healthChecker; } public void setHealthChecker(AbstractHealthChecker healthChecker) { this.healthChecker = healthChecker; } public boolean isUseInstancePortForCheck() { return useInstancePortForCheck; } public void setUseInstancePortForCheck(boolean useInstancePortForCheck) { this.useInstancePortForCheck = useInstancePortForCheck; } public Map<String, String> getExtendData() { return extendData; } public void setExtendData(Map<String, String> extendData) { this.extendData = extendData; } }
ClusterMetadata
java
quarkusio__quarkus
independent-projects/resteasy-reactive/client/runtime/src/main/java/org/jboss/resteasy/reactive/client/impl/ClientResponseBuilderImpl.java
{ "start": 341, "end": 1590 }
class ____ extends AbstractResponseBuilder { //TODO: should not extend the server version InputStream entityStream; RestClientRequestContext restClientRequestContext; public ClientResponseBuilderImpl invocationState(RestClientRequestContext restClientRequestContext) { this.restClientRequestContext = restClientRequestContext; return this; } public ClientResponseBuilderImpl entityStream(InputStream entityStream) { this.entityStream = entityStream; return this; } @Override protected AbstractResponseBuilder doClone() { return new ClientResponseBuilderImpl(); } @Override public ResponseImpl build() { ClientResponseImpl response = new ClientResponseImpl(); populateResponse(response); response.restClientRequestContext = restClientRequestContext; response.setEntityStream(entityStream); return response; } @Override public Response.ResponseBuilder contentLocation(URI location) { //TODO: needs some thinking throw new NotImplementedYet(); } @Override public Response.ResponseBuilder location(URI location) { throw new NotImplementedYet(); } }
ClientResponseBuilderImpl
java
qos-ch__slf4j
integration/src/test/java/org/slf4j/test_osgi/FelixHost.java
{ "start": 1854, "end": 2621 }
class ____ { private Felix felix = null; Properties otherProps = new Properties(); final FrameworkErrorListener frameworkErrorListener; final CheckingBundleListener myBundleListener; public FelixHost(FrameworkErrorListener frameworkErrorListener, CheckingBundleListener myBundleListener) { this.frameworkErrorListener = frameworkErrorListener; this.myBundleListener = myBundleListener; } public void doLaunch() { // Create a case-insensitive configuration property map. StringMap configMap = new StringMap(); // Configure the Felix instance to be embedded. // configMap.put(FelixConstants.EMBEDDED_EXECUTION_PROP, "true"); // Add core OSGi packages to be exported from the
FelixHost
java
apache__flink
flink-clients/src/test/java/org/apache/flink/client/cli/CliFrontendCheckpointTest.java
{ "start": 1283, "end": 3339 }
class ____ extends CliFrontendTestBase { @Test void testTriggerCheckpointSuccess() throws Exception { JobID jobId = new JobID(); long checkpointId = 15L; String[] parameters = {jobId.toString()}; TestingClusterClient<String> clusterClient = new TestingClusterClient<>(); try { clusterClient.setTriggerCheckpointFunction( (ignore, checkpointType) -> { return CompletableFuture.completedFuture(checkpointId); }); MockedCliFrontend testFrontend = new MockedCliFrontend(clusterClient); testFrontend.checkpoint(parameters); } finally { clusterClient.close(); } } @Test void testMissingJobId() { assertThatThrownBy( () -> { String[] parameters = {}; Configuration configuration = getConfiguration(); CliFrontend testFrontend = new CliFrontend( configuration, Collections.singletonList(getCli())); testFrontend.checkpoint(parameters); }) .isInstanceOf(CliArgsException.class); } @Test void testFullCheckpoint() throws Exception { JobID jobId = new JobID(); long checkpointId = 15L; String[] parameters = {"-full", jobId.toString()}; TestingClusterClient<String> clusterClient = new TestingClusterClient<>(); try { clusterClient.setTriggerCheckpointFunction( (ignore, checkpointType) -> { return CompletableFuture.completedFuture(checkpointId); }); MockedCliFrontend testFrontend = new MockedCliFrontend(clusterClient); testFrontend.checkpoint(parameters); } finally { clusterClient.close(); } } }
CliFrontendCheckpointTest
java
apache__camel
components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/api/dto/analytics/reports/ReportType.java
{ "start": 983, "end": 1346 }
class ____ extends AbstractDTOBase { private String type; private String label; public String getType() { return type; } public void setType(String type) { this.type = type; } public String getLabel() { return label; } public void setLabel(String label) { this.label = label; } }
ReportType
java
apache__flink
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/functions/aggregate/BatchApproxCountDistinctAggFunctions.java
{ "start": 11249, "end": 12024 }
class ____ extends ApproxCountDistinctAggFunction<BinaryStringData> { public StringApproxCountDistinctAggFunction() { super(new VarCharType()); } @Override long getHashcode(BinaryStringData s) { MemorySegment[] segments = s.getSegments(); if (segments.length == 1) { return hashUnsafeBytes( segments[0], s.getOffset(), s.getSizeInBytes(), DEFAULT_SEED); } else { return hashUnsafeBytes( MemorySegmentFactory.wrap(s.toBytes()), 0, s.getSizeInBytes(), DEFAULT_SEED); } } } }
StringApproxCountDistinctAggFunction
java
square__javapoet
src/test/java/com/squareup/javapoet/JavaFileTest.java
{ "start": 31795, "end": 32515 }
class ____ extends JavaFileTest.Parent {\n" + " java.util.Optional<String> optionalString() {\n" + " return java.util.Optional.empty();\n" + " }\n" + "\n" + " java.util.regex.Pattern pattern() {\n" + " return null;\n" + " }\n" + "}\n"); } @Test public void avoidClashes_parentChild_superclass_typeMirror() { String source = JavaFile.builder("com.squareup.javapoet", childTypeBuilder().superclass(getElement(Parent.class).asType()).build()) .build() .toString(); assertThat(source).isEqualTo("package com.squareup.javapoet;\n" + "\n" + "import java.lang.String;\n" + "\n" + "
Child
java
spring-projects__spring-boot
module/spring-boot-liquibase/src/test/java/org/springframework/boot/liquibase/endpoint/LiquibaseEndpointTests.java
{ "start": 7693, "end": 8375 }
class ____ { @Bean DataSource dataSource() { DataSource dataSource = new EmbeddedDatabaseBuilder() .setType(EmbeddedDatabaseConnection.get(getClass().getClassLoader()).getType()) .setName(UUID.randomUUID().toString()) .build(); DatabaseInitializationSettings settings = new DatabaseInitializationSettings(); settings.setSchemaLocations(List.of("classpath:/db/create-custom-schema.sql")); DataSourceScriptDatabaseInitializer initializer = new DataSourceScriptDatabaseInitializer(dataSource, settings); initializer.initializeDatabase(); return dataSource; } } @Configuration(proxyBeanMethods = false) static
DataSourceWithSchemaConfiguration
java
processing__processing4
java/test/processing/mode/java/ProblemFactoryTest.java
{ "start": 312, "end": 1618 }
class ____ { private PdePreprocessIssue pdePreprocessIssue; private List<Integer> tabStarts; private List<Integer> starts; @Before public void setUp() { pdePreprocessIssue = new PdePreprocessIssue(8, 2, "test"); tabStarts = new ArrayList<>(); tabStarts.add(5); starts = new ArrayList<>(); starts.add(0); starts.add(5); starts.add(10); } @Test public void buildWithoutEditor() { Problem problem = ProblemFactory.build(pdePreprocessIssue, tabStarts); Assert.assertEquals(3, problem.getLineNumber()); Assert.assertEquals("test", problem.getMessage()); } @Test public void getTabStart() { Assert.assertEquals(0, ProblemFactory.getTab(starts, 0).getTab()); } @Test public void getTabMiddleFrontEdge() { Assert.assertEquals(1, ProblemFactory.getTab(starts, 5).getTab()); } @Test public void getTabMiddle() { TabLine tabLine = ProblemFactory.getTab(starts, 7); Assert.assertEquals(1, tabLine.getTab()); Assert.assertEquals(2, tabLine.getLineInTab()); } @Test public void getTabMiddleBackEdge() { Assert.assertEquals(2, ProblemFactory.getTab(starts, 10).getTab()); } @Test public void getTabEnd() { Assert.assertEquals(2, ProblemFactory.getTab(starts, 15).getTab()); } }
ProblemFactoryTest
java
apache__camel
components/camel-aws/camel-aws2-ddb/src/test/java/org/apache/camel/component/aws2/ddb/transform/Ddb2JsonDataTypeTransformerTest.java
{ "start": 1772, "end": 12041 }
class ____ { public static final String AWS_2_DDB_APPLICATION_JSON_TRANSFORMER = "aws2-ddb:application-json"; private DefaultCamelContext camelContext; private final Ddb2JsonDataTypeTransformer transformer = new Ddb2JsonDataTypeTransformer(); private final String keyJson = "{" + "\"name\": \"Rajesh Koothrappali\"" + "}"; private final String itemJson = "{" + "\"name\": \"Rajesh Koothrappali\"," + "\"age\": 29," + "\"super-heroes\": [\"batman\", \"spiderman\", \"wonderwoman\"]," + "\"issues\": [5, 3, 9, 1]," + "\"girlfriend\": null," + "\"doctorate\": true" + "}"; @BeforeEach void setup() { this.camelContext = new DefaultCamelContext(); } @Test @SuppressWarnings("unchecked") void shouldMapPutItemHeaders() throws Exception { Exchange exchange = new DefaultExchange(camelContext); exchange.getMessage().setBody(Json.mapper().readTree(itemJson)); exchange.setProperty("operation", Ddb2Operations.PutItem.name()); transformer.transform(exchange.getMessage(), DataType.ANY, new DataType(AWS_2_DDB_APPLICATION_JSON_TRANSFORMER)); Assertions.assertTrue(exchange.getMessage().hasHeaders()); Assertions.assertEquals(Ddb2Operations.PutItem, exchange.getMessage().getHeader(Ddb2Constants.OPERATION)); Assertions.assertEquals(ReturnValue.ALL_OLD.toString(), exchange.getMessage().getHeader(Ddb2Constants.RETURN_VALUES)); assertAttributeValueMap(exchange.getMessage().getHeader(Ddb2Constants.ITEM, Map.class)); } @Test @SuppressWarnings("unchecked") void shouldMapUpdateItemHeaders() throws Exception { Exchange exchange = new DefaultExchange(camelContext); exchange.getMessage() .setBody(Json.mapper().readTree("{\"operation\": \"" + Ddb2Operations.UpdateItem.name() + "\", \"key\": " + keyJson + ", \"item\": " + itemJson + "}")); transformer.transform(exchange.getMessage(), DataType.ANY, new DataType(AWS_2_DDB_APPLICATION_JSON_TRANSFORMER)); Assertions.assertTrue(exchange.getMessage().hasHeaders()); Assertions.assertEquals(Ddb2Operations.UpdateItem, 
exchange.getMessage().getHeader(Ddb2Constants.OPERATION)); Assertions.assertEquals(ReturnValue.ALL_NEW.toString(), exchange.getMessage().getHeader(Ddb2Constants.RETURN_VALUES)); Map<String, AttributeValue> attributeValueMap = exchange.getMessage().getHeader(Ddb2Constants.KEY, Map.class); Assertions.assertEquals(1L, attributeValueMap.size()); Assertions.assertEquals(AttributeValue.builder().s("Rajesh Koothrappali").build(), attributeValueMap.get("name")); assertAttributeValueUpdateMap(exchange.getMessage().getHeader(Ddb2Constants.UPDATE_VALUES, Map.class)); } @Test @SuppressWarnings("unchecked") void shouldMapDeleteItemHeaders() throws Exception { Exchange exchange = new DefaultExchange(camelContext); exchange.getMessage().setBody(Json.mapper().readTree("{\"key\": " + keyJson + "}")); exchange.setProperty("operation", Ddb2Operations.DeleteItem.name()); transformer.transform(exchange.getMessage(), DataType.ANY, new DataType(AWS_2_DDB_APPLICATION_JSON_TRANSFORMER)); Assertions.assertTrue(exchange.getMessage().hasHeaders()); Assertions.assertEquals(Ddb2Operations.DeleteItem, exchange.getMessage().getHeader(Ddb2Constants.OPERATION)); Assertions.assertEquals(ReturnValue.ALL_OLD.toString(), exchange.getMessage().getHeader(Ddb2Constants.RETURN_VALUES)); Map<String, AttributeValue> attributeValueMap = exchange.getMessage().getHeader(Ddb2Constants.KEY, Map.class); Assertions.assertEquals(1L, attributeValueMap.size()); Assertions.assertEquals(AttributeValue.builder().s("Rajesh Koothrappali").build(), attributeValueMap.get("name")); } @Test @SuppressWarnings("unchecked") void shouldMapNestedObjects() throws Exception { Exchange exchange = new DefaultExchange(camelContext); exchange.getMessage().setBody(Json.mapper().readTree("{\"user\":" + itemJson + "}")); exchange.setProperty("operation", Ddb2Operations.PutItem.name()); transformer.transform(exchange.getMessage(), DataType.ANY, new DataType(AWS_2_DDB_APPLICATION_JSON_TRANSFORMER)); 
Assertions.assertTrue(exchange.getMessage().hasHeaders()); Assertions.assertEquals(Ddb2Operations.PutItem, exchange.getMessage().getHeader(Ddb2Constants.OPERATION)); Assertions.assertEquals(ReturnValue.ALL_OLD.toString(), exchange.getMessage().getHeader(Ddb2Constants.RETURN_VALUES)); Map<String, AttributeValue> attributeValueMap = exchange.getMessage().getHeader(Ddb2Constants.ITEM, Map.class); Assertions.assertEquals(1L, attributeValueMap.size()); Assertions.assertEquals("AttributeValue(M={name=AttributeValue(S=Rajesh Koothrappali), " + "age=AttributeValue(N=29), " + "super-heroes=AttributeValue(SS=[batman, spiderman, wonderwoman]), " + "issues=AttributeValue(NS=[5, 3, 9, 1]), " + "girlfriend=AttributeValue(NUL=true), " + "doctorate=AttributeValue(BOOL=true)})", attributeValueMap.get("user").toString()); } @Test @SuppressWarnings("unchecked") void shouldMapEmptyJson() throws Exception { Exchange exchange = new DefaultExchange(camelContext); exchange.getMessage().setBody("{}"); exchange.getMessage().setHeader(Ddb2Constants.OPERATION, Ddb2Operations.PutItem.name()); transformer.transform(exchange.getMessage(), DataType.ANY, new DataType(AWS_2_DDB_APPLICATION_JSON_TRANSFORMER)); Assertions.assertTrue(exchange.getMessage().hasHeaders()); Assertions.assertEquals(Ddb2Operations.PutItem, exchange.getMessage().getHeader(Ddb2Constants.OPERATION)); Assertions.assertEquals(ReturnValue.ALL_OLD.toString(), exchange.getMessage().getHeader(Ddb2Constants.RETURN_VALUES)); Map<String, AttributeValue> attributeValueMap = exchange.getMessage().getHeader(Ddb2Constants.ITEM, Map.class); Assertions.assertEquals(0L, attributeValueMap.size()); } @Test void shouldFailForWrongBodyType() throws Exception { Exchange exchange = new DefaultExchange(camelContext); exchange.getMessage().setBody("Hello"); Assertions.assertThrows(CamelExecutionException.class, () -> transformer.transform(exchange.getMessage(), DataType.ANY, new DataType(AWS_2_DDB_APPLICATION_JSON_TRANSFORMER))); } @Test void 
shouldFailForUnsupportedOperation() throws Exception { Exchange exchange = new DefaultExchange(camelContext); exchange.getMessage().setBody(Json.mapper().readTree("{}")); exchange.setProperty("operation", Ddb2Operations.BatchGetItems.name()); Assertions.assertThrows(UnsupportedOperationException.class, () -> transformer.transform(exchange.getMessage(), DataType.ANY, new DataType(AWS_2_DDB_APPLICATION_JSON_TRANSFORMER))); } @Test public void shouldLookupDataType() throws Exception { Transformer transformer = camelContext.getTransformerRegistry() .resolveTransformer(new TransformerKey(DataType.ANY, new DataType(AWS_2_DDB_APPLICATION_JSON_TRANSFORMER))); Assertions.assertNotNull(transformer); } private void assertAttributeValueMap(Map<String, AttributeValue> attributeValueMap) { Assertions.assertEquals(6L, attributeValueMap.size()); Assertions.assertEquals(AttributeValue.builder().s("Rajesh Koothrappali").build(), attributeValueMap.get("name")); Assertions.assertEquals(AttributeValue.builder().n("29").build(), attributeValueMap.get("age")); Assertions.assertEquals(AttributeValue.builder().ss("batman", "spiderman", "wonderwoman").build(), attributeValueMap.get("super-heroes")); Assertions.assertEquals(AttributeValue.builder().ns("5", "3", "9", "1").build(), attributeValueMap.get("issues")); Assertions.assertEquals(AttributeValue.builder().nul(true).build(), attributeValueMap.get("girlfriend")); Assertions.assertEquals(AttributeValue.builder().bool(true).build(), attributeValueMap.get("doctorate")); } private void assertAttributeValueUpdateMap(Map<String, AttributeValueUpdate> attributeValueMap) { Assertions.assertEquals(6L, attributeValueMap.size()); Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().s("Rajesh Koothrappali").build()) .action(AttributeAction.PUT).build(), attributeValueMap.get("name")); Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().n("29").build()) 
.action(AttributeAction.PUT).build(), attributeValueMap.get("age")); Assertions.assertEquals( AttributeValueUpdate.builder().value(AttributeValue.builder().ss("batman", "spiderman", "wonderwoman").build()) .action(AttributeAction.PUT).build(), attributeValueMap.get("super-heroes")); Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().ns("5", "3", "9", "1").build()) .action(AttributeAction.PUT).build(), attributeValueMap.get("issues")); Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().nul(true).build()) .action(AttributeAction.PUT).build(), attributeValueMap.get("girlfriend")); Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().bool(true).build()) .action(AttributeAction.PUT).build(), attributeValueMap.get("doctorate")); } }
Ddb2JsonDataTypeTransformerTest
java
apache__hadoop
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HeapSort.java
{ "start": 1092, "end": 2365 }
class ____ implements IndexedSorter { public HeapSort() { } private static void downHeap(final IndexedSortable s, final int b, int i, final int N) { for (int idx = i << 1; idx < N; idx = i << 1) { if (idx + 1 < N && s.compare(b + idx, b + idx + 1) < 0) { if (s.compare(b + i, b + idx + 1) < 0) { s.swap(b + i, b + idx + 1); } else return; i = idx + 1; } else if (s.compare(b + i, b + idx) < 0) { s.swap(b + i, b + idx); i = idx; } else return; } } /** * Sort the given range of items using heap sort. * {@inheritDoc} */ @Override public void sort(IndexedSortable s, int p, int r) { sort(s, p, r, null); } @Override public void sort(final IndexedSortable s, final int p, final int r, final Progressable rep) { final int N = r - p; // build heap w/ reverse comparator, then write in-place from end final int t = Integer.highestOneBit(N); for (int i = t; i > 1; i >>>= 1) { for (int j = i >>> 1; j < i; ++j) { downHeap(s, p-1, j, N + 1); } if (null != rep) { rep.progress(); } } for (int i = r - 1; i > p; --i) { s.swap(p, i); downHeap(s, p - 1, 1, i - p + 1); } } }
HeapSort
java
apache__rocketmq
proxy/src/main/java/org/apache/rocketmq/proxy/grpc/pipeline/AuthenticationPipeline.java
{ "start": 1847, "end": 3880 }
class ____ implements RequestPipeline { private static final Logger LOGGER = LoggerFactory.getLogger(LoggerName.PROXY_LOGGER_NAME); private final AuthConfig authConfig; private final AuthenticationEvaluator authenticationEvaluator; public AuthenticationPipeline(AuthConfig authConfig, MessagingProcessor messagingProcessor) { this.authConfig = authConfig; this.authenticationEvaluator = AuthenticationFactory.getEvaluator(authConfig, messagingProcessor::getMetadataService); } @Override public void execute(ProxyContext context, Metadata headers, GeneratedMessageV3 request) { if (!authConfig.isAuthenticationEnabled()) { return; } try { Metadata metadata = GrpcConstants.METADATA.get(Context.current()); AuthenticationContext authenticationContext = newContext(context, metadata, request); authenticationEvaluator.evaluate(authenticationContext); } catch (AuthenticationException ex) { throw ex; } catch (Throwable ex) { LOGGER.error("authenticate failed, request:{}", request, ex); throw ex; } } /** * Create Context, for extension * * @param context for extension * @param headers gRPC headers * @param request * @return */ protected AuthenticationContext newContext(ProxyContext context, Metadata headers, GeneratedMessageV3 request) { AuthenticationContext result = AuthenticationFactory.newContext(authConfig, headers, request); if (result instanceof DefaultAuthenticationContext) { DefaultAuthenticationContext defaultAuthenticationContext = (DefaultAuthenticationContext) result; if (StringUtils.isNotBlank(defaultAuthenticationContext.getUsername())) { GrpcUtils.putHeaderIfNotExist(headers, GrpcConstants.AUTHORIZATION_AK, defaultAuthenticationContext.getUsername()); } } return result; } }
AuthenticationPipeline
java
spring-projects__spring-framework
spring-context/src/test/java/org/springframework/cache/CacheReproTests.java
{ "start": 21971, "end": 22109 }
class ____ { @Bean public CacheManager cacheManager() { return new ConcurrentMapCacheManager(); } } public static
Spr14235Config
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/EqualsHashCodeTest.java
{ "start": 2092, "end": 2287 }
class ____ {} }\ """) .doTest(); } @Test public void superClassWithoutHashCode() { compilationHelper .addSourceLines("Super.java", "abstract
Neither
java
micronaut-projects__micronaut-core
test-suite-groovy/src/test/groovy/io/micronaut/docs/config/env/RateLimit.java
{ "start": 74, "end": 396 }
/**
 * An immutable rate-limit value: at most {@code limit} permits per
 * {@code period}.
 *
 * <p>Fixes: the original declared the class as {@code ____} while the
 * constructor was named {@code RateLimit} (a compile error — constructors
 * must match the class name), and the fields were mutable despite having no
 * setters. Both fields are now {@code final}; the public interface
 * (constructor and getters) is unchanged.
 */
class RateLimit {

    private final Duration period;
    private final Integer limit;

    /**
     * @param period the window over which the limit applies
     * @param limit  the maximum number of permits within the window
     */
    public RateLimit(Duration period, Integer limit) {
        this.period = period;
        this.limit = limit;
    }

    /** @return the window over which the limit applies */
    public Duration getPeriod() {
        return period;
    }

    /** @return the maximum number of permits within the window */
    public Integer getLimit() {
        return limit;
    }
}
RateLimit
java
apache__spark
common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
{ "start": 1983, "end": 2477 }
class ____ implements Comparable<UTF8String>, Externalizable, KryoSerializable, Cloneable { // These are only updated by readExternal() or read() @Nonnull private Object base; private long offset; private int numBytes; private volatile int numChars = -1; /** * The validity of the UTF8Strings can be cached to avoid repeated validation checks, because * that operation requires full string scan. Valid strings have no illegal UTF-8 byte sequences. */ private
UTF8String
java
apache__camel
components/camel-rest/src/main/java/org/apache/camel/component/rest/DefaultRestRegistryFactory.java
{ "start": 1037, "end": 1212 }
/**
 * Default {@link RestRegistryFactory}: hands out a new
 * {@link DefaultRestRegistry} on every call.
 */
class ____ implements RestRegistryFactory {

    @Override
    public RestRegistry createRegistry() {
        // A fresh registry instance per invocation; no caching here.
        return new DefaultRestRegistry();
    }
}
DefaultRestRegistryFactory
java
resilience4j__resilience4j
resilience4j-retry/src/test/java/io/github/resilience4j/retry/MaxRetriesExceededTest.java
{ "start": 116, "end": 387 }
/**
 * Tests for {@code MaxRetriesExceeded}.
 */
class ____ {

    /**
     * The message passed to the {@code MaxRetriesExceeded} constructor must
     * be reported verbatim by {@code getMessage()}.
     */
    @Test
    public void errorMessageShouldReportedRight() {
        MaxRetriesExceeded maxRetriesExceeded = new MaxRetriesExceeded("test max retries");
        // Fix: JUnit's assertEquals contract is (expected, actual); the
        // original call had the arguments reversed, which produces a
        // misleading failure message when the assertion fails.
        assertEquals("test max retries", maxRetriesExceeded.getMessage());
    }
}
MaxRetriesExceededTest
java
google__guava
guava-testlib/test/com/google/common/testing/NullPointerTesterTest.java
{ "start": 42494, "end": 42760 }
class ____<T> extends DefaultValueChecker { @SuppressWarnings("unused") // called by NullPointerTester void checkGeneric(T value, String s) { calledWith(value, s); } } private static
AbstractGenericDefaultValueForPackagePrivateMethodChecker
java
apache__hadoop
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalKeyStoreProvider.java
{ "start": 1811, "end": 6688 }
/**
 * A credential provider whose Java keystore lives on the local filesystem.
 * Extends {@code AbstractJavaKeyStoreProvider} with local-file I/O and with
 * POSIX/Windows file-permission capture and restoration.
 */
class ____ extends AbstractJavaKeyStoreProvider {

  // Local keystore file; resolved from the provider URI in initFileSystem().
  private File file;
  // Permission set to (re)apply to the keystore file when flush() rewrites it.
  private Set<PosixFilePermission> permissions;

  protected LocalKeyStoreProvider(URI uri, Configuration conf)
      throws IOException {
    super(uri, conf);
  }

  /** Opens a fresh output stream over the local keystore file. */
  @Override
  protected OutputStream getOutputStreamForKeystore() throws IOException {
    if (LOG.isDebugEnabled()) {
      LOG.debug("using '" + file + "' for output stream.");
    }
    OutputStream out = Files.newOutputStream(file.toPath());
    return out;
  }

  @Override
  protected boolean keystoreExists() throws IOException {
    /* The keystore loader doesn't handle zero length files. */
    return file.exists() && (file.length() > 0);
  }

  @Override
  protected InputStream getInputStreamForFile() throws IOException {
    InputStream is = Files.newInputStream(file.toPath());
    return is;
  }

  /**
   * Parses {@code perms} as an octal mode string and records the
   * corresponding POSIX permission set for later application in flush().
   *
   * @throws IOException if {@code perms} is not a valid octal mode
   */
  @Override
  protected void createPermissions(String perms) throws IOException {
    int mode = 700;
    try {
      mode = Integer.parseInt(perms, 8);
    } catch (NumberFormatException nfe) {
      throw new IOException("Invalid permissions mode provided while "
          + "trying to createPermissions", nfe);
    }
    permissions = modeToPosixFilePermission(mode);
  }

  @Override
  protected void stashOriginalFilePermissions() throws IOException {
    // save off permissions in case we need to
    // rewrite the keystore in flush()
    if (!Shell.WINDOWS) {
      Path path = Paths.get(file.getCanonicalPath());
      permissions = Files.getPosixFilePermissions(path);
    } else {
      // On Windows, the JDK does not support the POSIX file permission APIs.
      // Instead, we can do a winutils call and translate.
      String[] cmd = Shell.getGetPermissionCommand();
      String[] args = new String[cmd.length + 1];
      System.arraycopy(cmd, 0, args, 0, cmd.length);
      args[cmd.length] = file.getCanonicalPath();
      String out = Shell.execCommand(args);
      StringTokenizer t = new StringTokenizer(out, Shell.TOKEN_SEPARATOR_REGEX);
      // The winutils output consists of 10 characters because of the leading
      // directory indicator, i.e. "drwx------". The JDK parsing method expects
      // a 9-character string, so remove the leading character.
      String permString = t.nextToken().substring(1);
      permissions = PosixFilePermissions.fromString(permString);
    }
  }

  /**
   * Resolves the keystore's local {@link File} from the provider path and
   * logs its existence/readability at debug/trace level.
   */
  @Override
  protected void initFileSystem(URI uri)
      throws IOException {
    super.initFileSystem(uri);
    try {
      file = new File(new URI(getPath().toString()));
      if (LOG.isDebugEnabled()) {
        LOG.debug("initialized local file as '" + file + "'.");
        if (file.exists()) {
          LOG.debug("the local file exists and is size " + file.length());
          if (LOG.isTraceEnabled()) {
            if (file.canRead()) {
              LOG.trace("we can read the local file.");
            }
            if (file.canWrite()) {
              LOG.trace("we can write the local file.");
            }
          }
        } else {
          LOG.debug("the local file does not exist.");
        }
      }
    } catch (URISyntaxException e) {
      throw new IOException(e);
    }
  }

  /**
   * Flushes the keystore via the superclass, then restores the stashed
   * permission set onto the rewritten file (POSIX APIs on Unix, winutils
   * translation on Windows).
   */
  @Override
  public void flush() throws IOException {
    super.flush();
    if (LOG.isDebugEnabled()) {
      LOG.debug("Resetting permissions to '" + permissions + "'");
    }
    if (!Shell.WINDOWS) {
      Files.setPosixFilePermissions(Paths.get(file.getCanonicalPath()),
          permissions);
    } else {
      // FsPermission expects a 10-character string because of the leading
      // directory indicator, i.e. "drwx------". The JDK toString method returns
      // a 9-character string, so prepend a leading character.
      FsPermission fsPermission = FsPermission.valueOf(
          "-" + PosixFilePermissions.toString(permissions));
      FileUtil.setPermission(file, fsPermission);
    }
  }

  /** Translates an octal permission mode into a PosixFilePermission set. */
  private static Set<PosixFilePermission> modeToPosixFilePermission(
      int mode) {
    Set<PosixFilePermission> perms = EnumSet.noneOf(PosixFilePermission.class);
    if ((mode & 0001) != 0) {
      perms.add(PosixFilePermission.OTHERS_EXECUTE);
    }
    if ((mode & 0002) != 0) {
      perms.add(PosixFilePermission.OTHERS_WRITE);
    }
    if ((mode & 0004) != 0) {
      perms.add(PosixFilePermission.OTHERS_READ);
    }
    if ((mode & 0010) != 0) {
      perms.add(PosixFilePermission.GROUP_EXECUTE);
    }
    if ((mode & 0020) != 0) {
      perms.add(PosixFilePermission.GROUP_WRITE);
    }
    if ((mode & 0040) != 0) {
      perms.add(PosixFilePermission.GROUP_READ);
    }
    if ((mode & 0100) != 0) {
      perms.add(PosixFilePermission.OWNER_EXECUTE);
    }
    if ((mode & 0200) != 0) {
      perms.add(PosixFilePermission.OWNER_WRITE);
    }
    if ((mode & 0400) != 0) {
      perms.add(PosixFilePermission.OWNER_READ);
    }
    return perms;
  }
}
LocalKeyStoreProvider
java
quarkusio__quarkus
extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/dialect/DialectSpecificSettingsMariaDBTest.java
{ "start": 550, "end": 1638 }
/**
 * Boot-time test: MariaDB-dialect-specific Quarkus configuration keys must
 * surface as the corresponding Hibernate dialect properties at runtime.
 */
class ____ {

    // Boots Quarkus with the MariaDB JDBC extension forced onto the
    // classpath and two MariaDB dialect keys overridden.
    @RegisterExtension
    static QuarkusUnitTest runner = new QuarkusUnitTest()
            .withApplicationRoot((jar) -> jar
                    .addClass(MyEntity.class)
                    .addAsResource("application-start-offline-mariadb-dialect.properties",
                            "application.properties"))
            .setForcedDependencies(List.of(
                    Dependency.of("io.quarkus", "quarkus-jdbc-mariadb-deployment", Version.getVersion())))
            .overrideConfigKey("quarkus.hibernate-orm.dialect.mariadb.bytes-per-character", "8")
            .overrideConfigKey("quarkus.hibernate-orm.dialect.mariadb.no-backslash-escapes", "true");

    @Inject
    EntityManagerFactory entityManagerFactory;

    @Test
    public void applicationStarts() {
        // The "mariadb" Quarkus keys are asserted against "mysql"-prefixed
        // Hibernate properties — per the assertions below, that is the
        // expected mapping for this dialect.
        assertThat(entityManagerFactory.getProperties().get("hibernate.dialect.mysql.bytes_per_character"))
                .isEqualTo("8");
        assertThat(entityManagerFactory.getProperties().get("hibernate.dialect.mysql.no_backslash_escapes"))
                .isEqualTo("true");
    }
}
DialectSpecificSettingsMariaDBTest
java
apache__camel
components/camel-quartz/src/test/java/org/apache/camel/component/quartz/QuartzManuallyTriggerJobTest.java
{ "start": 1218, "end": 2363 }
class ____ extends BaseQuartzTest { @Test public void testQuartzCronRoute() throws Exception { MockEndpoint mock = getMockEndpoint("mock:result"); mock.expectedMinimumMessageCount(1); QuartzComponent component = context.getComponent("quartz", QuartzComponent.class); Scheduler scheduler = component.getScheduler(); // collect all jobKeys of this route (ideally only one). ArrayList<JobKey> jobKeys = new ArrayList<>(); for (String group : scheduler.getJobGroupNames()) { jobKeys.addAll(scheduler.getJobKeys(GroupMatcher.jobGroupEquals(group))); } JobDataMap jobDataMap = scheduler.getJobDetail(jobKeys.get(0)).getJobDataMap(); // trigger job manually scheduler.triggerJob(jobKeys.get(0), jobDataMap); MockEndpoint.assertIsSatisfied(context); } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { public void configure() { from("quartz://MyTimer?cron=05+00+00+*+*+?").to("mock:result"); } }; } }
QuartzManuallyTriggerJobTest
java
apache__camel
components/camel-wordpress/src/main/java/org/apache/camel/component/wordpress/api/model/Media.java
{ "start": 867, "end": 1005 }
class ____ extends Publishable { private static final long serialVersionUID = 3585407536958913479L; public Media() { } }
Media
java
elastic__elasticsearch
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java
{ "start": 818, "end": 22000 }
class ____ { public static final String LOCAL_BREAKER_OVER_RESERVED_SIZE_SETTING = "esql.block_factory.local_breaker.over_reserved"; public static final ByteSizeValue LOCAL_BREAKER_OVER_RESERVED_DEFAULT_SIZE = ByteSizeValue.ofKb(8); public static final String LOCAL_BREAKER_OVER_RESERVED_MAX_SIZE_SETTING = "esql.block_factory.local_breaker.max_over_reserved"; public static final ByteSizeValue LOCAL_BREAKER_OVER_RESERVED_DEFAULT_MAX_SIZE = ByteSizeValue.ofKb(512); public static final String MAX_BLOCK_PRIMITIVE_ARRAY_SIZE_SETTING = "esql.block_factory.max_block_primitive_array_size"; public static final ByteSizeValue DEFAULT_MAX_BLOCK_PRIMITIVE_ARRAY_SIZE = ByteSizeValue.ofKb(512); private final CircuitBreaker breaker; private final BigArrays bigArrays; private final long maxPrimitiveArrayBytes; private final BlockFactory parent; public BlockFactory(CircuitBreaker breaker, BigArrays bigArrays) { this(breaker, bigArrays, DEFAULT_MAX_BLOCK_PRIMITIVE_ARRAY_SIZE); } public BlockFactory(CircuitBreaker breaker, BigArrays bigArrays, ByteSizeValue maxPrimitiveArraySize) { this(breaker, bigArrays, maxPrimitiveArraySize, null); } protected BlockFactory(CircuitBreaker breaker, BigArrays bigArrays, ByteSizeValue maxPrimitiveArraySize, BlockFactory parent) { assert breaker instanceof LocalCircuitBreaker == false || (parent != null && ((LocalCircuitBreaker) breaker).parentBreaker() == parent.breaker) : "use local breaker without parent block factory"; this.breaker = breaker; this.bigArrays = bigArrays; this.parent = parent; this.maxPrimitiveArrayBytes = maxPrimitiveArraySize.getBytes(); } public static BlockFactory getInstance(CircuitBreaker breaker, BigArrays bigArrays) { return new BlockFactory(breaker, bigArrays, DEFAULT_MAX_BLOCK_PRIMITIVE_ARRAY_SIZE, null); } // For testing public CircuitBreaker breaker() { return breaker; } // For testing public BigArrays bigArrays() { return bigArrays; } protected BlockFactory parent() { return parent != null ? 
parent : this; } public BlockFactory newChildFactory(LocalCircuitBreaker childBreaker) { if (childBreaker.parentBreaker() != breaker) { throw new IllegalStateException("Different parent breaker"); } return new BlockFactory(childBreaker, bigArrays, ByteSizeValue.ofBytes(maxPrimitiveArrayBytes), this); } /** * Adjust the circuit breaker with the given delta, if the delta is negative, the breaker will * be adjusted without tripping. * @throws CircuitBreakingException if the breaker was put above its limit */ public void adjustBreaker(final long delta) throws CircuitBreakingException { // checking breaker means potentially tripping, but it doesn't // have to if the delta is negative if (delta > 0) { breaker.addEstimateBytesAndMaybeBreak(delta, "<esql_block_factory>"); } else { breaker.addWithoutBreaking(delta); } } /** Pre-adjusts the breaker for the given position count and element type. Returns the pre-adjusted amount. */ public long preAdjustBreakerForBoolean(int positionCount) { long bytes = (long) positionCount * Byte.BYTES; adjustBreaker(bytes); return bytes; } public long preAdjustBreakerForInt(int positionCount) { long bytes = (long) positionCount * Integer.BYTES; adjustBreaker(bytes); return bytes; } public long preAdjustBreakerForLong(int positionCount) { long bytes = (long) positionCount * Long.BYTES; adjustBreaker(bytes); return bytes; } public long preAdjustBreakerForDouble(int positionCount) { long bytes = (long) positionCount * Double.BYTES; adjustBreaker(bytes); return bytes; } public BooleanBlock.Builder newBooleanBlockBuilder(int estimatedSize) { return new BooleanBlockBuilder(estimatedSize, this); } /** * Build a {@link BooleanVector.FixedBuilder} that never grows. 
*/ public BooleanVector.FixedBuilder newBooleanVectorFixedBuilder(int size) { return new BooleanVectorFixedBuilder(size, this); } public final BooleanBlock newBooleanArrayBlock(boolean[] values, int pc, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { return newBooleanArrayBlock(values, pc, firstValueIndexes, nulls, mvOrdering, 0L); } public BooleanBlock newBooleanArrayBlock(boolean[] values, int pc, int[] fvi, BitSet nulls, MvOrdering mvOrder, long preAdjustedBytes) { var b = new BooleanArrayBlock(values, pc, fvi, nulls, mvOrder, this); adjustBreaker(b.ramBytesUsed() - preAdjustedBytes); return b; } public BooleanVector.Builder newBooleanVectorBuilder(int estimatedSize) { return new BooleanVectorBuilder(estimatedSize, this); } public final BooleanVector newBooleanArrayVector(boolean[] values, int positionCount) { return newBooleanArrayVector(values, positionCount, 0L); } public BooleanVector newBooleanArrayVector(boolean[] values, int positionCount, long preAdjustedBytes) { var b = new BooleanArrayVector(values, positionCount, this); adjustBreaker(b.ramBytesUsed() - preAdjustedBytes); return b; } public final BooleanBlock newConstantBooleanBlockWith(boolean value, int positions) { return newConstantBooleanBlockWith(value, positions, 0L); } public BooleanBlock newConstantBooleanBlockWith(boolean value, int positions, long preAdjustedBytes) { var b = new ConstantBooleanVector(value, positions, this).asBlock(); adjustBreaker(b.ramBytesUsed() - preAdjustedBytes); return b; } public BooleanVector newConstantBooleanVector(boolean value, int positions) { adjustBreaker(ConstantBooleanVector.RAM_BYTES_USED); var v = new ConstantBooleanVector(value, positions, this); assert v.ramBytesUsed() == ConstantBooleanVector.RAM_BYTES_USED; return v; } public IntBlock.Builder newIntBlockBuilder(int estimatedSize) { return new IntBlockBuilder(estimatedSize, this); } public final IntBlock newIntArrayBlock(int[] values, int positionCount, int[] firstValueIndexes, BitSet 
nulls, MvOrdering mvOrdering) { return newIntArrayBlock(values, positionCount, firstValueIndexes, nulls, mvOrdering, 0L); } public IntBlock newIntArrayBlock(int[] values, int pc, int[] fvi, BitSet nulls, MvOrdering mvOrdering, long preAdjustedBytes) { var b = new IntArrayBlock(values, pc, fvi, nulls, mvOrdering, this); adjustBreaker(b.ramBytesUsed() - preAdjustedBytes); return b; } public IntVector.Builder newIntVectorBuilder(int estimatedSize) { return new IntVectorBuilder(estimatedSize, this); } /** * Build a {@link IntVector.FixedBuilder} that never grows. */ public IntVector.FixedBuilder newIntVectorFixedBuilder(int size) { return new IntVectorFixedBuilder(size, this); } /** * Creates a new Vector with the given values and positionCount. Equivalent to: * newIntArrayVector(values, positionCount, 0L); // with zero pre-adjusted bytes */ public final IntVector newIntArrayVector(int[] values, int positionCount) { return newIntArrayVector(values, positionCount, 0L); } /** * Creates a new Vector with the given values and positionCount, where the caller has already * pre-adjusted a number of bytes with the factory's breaker. 
* * long preAdjustedBytes = blockFactory.preAdjustBreakerForInt(positionCount); * int[] values = new int[positionCount]; * for (int i = 0; i &lt; positionCount; i++) { * values[i] = doWhateverStuff * } * var vector = blockFactory.newIntArrayVector(values, positionCount, preAdjustedBytes); */ public IntVector newIntArrayVector(int[] values, int positionCount, long preAdjustedBytes) { var b = new IntArrayVector(values, positionCount, this); adjustBreaker(b.ramBytesUsed() - preAdjustedBytes); return b; } public final IntBlock newConstantIntBlockWith(int value, int positions) { return newConstantIntBlockWith(value, positions, 0L); } public IntBlock newConstantIntBlockWith(int value, int positions, long preAdjustedBytes) { var b = new ConstantIntVector(value, positions, this).asBlock(); adjustBreaker(b.ramBytesUsed() - preAdjustedBytes); return b; } public IntVector newConstantIntVector(int value, int positions) { adjustBreaker(ConstantIntVector.RAM_BYTES_USED); var v = new ConstantIntVector(value, positions, this); assert v.ramBytesUsed() == ConstantIntVector.RAM_BYTES_USED; return v; } public FloatBlock.Builder newFloatBlockBuilder(int estimatedSize) { return new FloatBlockBuilder(estimatedSize, this); } public final FloatBlock newFloatArrayBlock(float[] values, int pc, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { return newFloatArrayBlock(values, pc, firstValueIndexes, nulls, mvOrdering, 0L); } public FloatBlock newFloatArrayBlock(float[] values, int pc, int[] fvi, BitSet nulls, MvOrdering mvOrdering, long preAdjustedBytes) { var b = new FloatArrayBlock(values, pc, fvi, nulls, mvOrdering, this); adjustBreaker(b.ramBytesUsed() - preAdjustedBytes); return b; } public FloatVector.Builder newFloatVectorBuilder(int estimatedSize) { return new FloatVectorBuilder(estimatedSize, this); } /** * Build a {@link FloatVector.FixedBuilder} that never grows. 
*/ public FloatVector.FixedBuilder newFloatVectorFixedBuilder(int size) { return new FloatVectorFixedBuilder(size, this); } public final FloatVector newFloatArrayVector(float[] values, int positionCount) { return newFloatArrayVector(values, positionCount, 0L); } public FloatVector newFloatArrayVector(float[] values, int positionCount, long preAdjustedBytes) { var b = new FloatArrayVector(values, positionCount, this); adjustBreaker(b.ramBytesUsed() - preAdjustedBytes); return b; } public final FloatBlock newConstantFloatBlockWith(float value, int positions) { return newConstantFloatBlockWith(value, positions, 0L); } public FloatBlock newConstantFloatBlockWith(float value, int positions, long preAdjustedBytes) { var b = new ConstantFloatVector(value, positions, this).asBlock(); adjustBreaker(b.ramBytesUsed() - preAdjustedBytes); return b; } public FloatVector newConstantFloatVector(float value, int positions) { adjustBreaker(ConstantFloatVector.RAM_BYTES_USED); var v = new ConstantFloatVector(value, positions, this); assert v.ramBytesUsed() == ConstantFloatVector.RAM_BYTES_USED; return v; } public LongBlock.Builder newLongBlockBuilder(int estimatedSize) { return new LongBlockBuilder(estimatedSize, this); } public final LongBlock newLongArrayBlock(long[] values, int pc, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { return newLongArrayBlock(values, pc, firstValueIndexes, nulls, mvOrdering, 0L); } public LongBlock newLongArrayBlock(long[] values, int pc, int[] fvi, BitSet nulls, MvOrdering mvOrdering, long preAdjustedBytes) { var b = new LongArrayBlock(values, pc, fvi, nulls, mvOrdering, this); adjustBreaker(b.ramBytesUsed() - preAdjustedBytes); return b; } public LongVector.Builder newLongVectorBuilder(int estimatedSize) { return new LongVectorBuilder(estimatedSize, this); } /** * Build a {@link LongVector.FixedBuilder} that never grows. 
*/ public LongVector.FixedBuilder newLongVectorFixedBuilder(int size) { return new LongVectorFixedBuilder(size, this); } public final LongVector newLongArrayVector(long[] values, int positionCount) { return newLongArrayVector(values, positionCount, 0L); } public LongVector newLongArrayVector(long[] values, int positionCount, long preAdjustedBytes) { var b = new LongArrayVector(values, positionCount, this); adjustBreaker(b.ramBytesUsed() - preAdjustedBytes); return b; } public final LongBlock newConstantLongBlockWith(long value, int positions) { return newConstantLongBlockWith(value, positions, 0L); } public LongBlock newConstantLongBlockWith(long value, int positions, long preAdjustedBytes) { var b = new ConstantLongVector(value, positions, this).asBlock(); adjustBreaker(b.ramBytesUsed() - preAdjustedBytes); return b; } public LongVector newConstantLongVector(long value, int positions) { adjustBreaker(ConstantLongVector.RAM_BYTES_USED); var v = new ConstantLongVector(value, positions, this); assert v.ramBytesUsed() == ConstantLongVector.RAM_BYTES_USED; return v; } public DoubleBlock.Builder newDoubleBlockBuilder(int estimatedSize) { return new DoubleBlockBuilder(estimatedSize, this); } public final DoubleBlock newDoubleArrayBlock(double[] values, int pc, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { return newDoubleArrayBlock(values, pc, firstValueIndexes, nulls, mvOrdering, 0L); } public DoubleBlock newDoubleArrayBlock(double[] values, int pc, int[] fvi, BitSet nulls, MvOrdering mvOrdering, long preAdjustedBytes) { var b = new DoubleArrayBlock(values, pc, fvi, nulls, mvOrdering, this); adjustBreaker(b.ramBytesUsed() - preAdjustedBytes); return b; } public DoubleVector.Builder newDoubleVectorBuilder(int estimatedSize) { return new DoubleVectorBuilder(estimatedSize, this); } /** * Build a {@link DoubleVector.FixedBuilder} that never grows. 
*/ public DoubleVector.FixedBuilder newDoubleVectorFixedBuilder(int size) { return new DoubleVectorFixedBuilder(size, this); } public final DoubleVector newDoubleArrayVector(double[] values, int positionCount) { return newDoubleArrayVector(values, positionCount, 0L); } public DoubleVector newDoubleArrayVector(double[] values, int positionCount, long preAdjustedBytes) { var b = new DoubleArrayVector(values, positionCount, this); adjustBreaker(b.ramBytesUsed() - preAdjustedBytes); return b; } public final DoubleBlock newConstantDoubleBlockWith(double value, int positions) { return newConstantDoubleBlockWith(value, positions, 0L); } public DoubleBlock newConstantDoubleBlockWith(double value, int positions, long preAdjustedBytes) { var b = new ConstantDoubleVector(value, positions, this).asBlock(); adjustBreaker(b.ramBytesUsed() - preAdjustedBytes); return b; } public DoubleVector newConstantDoubleVector(double value, int positions) { adjustBreaker(ConstantDoubleVector.RAM_BYTES_USED); var v = new ConstantDoubleVector(value, positions, this); assert v.ramBytesUsed() == ConstantDoubleVector.RAM_BYTES_USED; return v; } public BytesRefBlock.Builder newBytesRefBlockBuilder(int estimatedSize) { return new BytesRefBlockBuilder(estimatedSize, bigArrays, this); } public BytesRefBlock newBytesRefArrayBlock(BytesRefArray values, int pc, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { var b = new BytesRefArrayBlock(values, pc, firstValueIndexes, nulls, mvOrdering, this); adjustBreaker(b.ramBytesUsed() - values.bigArraysRamBytesUsed()); return b; } public BytesRefVector.Builder newBytesRefVectorBuilder(int estimatedSize) { return new BytesRefVectorBuilder(estimatedSize, bigArrays, this); } public BytesRefVector newBytesRefArrayVector(BytesRefArray values, int positionCount) { var b = new BytesRefArrayVector(values, positionCount, this); adjustBreaker(b.ramBytesUsed() - values.bigArraysRamBytesUsed()); return b; } public BytesRefBlock 
newConstantBytesRefBlockWith(BytesRef value, int positions) { var b = new ConstantBytesRefVector(value, positions, this).asBlock(); adjustBreaker(b.ramBytesUsed()); return b; } public BytesRefVector newConstantBytesRefVector(BytesRef value, int positions) { long preadjusted = ConstantBytesRefVector.ramBytesUsed(value); adjustBreaker(preadjusted); var v = new ConstantBytesRefVector(value, positions, this); assert v.ramBytesUsed() == preadjusted; return v; } public Block newConstantNullBlock(int positions) { var b = new ConstantNullBlock(positions, this); adjustBreaker(b.ramBytesUsed()); return b; } public AggregateMetricDoubleBlockBuilder newAggregateMetricDoubleBlockBuilder(int estimatedSize) { return new AggregateMetricDoubleBlockBuilder(estimatedSize, this); } public final AggregateMetricDoubleBlock newAggregateMetricDoubleBlock( Block minBlock, Block maxBlock, Block sumBlock, Block countBlock ) { return new AggregateMetricDoubleArrayBlock( (DoubleBlock) minBlock, (DoubleBlock) maxBlock, (DoubleBlock) sumBlock, (IntBlock) countBlock ); } public final AggregateMetricDoubleBlock newConstantAggregateMetricDoubleBlock( AggregateMetricDoubleBlockBuilder.AggregateMetricDoubleLiteral value, int positions ) { try (AggregateMetricDoubleBlockBuilder builder = newAggregateMetricDoubleBlockBuilder(positions)) { for (int i = 0; i < positions; i++) { if (value.min() != null) { builder.min().appendDouble(value.min()); } else { builder.min().appendNull(); } if (value.max() != null) { builder.max().appendDouble(value.max()); } else { builder.max().appendNull(); } if (value.sum() != null) { builder.sum().appendDouble(value.sum()); } else { builder.sum().appendNull(); } if (value.count() != null) { builder.count().appendInt(value.count()); } else { builder.count().appendNull(); } } return builder.build(); } } public BlockLoader.Block newAggregateMetricDoubleBlockFromDocValues( DoubleBlock minBlock, DoubleBlock maxBlock, DoubleBlock sumBlock, IntBlock countBlock ) { return new 
AggregateMetricDoubleArrayBlock(minBlock, maxBlock, sumBlock, countBlock); } public ExponentialHistogramBlockBuilder newExponentialHistogramBlockBuilder(int estimatedSize) { return new ExponentialHistogramBlockBuilder(estimatedSize, this); } public final ExponentialHistogramBlock newConstantExponentialHistogramBlock(ExponentialHistogram value, int positionCount) { return ExponentialHistogramArrayBlock.createConstant(value, positionCount, this); } public BlockLoader.Block newExponentialHistogramBlockFromDocValues( DoubleBlock minima, DoubleBlock maxima, DoubleBlock sums, DoubleBlock valueCounts, DoubleBlock zeroThresholds, BytesRefBlock encodedHistograms ) { return new ExponentialHistogramArrayBlock(minima, maxima, sums, valueCounts, zeroThresholds, encodedHistograms); } public BlockLoader.Block newTDigestBlockFromDocValues( BytesRefBlock encodedDigests, DoubleBlock minima, DoubleBlock maxima, DoubleBlock sums, LongBlock counts ) { return new TDigestArrayBlock(encodedDigests, minima, maxima, sums, counts); } public final AggregateMetricDoubleBlock newAggregateMetricDoubleBlock( double[] minValues, double[] maxValues, double[] sumValues, int[] countValues, int positions ) { DoubleBlock min = newDoubleArrayVector(minValues, positions).asBlock(); DoubleBlock max = newDoubleArrayVector(maxValues, positions).asBlock(); DoubleBlock sum = newDoubleArrayVector(sumValues, positions).asBlock(); IntBlock count = newIntArrayVector(countValues, positions).asBlock(); return new AggregateMetricDoubleArrayBlock(min, max, sum, count); } /** * Returns the maximum number of bytes that a Block should be backed by a primitive array before switching to using BigArrays. */ public long maxPrimitiveArrayBytes() { return maxPrimitiveArrayBytes; } }
BlockFactory
java
apache__thrift
lib/java/src/main/java/org/apache/thrift/transport/sasl/TSaslProcessorFactory.java
{ "start": 1081, "end": 1199 }
/**
 * Factory for the {@link TProcessor} that serves requests on a connection
 * whose SASL negotiation is tracked by the given handler.
 */
interface ____ {

  /**
   * @param saslHandler the per-connection SASL negotiation state
   * @return the processor to handle this connection's requests
   * @throws TException if a processor cannot be provided
   */
  TProcessor getProcessor(NonblockingSaslHandler saslHandler) throws TException;
}
TSaslProcessorFactory
java
quarkusio__quarkus
independent-projects/tools/analytics-common/src/main/java/io/quarkus/analytics/dto/segment/TrackProperties.java
{ "start": 1348, "end": 2456 }
class ____ { private String groupId; private String artifactId; private String version; public AppExtension() { } public AppExtension(String groupId, String artifactId, String version) { this.groupId = groupId; this.artifactId = artifactId; this.version = version; } public static AppExtensionBuilder builder() { return new AppExtensionBuilder(); } @JsonProperty("group_id") public String getGroupId() { return groupId; } public void setGroupId(String groupId) { this.groupId = groupId; } @JsonProperty("artifact_id") public String getArtifactId() { return artifactId; } public void setArtifactId(String artifactId) { this.artifactId = artifactId; } public String getVersion() { return version; } public void setVersion(String version) { this.version = version; } public static
AppExtension
java
apache__flink
flink-core/src/test/java/org/apache/flink/configuration/ConfigUtilsTest.java
{ "start": 1132, "end": 4939 }
class ____ { private static final ConfigOption<List<String>> TEST_OPTION = key("test.option.key").stringType().asList().noDefaultValue(); private static final Integer[] intArray = {1, 3, 2, 4}; private static final List<Integer> intList = Arrays.asList(intArray); @Test void collectionIsCorrectlyPutAndFetched() { final Configuration configurationUnderTest = new Configuration(); ConfigUtils.encodeCollectionToConfig( configurationUnderTest, TEST_OPTION, intList, Object::toString); final List<Integer> recovered = ConfigUtils.decodeListFromConfig( configurationUnderTest, TEST_OPTION, Integer::valueOf); assertThat(recovered).isEqualTo(intList); } @Test void arrayIsCorrectlyPutAndFetched() { final Configuration configurationUnderTest = new Configuration(); ConfigUtils.encodeArrayToConfig( configurationUnderTest, TEST_OPTION, intArray, Object::toString); final List<Integer> recovered = ConfigUtils.decodeListFromConfig( configurationUnderTest, TEST_OPTION, Integer::valueOf); assertThat(recovered).isEqualTo(intList); } @Test void nullCollectionPutsNothingInConfig() { final Configuration configurationUnderTest = new Configuration(); ConfigUtils.encodeCollectionToConfig( configurationUnderTest, TEST_OPTION, null, Object::toString); assertThat(configurationUnderTest.keySet()).isEmpty(); final Object recovered = configurationUnderTest.get(TEST_OPTION); assertThat(recovered).isNull(); final List<Integer> recoveredList = ConfigUtils.decodeListFromConfig( configurationUnderTest, TEST_OPTION, Integer::valueOf); assertThat(recoveredList).isEmpty(); } @Test void nullArrayPutsNothingInConfig() { final Configuration configurationUnderTest = new Configuration(); ConfigUtils.encodeArrayToConfig( configurationUnderTest, TEST_OPTION, null, Object::toString); assertThat(configurationUnderTest.keySet()).isEmpty(); final Object recovered = configurationUnderTest.get(TEST_OPTION); assertThat(recovered).isNull(); final List<Integer> recoveredList = ConfigUtils.decodeListFromConfig( 
configurationUnderTest, TEST_OPTION, Integer::valueOf); assertThat(recoveredList).isEmpty(); } @Test void emptyCollectionPutsNothingInConfig() { final Configuration configurationUnderTest = new Configuration(); ConfigUtils.encodeCollectionToConfig( configurationUnderTest, TEST_OPTION, Collections.emptyList(), Object::toString); final List<String> recovered = configurationUnderTest.get(TEST_OPTION); assertThat(recovered).isNull(); final List<Integer> recoveredList = ConfigUtils.decodeListFromConfig( configurationUnderTest, TEST_OPTION, Integer::valueOf); assertThat(recoveredList).isEmpty(); } @Test void emptyArrayPutsNothingInConfig() { final Configuration configurationUnderTest = new Configuration(); ConfigUtils.encodeArrayToConfig( configurationUnderTest, TEST_OPTION, new Integer[5], Object::toString); final List<String> recovered = configurationUnderTest.get(TEST_OPTION); assertThat(recovered).isNull(); final List<Integer> recoveredList = ConfigUtils.decodeListFromConfig( configurationUnderTest, TEST_OPTION, Integer::valueOf); assertThat(recoveredList).isEmpty(); } }
ConfigUtilsTest
java
spring-projects__spring-framework
spring-core/src/main/java/org/springframework/cglib/core/ReflectUtils.java
{ "start": 18002, "end": 20682 }
class ____ all."; } }; } // Force static initializers to run. Class.forName(className, true, loader); return c; } public static void setLoadedClassHandler(Consumer<Class<?>> loadedClassHandler) { ReflectUtils.loadedClassHandler = loadedClassHandler; } public static Class<?> loadClass(String className, ClassLoader classLoader) throws ClassNotFoundException { // Force static initializers to run. Class<?> clazz = Class.forName(className, true, classLoader); Consumer<Class<?>> handlerToUse = loadedClassHandler; if (handlerToUse != null) { handlerToUse.accept(clazz); } return clazz; } // SPRING PATCH END public static int findPackageProtected(Class[] classes) { for (int i = 0; i < classes.length; i++) { if (!Modifier.isPublic(classes[i].getModifiers())) { return i; } } return 0; } public static MethodInfo getMethodInfo(final Member member, final int modifiers) { final Signature sig = getSignature(member); return new MethodInfo() { private ClassInfo ci; @Override public ClassInfo getClassInfo() { if (ci == null) { ci = ReflectUtils.getClassInfo(member.getDeclaringClass()); } return ci; } @Override public int getModifiers() { return modifiers; } @Override public Signature getSignature() { return sig; } @Override public Type[] getExceptionTypes() { return ReflectUtils.getExceptionTypes(member); } }; } public static MethodInfo getMethodInfo(Member member) { return getMethodInfo(member, member.getModifiers()); } public static ClassInfo getClassInfo(final Class clazz) { final Type type = Type.getType(clazz); final Type sc = (clazz.getSuperclass() == null) ? 
null : Type.getType(clazz.getSuperclass()); return new ClassInfo() { @Override public Type getType() { return type; } @Override public Type getSuperType() { return sc; } @Override public Type[] getInterfaces() { return TypeUtils.getTypes(clazz.getInterfaces()); } @Override public int getModifiers() { return clazz.getModifiers(); } }; } // used by MethodInterceptorGenerated generated code public static Method[] findMethods(String[] namesAndDescriptors, Method[] methods) { Map map = new HashMap(); for (Method method : methods) { map.put(method.getName() + Type.getMethodDescriptor(method), method); } Method[] result = new Method[namesAndDescriptors.length / 2]; for (int i = 0; i < result.length; i++) { result[i] = (Method) map.get(namesAndDescriptors[i * 2] + namesAndDescriptors[i * 2 + 1]); if (result[i] == null) { // TODO: error? } } return result; } }
at
java
apache__dubbo
dubbo-cluster/src/test/java/org/apache/dubbo/rpc/cluster/filter/LogFilter.java
{ "start": 1216, "end": 1631 }
class ____ implements Filter, Filter.Listener { @Override public Result invoke(Invoker<?> invoker, Invocation invocation) throws RpcException { return invoker.invoke(invocation); } @Override public void onResponse(Result appResponse, Invoker<?> invoker, Invocation invocation) {} @Override public void onError(Throwable t, Invoker<?> invoker, Invocation invocation) {} }
LogFilter
java
apache__camel
core/camel-core/src/test/java/org/apache/camel/impl/DefaultCamelContextAutoStartupTest.java
{ "start": 1195, "end": 5680 }
class ____ extends TestSupport { @Test public void testAutoStartupFalseContextStart() throws Exception { DefaultCamelContext camel = new DefaultCamelContext(false); camel.disableJMX(); camel.setAutoStartup(false); camel.addRoutes(new RouteBuilder() { @Override public void configure() { from("direct:start").routeId("foo").to("mock:result"); } }); camel.start(); assertTrue(camel.isStarted()); assertEquals(1, camel.getRoutes().size()); assertTrue(camel.getRouteController().getRouteStatus("foo").isStopped()); // now start all routes camel.getRouteController().startAllRoutes(); assertTrue(camel.getRouteController().getRouteStatus("foo").isStarted()); // and now its started we can test that it works by sending in a message // to the route MockEndpoint mock = camel.getEndpoint("mock:result", MockEndpoint.class); mock.expectedMessageCount(1); camel.createProducerTemplate().sendBody("direct:start", "Hello World"); mock.assertIsSatisfied(); camel.stop(); } @Test public void testAutoStartupFalseRouteStart() throws Exception { DefaultCamelContext camel = new DefaultCamelContext(false); camel.disableJMX(); camel.setAutoStartup(false); camel.addRoutes(new RouteBuilder() { @Override public void configure() { from("direct:start").routeId("foo").to("mock:result"); } }); camel.start(); assertTrue(camel.isStarted()); assertEquals(1, camel.getRoutes().size()); assertTrue(camel.getRouteController().getRouteStatus("foo").isStopped()); // now start the routes camel.startRoute("foo"); assertTrue(camel.getRouteController().getRouteStatus("foo").isStarted()); // and now its started we can test that it works by sending in a message // to the route MockEndpoint mock = camel.getEndpoint("mock:result", MockEndpoint.class); mock.expectedMessageCount(1); camel.createProducerTemplate().sendBody("direct:start", "Hello World"); mock.assertIsSatisfied(); camel.stop(); } @Test public void testAutoStartupTrue() throws Exception { DefaultCamelContext camel = new DefaultCamelContext(false); 
camel.disableJMX(); camel.setAutoStartup(true); camel.addRoutes(new RouteBuilder() { @Override public void configure() { from("direct:start").routeId("foo").to("mock:result"); } }); camel.start(); assertTrue(camel.isStarted()); assertEquals(1, camel.getRoutes().size()); assertTrue(camel.getRouteController().getRouteStatus("foo").isStarted()); MockEndpoint mock = camel.getEndpoint("mock:result", MockEndpoint.class); mock.expectedMessageCount(1); camel.createProducerTemplate().sendBody("direct:start", "Hello World"); mock.assertIsSatisfied(); camel.stop(); } @Test public void testAutoStartupFalseRouteOverride() throws Exception { DefaultCamelContext camel = new DefaultCamelContext(false); camel.disableJMX(); camel.setAutoStartup(false); camel.addRoutes(new RouteBuilder() { @Override public void configure() { from("direct:start").routeId("foo").autoStartup(true).to("mock:result"); } }); camel.start(); // this is special, when you have auto startup=false on CamelContext, // then NO routes is started assertTrue(camel.isStarted()); assertEquals(1, camel.getRoutes().size()); assertTrue(camel.getRouteController().getRouteStatus("foo").isStopped()); assertFalse(camel.getRouteController().getRouteStatus("foo").isStarted()); // now start all the routes camel.getRouteController().startAllRoutes(); assertTrue(camel.getRouteController().getRouteStatus("foo").isStarted()); MockEndpoint mock = camel.getEndpoint("mock:result", MockEndpoint.class); mock.expectedMessageCount(1); camel.createProducerTemplate().sendBody("direct:start", "Hello World"); mock.assertIsSatisfied(); camel.stop(); } }
DefaultCamelContextAutoStartupTest
java
alibaba__druid
core/src/test/java/com/alibaba/druid/sql/oracle/demo/Demo1.java
{ "start": 2613, "end": 3836 }
class ____ extends OracleASTVisitorAdapter { private int varIndex; private List<SQLVariantRefExpr> variantList = new ArrayList<SQLVariantRefExpr>(); public boolean visit(SQLVariantRefExpr x) { x.getAttributes().put("varIndex", varIndex++); return true; } public boolean visit(SQLBinaryOpExpr x) { if (x.getLeft() instanceof SQLIdentifierExpr && x.getRight() instanceof SQLVariantRefExpr) { SQLIdentifierExpr identExpr = (SQLIdentifierExpr) x.getLeft(); String ident = identExpr.getName(); if (ident.equals("uid")) { variantList.add((SQLVariantRefExpr) x.getRight()); } } return true; } public int getVarIndex() { return varIndex; } public void setVarIndex(int varIndex) { this.varIndex = varIndex; } public List<SQLVariantRefExpr> getVariantList() { return variantList; } public void setVariantList(List<SQLVariantRefExpr> variantList) { this.variantList = variantList; } } private static
GetVariantVisitor
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestAHSClient.java
{ "start": 7970, "end": 17354 }
class ____ extends AHSClientImpl { // private ApplicationReport mockReport; private List<ApplicationReport> reports = new ArrayList<ApplicationReport>(); private HashMap<ApplicationId, List<ApplicationAttemptReport>> attempts = new HashMap<ApplicationId, List<ApplicationAttemptReport>>(); private HashMap<ApplicationAttemptId, List<ContainerReport>> containers = new HashMap<ApplicationAttemptId, List<ContainerReport>>(); GetApplicationsResponse mockAppResponse = mock(GetApplicationsResponse.class); GetApplicationReportResponse mockResponse = mock(GetApplicationReportResponse.class); GetApplicationAttemptsResponse mockAppAttemptsResponse = mock(GetApplicationAttemptsResponse.class); GetApplicationAttemptReportResponse mockAttemptResponse = mock(GetApplicationAttemptReportResponse.class); GetContainersResponse mockContainersResponse = mock(GetContainersResponse.class); GetContainerReportResponse mockContainerResponse = mock(GetContainerReportResponse.class); public MockAHSClient() { super(); createAppReports(); } @Override public void start() { ahsClient = mock(ApplicationHistoryProtocol.class); try { when( ahsClient .getApplicationReport(any(GetApplicationReportRequest.class))) .thenReturn(mockResponse); when(ahsClient.getApplications(any(GetApplicationsRequest.class))) .thenReturn(mockAppResponse); when( ahsClient .getApplicationAttemptReport(any(GetApplicationAttemptReportRequest.class))) .thenReturn(mockAttemptResponse); when( ahsClient .getApplicationAttempts(any(GetApplicationAttemptsRequest.class))) .thenReturn(mockAppAttemptsResponse); when(ahsClient.getContainers(any(GetContainersRequest.class))) .thenReturn(mockContainersResponse); when(ahsClient.getContainerReport(any(GetContainerReportRequest.class))) .thenReturn(mockContainerResponse); } catch (YarnException e) { fail("Exception is not expected."); } catch (IOException e) { fail("Exception is not expected."); } } @Override public List<ApplicationReport> getApplications() throws YarnException, IOException 
{ when(mockAppResponse.getApplicationList()).thenReturn(reports); return super.getApplications(); } @Override public ApplicationReport getApplicationReport(ApplicationId appId) throws YarnException, IOException { when(mockResponse.getApplicationReport()).thenReturn(getReport(appId)); return super.getApplicationReport(appId); } @Override public List<ApplicationAttemptReport> getApplicationAttempts( ApplicationId appId) throws YarnException, IOException { when(mockAppAttemptsResponse.getApplicationAttemptList()).thenReturn( getAttempts(appId)); return super.getApplicationAttempts(appId); } @Override public ApplicationAttemptReport getApplicationAttemptReport( ApplicationAttemptId appAttemptId) throws YarnException, IOException { when(mockAttemptResponse.getApplicationAttemptReport()).thenReturn( getAttempt(appAttemptId)); return super.getApplicationAttemptReport(appAttemptId); } @Override public List<ContainerReport> getContainers(ApplicationAttemptId appAttemptId) throws YarnException, IOException { when(mockContainersResponse.getContainerList()).thenReturn( getContainersReport(appAttemptId)); return super.getContainers(appAttemptId); } @Override public ContainerReport getContainerReport(ContainerId containerId) throws YarnException, IOException { when(mockContainerResponse.getContainerReport()).thenReturn( getContainer(containerId)); return super.getContainerReport(containerId); } @Override public void stop() { } public ApplicationReport getReport(ApplicationId appId) { for (int i = 0; i < reports.size(); ++i) { if (appId.toString().equalsIgnoreCase( reports.get(i).getApplicationId().toString())) { return reports.get(i); } } return null; } public List<ApplicationAttemptReport> getAttempts(ApplicationId appId) { return attempts.get(appId); } public ApplicationAttemptReport getAttempt(ApplicationAttemptId appAttemptId) { return attempts.get(appAttemptId.getApplicationId()).get(0); } public List<ContainerReport> getContainersReport( ApplicationAttemptId appAttemptId) 
{ return containers.get(appAttemptId); } public ContainerReport getContainer(ContainerId containerId) { return containers.get(containerId.getApplicationAttemptId()).get(0); } public List<ApplicationReport> getReports() { return this.reports; } private void createAppReports() { ApplicationId applicationId = ApplicationId.newInstance(1234, 5); ApplicationReport newApplicationReport = ApplicationReport.newInstance(applicationId, ApplicationAttemptId.newInstance(applicationId, 1), "user", "queue", "appname", "host", 124, null, YarnApplicationState.RUNNING, "diagnostics", "url", 1, 2, 3, 4, FinalApplicationStatus.SUCCEEDED, null, "N/A", 0.53789f, "YARN", null); List<ApplicationReport> applicationReports = new ArrayList<ApplicationReport>(); applicationReports.add(newApplicationReport); List<ApplicationAttemptReport> appAttempts = new ArrayList<ApplicationAttemptReport>(); ApplicationAttemptReport attempt = ApplicationAttemptReport.newInstance( ApplicationAttemptId.newInstance(applicationId, 1), "host", 124, "url", "oUrl", "diagnostics", YarnApplicationAttemptState.FINISHED, ContainerId.newContainerId( newApplicationReport.getCurrentApplicationAttemptId(), 1)); appAttempts.add(attempt); ApplicationAttemptReport attempt1 = ApplicationAttemptReport.newInstance( ApplicationAttemptId.newInstance(applicationId, 2), "host", 124, "url", "oUrl", "diagnostics", YarnApplicationAttemptState.FINISHED, ContainerId.newContainerId( newApplicationReport.getCurrentApplicationAttemptId(), 2)); appAttempts.add(attempt1); attempts.put(applicationId, appAttempts); List<ContainerReport> containerReports = new ArrayList<ContainerReport>(); ContainerReport container = ContainerReport.newInstance( ContainerId.newContainerId(attempt.getApplicationAttemptId(), 1), null, NodeId.newInstance("host", 1234), Priority.UNDEFINED, 1234, 5678, "diagnosticInfo", "logURL", 0, ContainerState.COMPLETE, "http://" + NodeId.newInstance("host", 2345).toString()); containerReports.add(container); ContainerReport 
container1 = ContainerReport.newInstance( ContainerId.newContainerId(attempt.getApplicationAttemptId(), 2), null, NodeId.newInstance("host", 1234), Priority.UNDEFINED, 1234, 5678, "diagnosticInfo", "logURL", 0, ContainerState.COMPLETE, "http://" + NodeId.newInstance("host", 2345).toString()); containerReports.add(container1); containers.put(attempt.getApplicationAttemptId(), containerReports); ApplicationId applicationId2 = ApplicationId.newInstance(1234, 6); ApplicationReport newApplicationReport2 = ApplicationReport.newInstance(applicationId2, ApplicationAttemptId.newInstance(applicationId2, 2), "user2", "queue2", "appname2", "host2", 125, null, YarnApplicationState.FINISHED, "diagnostics2", "url2", 2, 2, 2, FinalApplicationStatus.SUCCEEDED, null, "N/A", 0.63789f, "NON-YARN", null); applicationReports.add(newApplicationReport2); ApplicationId applicationId3 = ApplicationId.newInstance(1234, 7); ApplicationReport newApplicationReport3 = ApplicationReport.newInstance(applicationId3, ApplicationAttemptId.newInstance(applicationId3, 3), "user3", "queue3", "appname3", "host3", 126, null, YarnApplicationState.RUNNING, "diagnostics3", "url3", 3, 3, 3, FinalApplicationStatus.SUCCEEDED, null, "N/A", 0.73789f, "MAPREDUCE", null); applicationReports.add(newApplicationReport3); ApplicationId applicationId4 = ApplicationId.newInstance(1234, 8); ApplicationReport newApplicationReport4 = ApplicationReport.newInstance(applicationId4, ApplicationAttemptId.newInstance(applicationId4, 4), "user4", "queue4", "appname4", "host4", 127, null, YarnApplicationState.FAILED, "diagnostics4", "url4", 4, 4, 4, FinalApplicationStatus.SUCCEEDED, null, "N/A", 0.83789f, "NON-MAPREDUCE", null); applicationReports.add(newApplicationReport4); reports = applicationReports; } } }
MockAHSClient
java
apache__camel
components/camel-azure/camel-azure-storage-blob/src/test/java/org/apache/camel/component/azure/storage/blob/transform/AzureStorageBlobCloudEventDataTypeTransformerTest.java
{ "start": 1468, "end": 3192 }
class ____ { private final DefaultCamelContext camelContext = new DefaultCamelContext(); private final AzureStorageBlobCloudEventDataTypeTransformer transformer = new AzureStorageBlobCloudEventDataTypeTransformer(); @Test void shouldMapToCloudEvent() throws Exception { Exchange exchange = new DefaultExchange(camelContext); exchange.getMessage().setHeader(BlobConstants.BLOB_NAME, "myBlob"); exchange.getMessage().setHeader(BlobConstants.E_TAG, "eTag"); exchange.getMessage().setBody(new ByteArrayInputStream("Test1".getBytes(StandardCharsets.UTF_8))); transformer.transform(exchange.getMessage(), DataType.ANY, DataType.ANY); Assertions.assertTrue(exchange.getMessage().hasHeaders()); Assertions.assertTrue(exchange.getMessage().getHeaders().containsKey(BlobConstants.BLOB_NAME)); assertEquals("org.apache.camel.event.azure.storage.blob.getBlob", exchange.getMessage().getHeader(CloudEvent.CAMEL_CLOUD_EVENT_TYPE)); assertEquals("myBlob", exchange.getMessage().getHeader(CloudEvent.CAMEL_CLOUD_EVENT_SUBJECT)); assertEquals("azure.storage.blob.eTag", exchange.getMessage().getHeader(CloudEvent.CAMEL_CLOUD_EVENT_SOURCE)); } @Test public void shouldLookupDataTypeTransformer() throws Exception { Transformer transformer = camelContext.getTransformerRegistry() .resolveTransformer(new TransformerKey("azure-storage-blob:application-cloudevents")); Assertions.assertNotNull(transformer); Assertions.assertEquals(AzureStorageBlobCloudEventDataTypeTransformer.class, transformer.getClass()); } }
AzureStorageBlobCloudEventDataTypeTransformerTest
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/weightconversion/TestWeightToWeightConverter.java
{ "start": 1566, "end": 4185 }
class ____ extends WeightConverterTestBase { private WeightToWeightConverter converter; private CapacitySchedulerConfiguration csConfig; public static final QueuePath ROOT = new QueuePath(CapacitySchedulerConfiguration.ROOT); public static final QueuePath ROOT_A = new QueuePath("root", "a"); public static final QueuePath ROOT_B = new QueuePath("root", "b"); public static final QueuePath ROOT_C = new QueuePath("root", "c"); @BeforeEach public void setup() { converter = new WeightToWeightConverter(); csConfig = new CapacitySchedulerConfiguration( new Configuration(false)); } @Test public void testNoChildQueueConversion() { FSQueue root = createFSQueues(); converter.convertWeightsForChildQueues(root, csConfig); assertEquals(1.0f, csConfig.getNonLabeledQueueWeight(ROOT), 0.0f, "root weight"); assertEquals(22, csConfig.getPropsWithPrefix(PREFIX).size(), "Converted items"); } @Test public void testSingleWeightConversion() { FSQueue root = createFSQueues(1); converter.convertWeightsForChildQueues(root, csConfig); assertEquals(1.0f, csConfig.getNonLabeledQueueWeight(ROOT), 0.0f, "root weight"); assertEquals(1.0f, csConfig.getNonLabeledQueueWeight(ROOT_A), 0.0f, "root.a weight"); assertEquals(23, csConfig.getPropsWithPrefix(PREFIX).size(), "Number of properties"); } @Test public void testMultiWeightConversion() { FSQueue root = createFSQueues(1, 2, 3); converter.convertWeightsForChildQueues(root, csConfig); assertEquals(25, csConfig.getPropsWithPrefix(PREFIX).size(), "Number of properties"); assertEquals(1.0f, csConfig.getNonLabeledQueueWeight(ROOT), 0.0f, "root weight"); assertEquals(1.0f, csConfig.getNonLabeledQueueWeight(ROOT_A), 0.0f, "root.a weight"); assertEquals(2.0f, csConfig.getNonLabeledQueueWeight(ROOT_B), 0.0f, "root.b weight"); assertEquals(3.0f, csConfig.getNonLabeledQueueWeight(ROOT_C), 0.0f, "root.c weight"); } @Test public void testAutoCreateV2FlagOnParent() { FSQueue root = createFSQueues(1); converter.convertWeightsForChildQueues(root, csConfig); 
assertTrue(csConfig.isAutoQueueCreationV2Enabled(ROOT), "root autocreate v2 enabled"); } @Test public void testAutoCreateV2FlagOnParentWithoutChildren() { FSQueue root = createParent(new ArrayList<>()); converter.convertWeightsForChildQueues(root, csConfig); assertEquals(22, csConfig.getPropsWithPrefix(PREFIX).size(), "Number of properties"); assertTrue(csConfig.isAutoQueueCreationV2Enabled(ROOT), "root autocreate v2 enabled"); } }
TestWeightToWeightConverter
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalCacheCleanup.java
{ "start": 1908, "end": 10359 }
class ____ { @Test public void testBasicCleanup() { ConcurrentMap<LocalResourceRequest, LocalizedResource> publicRsrc = new ConcurrentHashMap<LocalResourceRequest, LocalizedResource>(); addResource(publicRsrc, "/pub-resource1.txt", 5, 20, 0); addResource(publicRsrc, "/pub-resource2.txt", 3, 20, 0); addResource(publicRsrc, "/pub-resource3.txt", 15, 20, 0); ConcurrentMap<String, LocalResourcesTracker> privateRsrc = new ConcurrentHashMap<String, LocalResourcesTracker>(); ConcurrentMap<LocalResourceRequest, LocalizedResource> user1rsrcs = new ConcurrentHashMap<LocalResourceRequest, LocalizedResource>(); addResource(user1rsrcs, "/private-u1-resource4.txt", 1, 20, 0); LocalResourcesTracker user1Tracker = new StubbedLocalResourcesTrackerImpl("user1", user1rsrcs); privateRsrc.put("user1", user1Tracker); ConcurrentMap<LocalResourceRequest, LocalizedResource> user2rsrcs = new ConcurrentHashMap<LocalResourceRequest, LocalizedResource>(); addResource(user2rsrcs, "/private-u2-resource5.txt", 2, 20, 0); LocalResourcesTracker user2Tracker = new StubbedLocalResourcesTrackerImpl("user2", user2rsrcs); privateRsrc.put("user2", user2Tracker); ResourceLocalizationService rls = createLocService(publicRsrc, privateRsrc, 0); LocalCacheCleanerStats stats = rls.handleCacheCleanup(); assertEquals(0, ((StubbedLocalResourcesTrackerImpl) rls.publicRsrc) .getLocalRsrc().size()); assertEquals(0, ((StubbedLocalResourcesTrackerImpl) privateRsrc.get("user1")) .getLocalRsrc().size()); assertEquals(0, ((StubbedLocalResourcesTrackerImpl) privateRsrc.get("user2")) .getLocalRsrc().size()); assertEquals(100, stats.getTotalDelSize()); assertEquals(100, rls.metrics.getTotalBytesDeleted()); assertEquals(60, stats.getPublicDelSize()); assertEquals(60, rls.metrics.getPublicBytesDeleted()); assertEquals(40, stats.getPrivateDelSize()); assertEquals(40, rls.metrics.getPrivateBytesDeleted()); assertEquals(100, rls.metrics.getCacheSizeBeforeClean()); } @Test public void testPositiveRefCount() { 
ConcurrentMap<LocalResourceRequest, LocalizedResource> publicRsrc = new ConcurrentHashMap<LocalResourceRequest, LocalizedResource>(); // Oldest resource with a positive ref count the other with a ref count // equal to 0. LocalResourceRequest survivor = addResource(publicRsrc, "/pub-resource1.txt", 1, 20, 1); addResource(publicRsrc, "/pub-resource2.txt", 5, 20, 0); ConcurrentMap<String, LocalResourcesTracker> privateRsrc = new ConcurrentHashMap<String, LocalResourcesTracker>(); ResourceLocalizationService rls = createLocService(publicRsrc, privateRsrc, 0); LocalCacheCleanerStats stats = rls.handleCacheCleanup(); StubbedLocalResourcesTrackerImpl resources = (StubbedLocalResourcesTrackerImpl) rls.publicRsrc; assertEquals(1, resources.getLocalRsrc().size()); assertTrue(resources.getLocalRsrc().containsKey(survivor)); assertEquals(20, stats.getTotalDelSize()); assertEquals(20, rls.metrics.getTotalBytesDeleted()); assertEquals(20, stats.getPublicDelSize()); assertEquals(20, rls.metrics.getPublicBytesDeleted()); assertEquals(0, stats.getPrivateDelSize()); assertEquals(0, rls.metrics.getPrivateBytesDeleted()); assertEquals(40, rls.metrics.getCacheSizeBeforeClean()); } @Test public void testLRUAcrossTrackers() { ConcurrentMap<LocalResourceRequest, LocalizedResource> publicRsrc = new ConcurrentHashMap<LocalResourceRequest, LocalizedResource>(); LocalResourceRequest pubSurviver1 = addResource(publicRsrc, "/pub-resource1.txt", 8, 20, 0); LocalResourceRequest pubSurviver2 = addResource(publicRsrc, "/pub-resource2.txt", 7, 20, 0); addResource(publicRsrc, "/pub-resource3.txt", 1, 20, 0); ConcurrentMap<String, LocalResourcesTracker> privateRsrc = new ConcurrentHashMap<String, LocalResourcesTracker>(); ConcurrentMap<LocalResourceRequest, LocalizedResource> user1rsrcs = new ConcurrentHashMap<LocalResourceRequest, LocalizedResource>(); LocalResourceRequest usr1Surviver1 = addResource(user1rsrcs, "/private-u1-resource1.txt", 6, 20, 0); addResource(user1rsrcs, 
"/private-u1-resource2.txt", 2, 20, 0); LocalResourcesTracker user1Tracker = new StubbedLocalResourcesTrackerImpl("user1", user1rsrcs); privateRsrc.put("user1", user1Tracker); ConcurrentMap<LocalResourceRequest, LocalizedResource> user2rsrcs = new ConcurrentHashMap<LocalResourceRequest, LocalizedResource>(); LocalResourceRequest usr2Surviver1 = addResource(user2rsrcs, "/private-u2-resource1.txt", 5, 20, 0); addResource(user2rsrcs, "/private-u2-resource2.txt", 3, 20, 0); addResource(user2rsrcs, "/private-u2-resource3.txt", 4, 20, 0); LocalResourcesTracker user2Tracker = new StubbedLocalResourcesTrackerImpl("user2", user2rsrcs); privateRsrc.put("user2", user2Tracker); ResourceLocalizationService rls = createLocService(publicRsrc, privateRsrc, 80); LocalCacheCleanerStats stats = rls.handleCacheCleanup(); Map<LocalResourceRequest, LocalizedResource> pubLocalRsrc = ((StubbedLocalResourcesTrackerImpl) rls.publicRsrc).getLocalRsrc(); assertEquals(2, pubLocalRsrc.size()); assertTrue(pubLocalRsrc.containsKey(pubSurviver1)); assertTrue(pubLocalRsrc.containsKey(pubSurviver2)); Map<LocalResourceRequest, LocalizedResource> usr1LocalRsrc = ((StubbedLocalResourcesTrackerImpl) privateRsrc.get("user1")) .getLocalRsrc(); assertEquals(1, usr1LocalRsrc.size()); assertTrue(usr1LocalRsrc.containsKey(usr1Surviver1)); Map<LocalResourceRequest, LocalizedResource> usr2LocalRsrc = ((StubbedLocalResourcesTrackerImpl) privateRsrc.get("user2")) .getLocalRsrc(); assertEquals(1, usr2LocalRsrc.size()); assertTrue(usr2LocalRsrc.containsKey(usr2Surviver1)); assertEquals(80, stats.getTotalDelSize()); assertEquals(80, rls.metrics.getTotalBytesDeleted()); assertEquals(20, stats.getPublicDelSize()); assertEquals(20, rls.metrics.getPublicBytesDeleted()); assertEquals(60, stats.getPrivateDelSize()); assertEquals(60, rls.metrics.getPrivateBytesDeleted()); assertEquals(160, rls.metrics.getCacheSizeBeforeClean()); } private ResourceLocalizationService createLocService( ConcurrentMap<LocalResourceRequest, 
LocalizedResource> publicRsrcs, ConcurrentMap<String, LocalResourcesTracker> privateRsrcs, long targetCacheSize) { Context mockedContext = mock(Context.class); when(mockedContext.getNMStateStore()).thenReturn(null); NodeManagerMetrics metrics = NodeManagerMetrics.create(); ResourceLocalizationService rls = new ResourceLocalizationService(null, null, null, null, mockedContext, metrics); // We set the following members directly so we don't have to deal with // mocking out the service init method. rls.publicRsrc = new StubbedLocalResourcesTrackerImpl(null, publicRsrcs); rls.cacheTargetSize = targetCacheSize; rls.privateRsrc.putAll(privateRsrcs); return rls; } private LocalResourceRequest addResource( ConcurrentMap<LocalResourceRequest, LocalizedResource> resources, String path, long timestamp, long size, int refCount) { LocalResourceRequest request = createLocalResourceRequest(path, timestamp); LocalizedResource resource = createLocalizedResource(size, refCount, timestamp, request); resources.put(request, resource); return request; } private LocalResourceRequest createLocalResourceRequest(String path, long timestamp) { return new LocalResourceRequest(new Path(path), timestamp, LocalResourceType.FILE, LocalResourceVisibility.PUBLIC, null); } private LocalizedResource createLocalizedResource(long size, int refCount, long timestamp, LocalResourceRequest req) { LocalizedResource lr = mock(LocalizedResource.class); when(lr.getSize()).thenReturn(size); when(lr.getRefCount()).thenReturn(refCount); when(lr.getTimestamp()).thenReturn(timestamp); when(lr.getState()).thenReturn(ResourceState.LOCALIZED); when(lr.getRequest()).thenReturn(req); return lr; }
TestLocalCacheCleanup
java
grpc__grpc-java
api/src/test/java/io/grpc/ForwardingChannelBuilder2Test.java
{ "start": 1079, "end": 1311 }
class ____ { private final ManagedChannelBuilder<?> mockDelegate = mock(ManagedChannelBuilder.class); private final ForwardingChannelBuilder2<?> testChannelBuilder = new TestBuilder(); private final
ForwardingChannelBuilder2Test