columns:
  language    stringclasses  (1 value)
  repo        stringclasses  (60 values)
  path        stringlengths  (22 to 294)
  class_span  dict
  source      stringlengths  (13 to 1.16M)
  target      stringlengths  (1 to 113)
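
Each record below pairs a Java class or interface body whose declared name has been masked with "____" (the source column) against the masked identifier itself (the target column). The sketch that follows shows one way such a dump could be consumed; it assumes the rows are exported as JSON Lines under the column names above. The file name rows.jsonl is hypothetical, the Jackson calls are standard library API, and reading class_span as character offsets of the class within the original file is an inference from the preview, not documented behavior.

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public class MaskedClassNameRows {
        public static void main(String[] args) throws IOException {
            ObjectMapper mapper = new ObjectMapper();
            // rows.jsonl is a hypothetical JSON Lines export of this dataset.
            for (String line : Files.readAllLines(Path.of("rows.jsonl"))) {
                JsonNode row = mapper.readTree(line);
                String source = row.get("source").asText(); // class body, name masked as ____
                String target = row.get("target").asText(); // the identifier that was masked out
                // class_span presumably locates the class inside the original file.
                int start = row.get("class_span").get("start").asInt();
                int end = row.get("class_span").get("end").asInt();
                // Splice the target back over the placeholder to recover the declaration.
                String restored = source.replace("____", target);
                System.out.printf("%s [%d..%d]: %s%n",
                        row.get("path").asText(), start, end,
                        restored.substring(0, Math.min(80, restored.length())));
            }
        }
    }

String.replace swaps every occurrence of the placeholder; in the preview rows the mask appears exactly once, so this is safe, but a stricter loader might assert on the occurrence count first.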

language: java
repo: apache__hadoop
path: hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestPipelinesFailover.java
class_span: { "start": 4094, "end": 4737 }
source:
enum ____ { GRACEFUL_FAILOVER { @Override void run(MiniDFSCluster cluster, int previousActive, int activeIndex) throws IOException { cluster.transitionToStandby(previousActive); cluster.transitionToActive(activeIndex); } }, ORIGINAL_ACTIVE_CRASHED { @Override void run(MiniDFSCluster cluster, int previousActive, int activeIndex) throws IOException { cluster.restartNameNode(previousActive); cluster.transitionToActive(activeIndex); } }; abstract void run(MiniDFSCluster cluster, int previousActive, int activeIndex) throws IOException; }
target: TestScenario

language: java
repo: apache__avro
path: lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumReader.java
class_span: { "start": 1335, "end": 2821 }
source:
class ____<T> extends GenericDatumReader<T> { public ProtobufDatumReader() { this(null, null, ProtobufData.get()); } public ProtobufDatumReader(Class<T> c) { this(ProtobufData.get().getSchema(c)); } /** Construct where the writer's and reader's schemas are the same. */ public ProtobufDatumReader(Schema schema) { this(schema, schema, ProtobufData.get()); } /** Construct given writer's and reader's schema. */ public ProtobufDatumReader(Schema writer, Schema reader) { this(writer, reader, ProtobufData.get()); } protected ProtobufDatumReader(Schema writer, Schema reader, ProtobufData data) { super(writer, reader, data); } @Override protected Object readRecord(Object old, Schema expected, ResolvingDecoder in) throws IOException { Message.Builder b = (Message.Builder) super.readRecord(old, expected, in); return b.build(); // build instance } @Override protected Object createEnum(String symbol, Schema schema) { try { Class c = SpecificData.get().getClass(schema); if (c == null) return super.createEnum(symbol, schema); // punt to generic return ((ProtocolMessageEnum) Enum.valueOf(c, symbol)).getValueDescriptor(); } catch (Exception e) { throw new RuntimeException(e); } } @Override protected Object readBytes(Object old, Decoder in) throws IOException { return ByteString.copyFrom(((ByteBuffer) super.readBytes(old, in)).array()); } }
target: ProtobufDatumReader

language: java
repo: apache__hadoop
path: hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsLocalFs.java
class_span: { "start": 978, "end": 1313 }
source:
class ____ extends ViewFsBaseTest { @Override @BeforeEach public void setUp() throws Exception { // create the test root on local_fs fcTarget = FileContext.getLocalFSFileContext(); super.setUp(); } @Override @AfterEach public void tearDown() throws Exception { super.tearDown(); } }
target: TestViewFsLocalFs

language: java
repo: elastic__elasticsearch
path: build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/LicenseHeadersPrecommitPlugin.java
class_span: { "start": 902, "end": 1790 }
source:
class ____ extends PrecommitPlugin { @Inject public LicenseHeadersPrecommitPlugin(ProviderFactory providerFactory) { this.providerFactory = providerFactory; } @Override public TaskProvider<? extends Task> createTask(Project project) { return project.getTasks().register("licenseHeaders", LicenseHeadersTask.class, licenseHeadersTask -> { project.getPlugins().withType(JavaBasePlugin.class, javaBasePlugin -> { final SourceSetContainer sourceSets = project.getExtensions().getByType(JavaPluginExtension.class).getSourceSets(); licenseHeadersTask.getSourceFolders() .addAll(providerFactory.provider(() -> sourceSets.stream().map(s -> s.getAllJava()).collect(Collectors.toList()))); }); }); } private ProviderFactory providerFactory; }
target: LicenseHeadersPrecommitPlugin

language: java
repo: elastic__elasticsearch
path: x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java
class_span: { "start": 7961, "end": 15925 }
source:
class ____ extends ESTestCase { private static EsqlParser parser; private static Analyzer analyzer; private static Analyzer allTypesAnalyzer; private static LogicalPlanOptimizer logicalOptimizer; private static Map<String, EsField> mapping; @BeforeClass public static void init() { parser = new EsqlParser(); mapping = loadMapping("mapping-basic.json"); EsIndex test = EsIndexGenerator.esIndex("test", mapping, Map.of("test", IndexMode.STANDARD)); logicalOptimizer = new LogicalPlanOptimizer(unboundLogicalOptimizerContext()); analyzer = new Analyzer( testAnalyzerContext( EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), indexResolutions(test), emptyPolicyResolution(), emptyInferenceResolution() ), TEST_VERIFIER ); var allTypesMapping = loadMapping("mapping-all-types.json"); EsIndex testAll = EsIndexGenerator.esIndex("test_all", allTypesMapping, Map.of("test_all", IndexMode.STANDARD)); allTypesAnalyzer = new Analyzer( testAnalyzerContext( EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), indexResolutions(testAll), emptyPolicyResolution(), emptyInferenceResolution() ), TEST_VERIFIER ); } /** * Expects * LocalRelation[[first_name{f}#4],EMPTY] */ public void testMissingFieldInFilterNumeric() { var plan = plan(""" from test | where emp_no > 10 | keep first_name """); var testStats = statsForMissingField("emp_no"); var localPlan = localPlan(plan, testStats); var empty = asEmptyRelation(localPlan); assertThat(Expressions.names(empty.output()), contains("first_name")); } /** * Expects * LocalRelation[[first_name{f}#4],EMPTY] */ public void testMissingFieldInFilterString() { var plan = plan(""" from test | where starts_with(last_name, "abc") | keep first_name """); var testStats = statsForMissingField("last_name"); var localPlan = localPlan(plan, testStats); var empty = asEmptyRelation(localPlan); assertThat(Expressions.names(empty.output()), contains("first_name")); } /** * Expects * Project[[last_name{r}#7]] * \_Eval[[null[KEYWORD] AS last_name]] * \_Limit[1000[INTEGER],false] * \_EsRelation[test][_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gen..] */ public void testMissingFieldInProject() { var plan = plan(""" from test | keep last_name """); var testStats = statsForMissingField("last_name"); var localPlan = localPlan(plan, testStats); var project = as(localPlan, Project.class); var projections = project.projections(); assertThat(Expressions.names(projections), contains("last_name")); as(projections.get(0), ReferenceAttribute.class); var eval = as(project.child(), Eval.class); assertThat(Expressions.names(eval.fields()), contains("last_name")); var alias = as(eval.fields().get(0), Alias.class); var literal = as(alias.child(), Literal.class); assertThat(literal.value(), is(nullValue())); assertThat(literal.dataType(), is(KEYWORD)); var limit = as(eval.child(), Limit.class); var source = as(limit.child(), EsRelation.class); assertThat(Expressions.names(source.output()), not(contains("last_name"))); } /* * Expects a similar plan to testMissingFieldInProject() above, except for the Alias's child value * Project[[last_name{r}#4]] * \_Eval[[[66 6f 6f][KEYWORD] AS last_name]] * \_Limit[1000[INTEGER],false] * \_EsRelation[test][_meta_field{f}#11, emp_no{f}#5, first_name{f}#6, ge..] 
*/ public void testReassignedMissingFieldInProject() { var plan = plan(""" from test | keep last_name | eval last_name = "foo" """); var testStats = statsForMissingField("last_name"); var localPlan = localPlan(plan, testStats); var project = as(localPlan, Project.class); var projections = project.projections(); assertThat(Expressions.names(projections), contains("last_name")); as(projections.get(0), ReferenceAttribute.class); var eval = as(project.child(), Eval.class); assertThat(Expressions.names(eval.fields()), contains("last_name")); var alias = as(eval.fields().get(0), Alias.class); var literal = as(alias.child(), Literal.class); assertThat(literal.value(), is(new BytesRef("foo"))); assertThat(literal.dataType(), is(KEYWORD)); var limit = as(eval.child(), Limit.class); var source = as(limit.child(), EsRelation.class); assertThat(Expressions.names(source.output()), not(contains("last_name"))); } /** * Expects * EsqlProject[[first_name{f}#4]] * \_Limit[10000[INTEGER]] * \_EsRelation[test][_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, !ge..] */ public void testMissingFieldInSort() { var plan = plan(""" from test | sort last_name | keep first_name """); var testStats = statsForMissingField("last_name"); var localPlan = localPlan(plan, testStats); var project = as(localPlan, Project.class); var projections = project.projections(); assertThat(Expressions.names(projections), contains("first_name")); var limit = as(project.child(), Limit.class); var source = as(limit.child(), EsRelation.class); assertThat(Expressions.names(source.output()), not(contains("last_name"))); } /** * Expects * EsqlProject[[first_name{f}#7, last_name{r}#17]] * \_Limit[1000[INTEGER],true] * \_MvExpand[last_name{f}#10,last_name{r}#17] * \_Project[[_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, gender{f}#8, hire_date{f}#13, job{f}#14, job.raw{f}#15, lang * uages{f}#9, last_name{r}#10, long_noidx{f}#16, salary{f}#11]] * \_Eval[[null[KEYWORD] AS last_name]] * \_Limit[1000[INTEGER],false] * \_EsRelation[test][_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, ge..] */ public void testMissingFieldInMvExpand() { var plan = plan(""" from test | mv_expand last_name | keep first_name, last_name """); var testStats = statsForMissingField("last_name"); var localPlan = localPlan(plan, testStats); // It'd be much better if this project was pushed down past the MvExpand, because MvExpand's cost scales with the number of // involved attributes/columns. var project = as(localPlan, EsqlProject.class); var projections = project.projections(); assertThat(Expressions.names(projections), contains("first_name", "last_name")); var limit1 = asLimit(project.child(), 1000, true); var mvExpand = as(limit1.child(), MvExpand.class); var project2 = as(mvExpand.child(), Project.class); var eval = as(project2.child(), Eval.class); assertEquals(eval.fields().size(), 1); var lastName = eval.fields().get(0); assertEquals(lastName.name(), "last_name"); assertEquals(lastName.child(), new Literal(EMPTY, null, KEYWORD)); var limit2 = asLimit(eval.child(), 1000, false); var relation = as(limit2.child(), EsRelation.class); assertThat(Expressions.names(relation.output()), not(contains("last_name"))); } public static
target: LocalLogicalPlanOptimizerTests

language: java
repo: elastic__elasticsearch
path: x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionTests.java
class_span: { "start": 736, "end": 1520 }
source:
class ____ extends AggregatorFunctionTestCase { @Override protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new SequenceIntBlockSourceOperator(blockFactory, IntStream.range(0, size).map(l -> randomInt())); } @Override protected AggregatorFunctionSupplier aggregatorFunction() { return new MaxIntAggregatorFunctionSupplier(); } @Override protected String expectedDescriptionOfAggregator() { return "max of ints"; } @Override public void assertSimpleOutput(List<Page> input, Block result) { int max = input.stream().flatMapToInt(p -> allInts(p.getBlock(0))).max().getAsInt(); assertThat(((IntBlock) result).getInt(0), equalTo(max)); } }
target: MaxIntAggregatorFunctionTests

language: java
repo: hibernate__hibernate-orm
path: hibernate-core/src/main/java/org/hibernate/engine/jdbc/NClobImplementer.java
class_span: { "start": 144, "end": 335 }
source:
interface ____ non-contextually created {@link java.sql.NClob} instances. * <p> * {@link java.sql.NClob} is a new type introduced in JDK 1.6 (JDBC 4) * * @author Steve Ebersole */ public
target: for

language: java
repo: micronaut-projects__micronaut-core
path: test-suite/src/test/java/io/micronaut/docs/ioc/mappers/SimpleMapperSpec.java
class_span: { "start": 215, "end": 790 }
source:
class ____ { @Test void testSimpleMappers() { try (ApplicationContext context = ApplicationContext.run(Collections.singletonMap("spec.name", "SimpleMapperSpec"))) { // tag::mappers[] ContactMappers contactMappers = context.getBean(ContactMappers.class); ContactEntity contactEntity = contactMappers.toEntity(new ContactForm("John", "Snow")); assertEquals("John", contactEntity.firstName()); assertEquals("Snow", contactEntity.lastName()); // end::mappers[] } } }
target: SimpleMapperSpec

language: java
repo: spring-projects__spring-framework
path: spring-aop/src/main/java/org/springframework/aop/framework/AopProxyUtils.java
class_span: { "start": 6129, "end": 6822 }
source:
interface ____ the AdvisedSupport's * {@link AdvisedSupport#setOpaque "opaque"} flag is on. Always adds the * {@link org.springframework.aop.SpringProxy} marker interface. * @param advised the proxy config * @param decoratingProxy whether to expose the {@link DecoratingProxy} interface * @return the complete set of interfaces to proxy * @since 4.3 * @see SpringProxy * @see Advised * @see DecoratingProxy */ static Class<?>[] completeProxiedInterfaces(AdvisedSupport advised, boolean decoratingProxy) { Class<?>[] specifiedInterfaces = advised.getProxiedInterfaces(); if (specifiedInterfaces.length == 0) { // No user-specified interfaces: check whether target
target: unless

language: java
repo: apache__hadoop
path: hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/NameNodeConnector.java
class_span: { "start": 3091, "end": 16859 }
source:
class ____ implements Closeable { private static final Logger LOG = LoggerFactory.getLogger(NameNodeConnector.class); public static final int DEFAULT_MAX_IDLE_ITERATIONS = 5; private static boolean write2IdFile = true; private static boolean checkOtherInstanceRunning = true; /** Create {@link NameNodeConnector} for the given namenodes. */ public static List<NameNodeConnector> newNameNodeConnectors( Collection<URI> namenodes, String name, Path idPath, Configuration conf, int maxIdleIterations) throws IOException { final List<NameNodeConnector> connectors = new ArrayList<NameNodeConnector>( namenodes.size()); for (URI uri : namenodes) { NameNodeConnector nnc = new NameNodeConnector(name, uri, idPath, null, conf, maxIdleIterations); nnc.getKeyManager().startBlockKeyUpdater(); connectors.add(nnc); } return connectors; } public static List<NameNodeConnector> newNameNodeConnectors( Map<URI, List<Path>> namenodes, String name, Path idPath, Configuration conf, int maxIdleIterations) throws IOException { final List<NameNodeConnector> connectors = new ArrayList<NameNodeConnector>( namenodes.size()); for (Map.Entry<URI, List<Path>> entry : namenodes.entrySet()) { NameNodeConnector nnc = new NameNodeConnector(name, entry.getKey(), idPath, entry.getValue(), conf, maxIdleIterations); nnc.getKeyManager().startBlockKeyUpdater(); connectors.add(nnc); } return connectors; } public static List<NameNodeConnector> newNameNodeConnectors( Collection<URI> namenodes, Collection<String> nsIds, String name, Path idPath, Configuration conf, int maxIdleIterations) throws IOException { final List<NameNodeConnector> connectors = new ArrayList<NameNodeConnector>( namenodes.size()); Map<URI, String> uriToNsId = new HashMap<>(); if (nsIds != null) { for (URI uri : namenodes) { for (String nsId : nsIds) { if (uri.getAuthority().equals(nsId)) { uriToNsId.put(uri, nsId); } } } } for (URI uri : namenodes) { String nsId = uriToNsId.get(uri); NameNodeConnector nnc = new NameNodeConnector(name, uri, nsId, idPath, null, conf, maxIdleIterations); nnc.getKeyManager().startBlockKeyUpdater(); connectors.add(nnc); } return connectors; } @VisibleForTesting public static void setWrite2IdFile(boolean write2IdFile) { NameNodeConnector.write2IdFile = write2IdFile; } @VisibleForTesting public static void checkOtherInstanceRunning(boolean toCheck) { NameNodeConnector.checkOtherInstanceRunning = toCheck; } private final URI nameNodeUri; private final String blockpoolID; private final BalancerProtocols namenode; /** * If set getBlocksToStandby true, Balancer will getBlocks from * Standby NameNode only and it can reduce the performance impact of Active * NameNode, especially in a busy HA mode cluster. 
*/ private boolean getBlocksToStandby; private String nsId; private Configuration config; private final KeyManager keyManager; final AtomicBoolean fallbackToSimpleAuth = new AtomicBoolean(false); private final DistributedFileSystem fs; private final Path idPath; private OutputStream out; private final List<Path> targetPaths; private final AtomicLong bytesMoved = new AtomicLong(); private final AtomicLong blocksMoved = new AtomicLong(); private final AtomicLong blocksFailed = new AtomicLong(); private final int maxNotChangedIterations; private int notChangedIterations = 0; private final RateLimiter getBlocksRateLimiter; public NameNodeConnector(String name, URI nameNodeUri, Path idPath, List<Path> targetPaths, Configuration conf, int maxNotChangedIterations) throws IOException { this.nameNodeUri = nameNodeUri; this.idPath = idPath; this.targetPaths = targetPaths == null || targetPaths.isEmpty() ? Arrays .asList(new Path("/")) : targetPaths; this.maxNotChangedIterations = maxNotChangedIterations; int getBlocksMaxQps = conf.getInt( DFSConfigKeys.DFS_NAMENODE_GETBLOCKS_MAX_QPS_KEY, DFSConfigKeys.DFS_NAMENODE_GETBLOCKS_MAX_QPS_DEFAULT); if (getBlocksMaxQps > 0) { LOG.info("getBlocks calls for {} will be rate-limited to {} per second", nameNodeUri, getBlocksMaxQps); this.getBlocksRateLimiter = RateLimiter.create(getBlocksMaxQps); } else { this.getBlocksRateLimiter = null; } this.namenode = NameNodeProxies.createProxy(conf, nameNodeUri, BalancerProtocols.class, fallbackToSimpleAuth).getProxy(); this.getBlocksToStandby = !conf.getBoolean( DFSConfigKeys.DFS_NAMENODE_GETBLOCKS_CHECK_OPERATION_KEY, DFSConfigKeys.DFS_NAMENODE_GETBLOCKS_CHECK_OPERATION_DEFAULT); this.config = conf; this.fs = (DistributedFileSystem)FileSystem.get(nameNodeUri, conf); final NamespaceInfo namespaceinfo = namenode.versionRequest(); this.blockpoolID = namespaceinfo.getBlockPoolID(); final FsServerDefaults defaults = fs.getServerDefaults(new Path("/")); this.keyManager = new KeyManager(blockpoolID, namenode, defaults.getEncryptDataTransfer(), conf); // if it is for test, we do not create the id file if (checkOtherInstanceRunning) { out = checkAndMarkRunning(); if (out == null) { // Exit if there is another one running. throw new IOException("Another " + name + " is running."); } } } public NameNodeConnector(String name, URI nameNodeUri, String nsId, Path idPath, List<Path> targetPaths, Configuration conf, int maxNotChangedIterations) throws IOException { this(name, nameNodeUri, idPath, targetPaths, conf, maxNotChangedIterations); this.nsId = nsId; } public DistributedFileSystem getDistributedFileSystem() { return fs; } /** @return the block pool ID */ public String getBlockpoolID() { return blockpoolID; } public AtomicLong getBytesMoved() { return bytesMoved; } public AtomicLong getBlocksMoved() { return blocksMoved; } public AtomicLong getBlocksFailed() { return blocksFailed; } public void addBytesMoved(long numBytes) { bytesMoved.addAndGet(numBytes); blocksMoved.incrementAndGet(); } public URI getNameNodeUri() { return nameNodeUri; } /** @return blocks with locations. 
*/ public BlocksWithLocations getBlocks(DatanodeInfo datanode, long size, long minBlockSize, long timeInterval, StorageType storageType) throws IOException { if (getBlocksRateLimiter != null) { getBlocksRateLimiter.acquire(); } boolean isRequestStandby = false; NamenodeProtocol nnProxy = null; InetSocketAddress standbyAddress = null; try { ProxyPair proxyPair = getProxy(); isRequestStandby = proxyPair.isRequestStandby; ClientProtocol proxy = proxyPair.clientProtocol; if (isRequestStandby) { standbyAddress = RPC.getServerAddress(proxy); nnProxy = NameNodeProxies.createNonHAProxy( config, standbyAddress, NamenodeProtocol.class, UserGroupInformation.getCurrentUser(), false).getProxy(); } else { nnProxy = namenode; } return nnProxy.getBlocks(datanode, size, minBlockSize, timeInterval, storageType); } finally { if (isRequestStandby) { LOG.info("Request #getBlocks to Standby NameNode success. " + "remoteAddress: {}", standbyAddress.getHostString()); } } } /** * @return true if an upgrade is in progress, false if not. * @throws IOException */ public boolean isUpgrading() throws IOException { // fsimage upgrade final boolean isUpgrade = !namenode.isUpgradeFinalized(); // rolling upgrade RollingUpgradeInfo info = fs.rollingUpgrade( HdfsConstants.RollingUpgradeAction.QUERY); final boolean isRollingUpgrade = (info != null && !info.isFinalized()); return (isUpgrade || isRollingUpgrade); } /** @return live datanode storage reports. */ public DatanodeStorageReport[] getLiveDatanodeStorageReport() throws IOException { boolean isRequestStandby = false; InetSocketAddress standbyAddress = null; try { ProxyPair proxyPair = getProxy(); isRequestStandby = proxyPair.isRequestStandby; ClientProtocol proxy = proxyPair.clientProtocol; if (isRequestStandby) { standbyAddress = RPC.getServerAddress(proxy); } return proxy.getDatanodeStorageReport(DatanodeReportType.LIVE); } finally { if (isRequestStandby) { LOG.info("Request #getLiveDatanodeStorageReport to Standby " + "NameNode success. remoteAddress: {}", standbyAddress.getHostString()); } } } /** * get the proxy. * @return ProxyPair(clientProtocol and isRequestStandby) * @throws IOException */ private ProxyPair getProxy() throws IOException { boolean isRequestStandby = false; ClientProtocol clientProtocol = null; if (getBlocksToStandby && nsId != null && HAUtil.isHAEnabled(config, nsId)) { List<ClientProtocol> namenodes = HAUtil.getProxiesForAllNameNodesInNameservice(config, nsId); for (ClientProtocol proxy : namenodes) { try { if (proxy.getHAServiceState().equals( HAServiceProtocol.HAServiceState.STANDBY)) { clientProtocol = proxy; isRequestStandby = true; break; } } catch (Exception e) { // Ignore the exception while connecting to a namenode. LOG.debug("Error while connecting to namenode", e); } } if (clientProtocol == null) { LOG.warn("Request to Standby" + " NameNode but meet exception, will fallback to normal way."); clientProtocol = namenode; } } else { clientProtocol = namenode; } return new ProxyPair(clientProtocol, isRequestStandby); } /** @return the key manager */ public KeyManager getKeyManager() { return keyManager; } /** @return the list of paths to scan/migrate */ public List<Path> getTargetPaths() { return targetPaths; } /** Should the instance continue running? 
*/ public boolean shouldContinue(long dispatchBlockMoveBytes) { if (dispatchBlockMoveBytes > 0) { notChangedIterations = 0; } else { notChangedIterations++; if (LOG.isDebugEnabled()) { LOG.debug("No block has been moved for " + notChangedIterations + " iterations, " + "maximum notChangedIterations before exit is: " + ((maxNotChangedIterations >= 0) ? maxNotChangedIterations : "Infinite")); } if ((maxNotChangedIterations >= 0) && (notChangedIterations >= maxNotChangedIterations)) { System.out.println("No block has been moved for " + notChangedIterations + " iterations. Exiting..."); return false; } } return true; } /** * The idea for making sure that there is no more than one instance * running in an HDFS is to create a file in the HDFS, writes the hostname * of the machine on which the instance is running to the file, but did not * close the file until it exits. * * This prevents the second instance from running because it can not * creates the file while the first one is running. * * This method checks if there is any running instance. If no, mark yes. * Note that this is an atomic operation. * * @return null if there is a running instance; * otherwise, the output stream to the newly created file. */ private OutputStream checkAndMarkRunning() throws IOException { try { if (fs.exists(idPath)) { // try appending to it so that it will fail fast if another balancer is // running. IOUtils.closeStream(fs.append(idPath)); fs.delete(idPath, true); } final FSDataOutputStream fsout = fs.createFile(idPath) .replicate().recursive().build(); Preconditions.checkState( fsout.hasCapability(StreamCapability.HFLUSH.getValue()) && fsout.hasCapability(StreamCapability.HSYNC.getValue()), "Id lock file should support hflush and hsync"); // mark balancer idPath to be deleted during filesystem closure fs.deleteOnExit(idPath); if (write2IdFile) { fsout.writeBytes(InetAddress.getLocalHost().getHostName()); fsout.hflush(); } return fsout; } catch(RemoteException e) { if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){ return null; } else { throw e; } } } /** * Returns fallbackToSimpleAuth. This will be true or false during calls to * indicate if a secure client falls back to simple auth. */ public AtomicBoolean getFallbackToSimpleAuth() { return fallbackToSimpleAuth; } @Override public void close() { keyManager.close(); // close the output file IOUtils.closeStream(out); if (fs != null) { try { if (checkOtherInstanceRunning) { fs.delete(idPath, true); } } catch(IOException ioe) { LOG.warn("Failed to delete " + idPath, ioe); } } } public NamenodeProtocol getNNProtocolConnection() { return this.namenode; } @Override public String toString() { return getClass().getSimpleName() + "[namenodeUri=" + nameNodeUri + ", bpid=" + blockpoolID + "]"; } private static
target: NameNodeConnector

language: java
repo: apache__camel
path: components/camel-as2/camel-as2-component/src/generated/java/org/apache/camel/component/as2/AS2ComponentConfigurer.java
class_span: { "start": 730, "end": 4555 }
source:
class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, ExtendedPropertyConfigurerGetter { private static final Map<String, Object> ALL_OPTIONS; static { Map<String, Object> map = new CaseInsensitiveMap(); map.put("BridgeErrorHandler", boolean.class); map.put("LazyStartProducer", boolean.class); map.put("AutowiredEnabled", boolean.class); map.put("Configuration", org.apache.camel.component.as2.AS2Configuration.class); map.put("SslContextParameters", org.apache.camel.support.jsse.SSLContextParameters.class); map.put("UseGlobalSslContextParameters", boolean.class); ALL_OPTIONS = map; } @Override public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) { AS2Component target = (AS2Component) obj; switch (ignoreCase ? name.toLowerCase() : name) { case "autowiredenabled": case "autowiredEnabled": target.setAutowiredEnabled(property(camelContext, boolean.class, value)); return true; case "bridgeerrorhandler": case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true; case "configuration": target.setConfiguration(property(camelContext, org.apache.camel.component.as2.AS2Configuration.class, value)); return true; case "lazystartproducer": case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true; case "sslcontextparameters": case "sslContextParameters": target.setSslContextParameters(property(camelContext, org.apache.camel.support.jsse.SSLContextParameters.class, value)); return true; case "useglobalsslcontextparameters": case "useGlobalSslContextParameters": target.setUseGlobalSslContextParameters(property(camelContext, boolean.class, value)); return true; default: return false; } } @Override public Map<String, Object> getAllOptions(Object target) { return ALL_OPTIONS; } @Override public Class<?> getOptionType(String name, boolean ignoreCase) { switch (ignoreCase ? name.toLowerCase() : name) { case "autowiredenabled": case "autowiredEnabled": return boolean.class; case "bridgeerrorhandler": case "bridgeErrorHandler": return boolean.class; case "configuration": return org.apache.camel.component.as2.AS2Configuration.class; case "lazystartproducer": case "lazyStartProducer": return boolean.class; case "sslcontextparameters": case "sslContextParameters": return org.apache.camel.support.jsse.SSLContextParameters.class; case "useglobalsslcontextparameters": case "useGlobalSslContextParameters": return boolean.class; default: return null; } } @Override public Object getOptionValue(Object obj, String name, boolean ignoreCase) { AS2Component target = (AS2Component) obj; switch (ignoreCase ? name.toLowerCase() : name) { case "autowiredenabled": case "autowiredEnabled": return target.isAutowiredEnabled(); case "bridgeerrorhandler": case "bridgeErrorHandler": return target.isBridgeErrorHandler(); case "configuration": return target.getConfiguration(); case "lazystartproducer": case "lazyStartProducer": return target.isLazyStartProducer(); case "sslcontextparameters": case "sslContextParameters": return target.getSslContextParameters(); case "useglobalsslcontextparameters": case "useGlobalSslContextParameters": return target.isUseGlobalSslContextParameters(); default: return null; } } }
target: AS2ComponentConfigurer

language: java
repo: alibaba__nacos
path: config/src/main/java/com/alibaba/nacos/config/server/model/NacosConfigCachePostProcessor.java
class_span: { "start": 755, "end": 1006 }
source:
class ____ implements ConfigCachePostProcessor { @Override public String getName() { return "nacos"; } @Override public void postProcess(ConfigCache configCache, String content) { } }
target: NacosConfigCachePostProcessor

language: java
repo: hibernate__hibernate-orm
path: hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/accesstype/AttributeAccessorTest.java
class_span: { "start": 2230, "end": 2571 }
source:
class ____ extends PropertyAccessStrategyBasicImpl { static boolean invoked; @Override public PropertyAccess buildPropertyAccess(Class containerJavaType, String propertyName, boolean setterRequired) { invoked = true; return super.buildPropertyAccess( containerJavaType, propertyName, setterRequired ); } } }
target: BasicAttributeAccessor

language: java
repo: alibaba__druid
path: core/src/test/java/com/alibaba/druid/bvt/sql/oracle/OracleSchemaStatVisitorTest.java
class_span: { "start": 1157, "end": 2935 }
source:
class ____ extends OracleTest { public void test_0() throws Exception { String sql = "SELECT id, name name from department d" + " WHERE d.id = ? order by name desc"; OracleStatementParser parser = new OracleStatementParser(sql); List<SQLStatement> statementList = parser.parseStatementList(); SQLStatement statemen = statementList.get(0); print(statementList); assertEquals(1, statementList.size()); List<Object> parameters = new ArrayList<Object>(); parameters.add(23456); OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor(); visitor.setParameters(parameters); statemen.accept(visitor); System.out.println("Tables : " + visitor.getTables()); System.out.println("fields : " + visitor.getColumns()); System.out.println("coditions : " + visitor.getConditions()); System.out.println("relationships : " + visitor.getRelationships()); assertEquals(1, visitor.getTables().size()); assertTrue(visitor.getTables().containsKey(new TableStat.Name("department"))); assertEquals(2, visitor.getColumns().size()); assertTrue(visitor.getColumns().contains(new TableStat.Column("department", "id"))); assertTrue(visitor.getColumns().contains(new TableStat.Column("department", "name"))); assertEquals(1, visitor.getConditions().size()); Condition condition = visitor.getConditions().get(0); assertSame(parameters.get(0), condition.getValues().get(0)); Column orderByColumn = visitor.getOrderByColumns().iterator().next(); assertEquals(SQLOrderingSpecification.DESC, orderByColumn.getAttributes().get("orderBy.type")); } }
target: OracleSchemaStatVisitorTest

language: java
repo: spring-projects__spring-security
path: oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/jackson2/HashSetMixin.java
class_span: { "start": 877, "end": 1106 }
source:
class ____ used to serialize/deserialize {@link HashSet}. * * @author Steve Riesenberg * @since 7.0 * @see HashSet * @deprecated as of 7.0 */ @JsonTypeInfo(use = JsonTypeInfo.Id.CLASS) @Deprecated(forRemoval = true) abstract
target: is

language: java
repo: ReactiveX__RxJava
path: src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableLift.java
class_span: { "start": 1179, "end": 2398 }
source:
class ____<R, T> extends AbstractObservableWithUpstream<T, R> { /** The actual operator. */ final ObservableOperator<? extends R, ? super T> operator; public ObservableLift(ObservableSource<T> source, ObservableOperator<? extends R, ? super T> operator) { super(source); this.operator = operator; } @Override public void subscribeActual(Observer<? super R> observer) { Observer<? super T> liftedObserver; try { liftedObserver = Objects.requireNonNull(operator.apply(observer), "Operator " + operator + " returned a null Observer"); } catch (NullPointerException e) { // NOPMD throw e; } catch (Throwable e) { Exceptions.throwIfFatal(e); // can't call onError because no way to know if a Disposable has been set or not // can't call onSubscribe because the call might have set a Disposable already RxJavaPlugins.onError(e); NullPointerException npe = new NullPointerException("Actually not, but can't throw other exceptions due to RS"); npe.initCause(e); throw npe; } source.subscribe(liftedObserver); } }
target: ObservableLift

language: java
repo: hibernate__hibernate-orm
path: hibernate-core/src/main/java/org/hibernate/engine/spi/ExecuteUpdateResultCheckStyle.java
class_span: { "start": 859, "end": 2987 }
source:
enum ____ { /** * Do not perform checking. Either user simply does not want checking, or is * indicating a {@link java.sql.CallableStatement} execution in which the * checks are being performed explicitly and failures are handled through * propagation of {@link java.sql.SQLException}s. */ NONE, /** * Perform row count checking. Row counts are the int values returned by both * {@link java.sql.PreparedStatement#executeUpdate()} and * {@link java.sql.Statement#executeBatch()}. These values are checked * against some expected count. */ COUNT, /** * Essentially the same as {@link #COUNT} except that the row count actually * comes from an output parameter registered as part of a * {@link java.sql.CallableStatement}. This style explicitly prohibits * statement batching from being used... */ PARAM; public String externalName() { return switch ( this ) { case NONE -> "none"; case COUNT -> "rowcount"; case PARAM -> "param"; }; } public static ExecuteUpdateResultCheckStyle fromResultCheckStyle(ResultCheckStyle style) { return switch ( style ) { case NONE -> NONE; case COUNT -> COUNT; case PARAM -> PARAM; }; } public static @Nullable ExecuteUpdateResultCheckStyle fromExternalName(String name) { for ( ExecuteUpdateResultCheckStyle style : values() ) { if ( style.externalName().equalsIgnoreCase(name) ) { return style; } } return null; } public static @Nullable Supplier<? extends Expectation> expectationConstructor( @Nullable ExecuteUpdateResultCheckStyle style) { return style == null ? null : style.expectationConstructor(); } public Supplier<? extends Expectation> expectationConstructor() { return switch ( this ) { case NONE -> Expectation.None::new; case COUNT -> Expectation.RowCount::new; case PARAM -> Expectation.OutParameter::new; }; } public Class<? extends Expectation> expectationClass() { return switch ( this ) { case NONE -> Expectation.None.class; case COUNT -> Expectation.RowCount.class; case PARAM -> Expectation.OutParameter.class; }; } }
target: ExecuteUpdateResultCheckStyle

language: java
repo: apache__camel
path: components/camel-minio/src/main/java/org/apache/camel/component/minio/MinioConsumer.java
class_span: { "start": 2282, "end": 19278 }
source:
class ____ extends ScheduledBatchPollingConsumer { private static final Logger LOG = LoggerFactory.getLogger(MinioConsumer.class); private long totalCounter; private String continuationToken; private transient String minioConsumerToString; public MinioConsumer(MinioEndpoint endpoint, Processor processor) { super(endpoint, processor); } @Override protected void doStart() throws Exception { super.doStart(); if (getConfiguration().isMoveAfterRead()) { String destinationBucketName = getConfiguration().getDestinationBucketName(); if (isNotEmpty(destinationBucketName)) { if (bucketExists(destinationBucketName)) { LOG.trace("Bucket {} already exists", destinationBucketName); } else { LOG.trace("Destination Bucket {} doesn't exist yet", destinationBucketName); if (getConfiguration().isAutoCreateBucket()) { // creates the new bucket because it doesn't exist yet LOG.trace("Creating Destination bucket {}...", destinationBucketName); makeBucket(destinationBucketName); LOG.trace("Destination Bucket created"); } else { throw new IllegalArgumentException( "Destination Bucket does not exist, set autoCreateBucket option for bucket auto creation"); } } } else { LOG.warn("invalid destinationBucketName found: {}", destinationBucketName); } } } private boolean bucketExists(String bucketName) throws Exception { return getMinioClient().bucketExists(BucketExistsArgs.builder().bucket(bucketName).build()); } private void makeBucket(String bucketName) throws Exception { MakeBucketArgs.Builder makeBucketRequest = MakeBucketArgs.builder().bucket(bucketName).objectLock(getConfiguration().isObjectLock()); if (isNotEmpty(getConfiguration().getRegion())) { makeBucketRequest.region(getConfiguration().getRegion()); } getMinioClient().makeBucket(makeBucketRequest.build()); } @Override protected int poll() throws Exception { // must reset for each poll shutdownRunningTask = null; pendingExchanges = 0; String bucketName = getConfiguration().getBucketName(); String objectName = getConfiguration().getObjectName(); Deque<Exchange> exchanges; if (isNotEmpty(objectName)) { LOG.trace("Getting object in bucket {} with object name {}...", bucketName, objectName); exchanges = createExchanges(objectName); return processBatch(CastUtils.cast(exchanges)); } else { LOG.trace("Queueing objects in bucket {}...", bucketName); ListObjectsArgs.Builder listObjectRequest = ListObjectsArgs.builder() .bucket(bucketName) .includeUserMetadata(getConfiguration().isIncludeUserMetadata()) .includeVersions(getConfiguration().isIncludeVersions()) .recursive(getConfiguration().isRecursive()) .useApiVersion1(getConfiguration().isUseVersion1()); if (isNotEmpty(getConfiguration().getDelimiter())) { listObjectRequest.delimiter(getConfiguration().getDelimiter()); } if (maxMessagesPerPoll > 0) { listObjectRequest.maxKeys(maxMessagesPerPoll); } if (isNotEmpty(getConfiguration().getPrefix())) { listObjectRequest.prefix(getConfiguration().getPrefix()); } if (isNotEmpty(getConfiguration().getStartAfter())) { listObjectRequest.startAfter(getConfiguration().getStartAfter()); continuationToken = null; } // if there was a marker from previous poll then use that to // continue from where we left last time if (isNotEmpty(continuationToken)) { LOG.trace("Resuming from marker: {}", continuationToken); listObjectRequest.startAfter(continuationToken); } Iterator<Result<Item>> listObjects = getMinioClient().listObjects(listObjectRequest.build()).iterator(); // we have listed some objects so mark the consumer as ready forceConsumerAsReady(); if (listObjects.hasNext()) { 
exchanges = createExchanges(listObjects); if (maxMessagesPerPoll <= 0 || exchanges.size() < maxMessagesPerPoll) { continuationToken = null; } else { continuationToken = exchanges.getLast().getIn().getHeader(MinioConstants.OBJECT_NAME, String.class); } if (LOG.isTraceEnabled()) { LOG.trace("Found {} objects in bucket {}...", totalCounter, bucketName); } return processBatch(CastUtils.cast(exchanges)); } else { // no more data so clear marker continuationToken = null; return 0; } } } protected Deque<Exchange> createExchanges(String objectName) throws Exception { Deque<Exchange> answer = new LinkedList<>(); Exchange exchange = createExchange(objectName); answer.add(exchange); return answer; } protected Deque<Exchange> createExchanges(Iterator<Result<Item>> minioObjectSummaries) throws Exception { int messageCounter = 0; Deque<Exchange> answer = new LinkedList<>(); try { if (getConfiguration().isIncludeFolders()) { do { messageCounter++; Item minioObjectSummary = minioObjectSummaries.next().get(); Exchange exchange = createExchange(minioObjectSummary.objectName()); answer.add(exchange); } while (minioObjectSummaries.hasNext()); } else { do { messageCounter++; Item minioObjectSummary = minioObjectSummaries.next().get(); // ignore if directory if (!minioObjectSummary.isDir()) { Exchange exchange = createExchange(minioObjectSummary.objectName()); answer.add(exchange); } } while (minioObjectSummaries.hasNext()); } if (LOG.isTraceEnabled()) { LOG.trace("Received {} messages in this poll", messageCounter); totalCounter += messageCounter; } } catch (Exception e) { LOG.warn("Error getting MinioObject due: {}", e.getMessage()); throw e; } return answer; } private InputStream getObject(String bucketName, MinioClient minioClient, String objectName) throws Exception { GetObjectArgs.Builder getObjectRequest = GetObjectArgs.builder().bucket(bucketName).object(objectName); MinioChecks.checkIfConfigIsNotEmptyAndSetAndConfig(getConfiguration()::getServerSideEncryptionCustomerKey, getObjectRequest::ssec); MinioChecks.checkLengthAndSetConfig(getConfiguration()::getOffset, getObjectRequest::offset); MinioChecks.checkLengthAndSetConfig(getConfiguration()::getLength, getObjectRequest::length); MinioChecks.checkIfConfigIsNotEmptyAndSetAndConfig(getConfiguration()::getVersionId, getObjectRequest::versionId); MinioChecks.checkIfConfigIsNotEmptyAndSetAndConfig(getConfiguration()::getMatchETag, getObjectRequest::matchETag); MinioChecks.checkIfConfigIsNotEmptyAndSetAndConfig(getConfiguration()::getNotMatchETag, getObjectRequest::notMatchETag); MinioChecks.checkIfConfigIsNotEmptyAndSetAndConfig(getConfiguration()::getModifiedSince, getObjectRequest::modifiedSince); MinioChecks.checkIfConfigIsNotEmptyAndSetAndConfig(getConfiguration()::getUnModifiedSince, getObjectRequest::unmodifiedSince); return minioClient.getObject(getObjectRequest.build()); } @Override public int processBatch(Queue<Object> exchanges) throws Exception { int total = exchanges.size(); for (int index = 0; index < total && isBatchAllowed(); index++) { // only loop if we are started (allowed to run) final Exchange exchange = cast(Exchange.class, exchanges.poll()); // add current index and total as properties exchange.setProperty(ExchangePropertyKey.BATCH_INDEX, index); exchange.setProperty(ExchangePropertyKey.BATCH_SIZE, total); exchange.setProperty(ExchangePropertyKey.BATCH_COMPLETE, index == total - 1); // update pending number of exchanges pendingExchanges = total - index - 1; String srcBucketName = exchange.getIn().getHeader(MinioConstants.BUCKET_NAME, 
String.class); String srcObjectName = exchange.getIn().getHeader(MinioConstants.OBJECT_NAME, String.class); if (getConfiguration().isIncludeBody()) { InputStream minioObject; try { minioObject = getObject(srcBucketName, getMinioClient(), srcObjectName); exchange.getIn().setBody(IOUtils.toByteArray(minioObject)); if (getConfiguration().isAutoCloseBody()) { exchange.getExchangeExtension().addOnCompletion(new SynchronizationAdapter() { @Override public void onDone(Exchange exchange) { IOHelper.close(minioObject); } }); } } catch (Exception e) { LOG.warn("Error getting MinioObject due: {}", e.getMessage()); throw e; } } // add on completion to handle after work when the exchange is done exchange.getExchangeExtension().addOnCompletion(new Synchronization() { public void onComplete(Exchange exchange) { processCommit(exchange); } public void onFailure(Exchange exchange) { processRollback(exchange); } @Override public String toString() { return "MinioConsumerOnCompletion"; } }); getAsyncProcessor().process(exchange, EmptyAsyncCallback.get()); } return total; } /** * Strategy to delete the message after being processed. * * @param exchange the exchange */ protected void processCommit(Exchange exchange) { try { String srcBucketName = exchange.getIn().getHeader(MinioConstants.BUCKET_NAME, String.class); String srcObjectName = exchange.getIn().getHeader(MinioConstants.OBJECT_NAME, String.class); if (getConfiguration().isDeleteAfterRead() || getConfiguration().isMoveAfterRead()) { if (getConfiguration().isMoveAfterRead()) { copyObject(srcBucketName, srcObjectName); LOG.trace("Copied object from bucket {} with objectName {} to bucket {}...", srcBucketName, srcObjectName, getConfiguration().getDestinationBucketName()); } LOG.trace("Deleting object from bucket {} with objectName {}...", srcBucketName, srcObjectName); removeObject(srcBucketName, srcObjectName); LOG.trace("Deleted object from bucket {} with objectName {}...", srcBucketName, srcObjectName); } } catch (MinioException | NoSuchAlgorithmException | InvalidKeyException | IOException e) { getExceptionHandler().handleException("Error occurred during moving or deleting object. This exception is ignored.", exchange, e); } } private void removeObject(String srcBucketName, String srcObjectName) throws MinioException, IOException, InvalidKeyException, NoSuchAlgorithmException { RemoveObjectArgs.Builder removeObjectRequest = RemoveObjectArgs.builder() .bucket(srcBucketName) .object(srcObjectName) .bypassGovernanceMode(getConfiguration().isBypassGovernanceMode()); if (isNotEmpty(getConfiguration().getVersionId())) { removeObjectRequest.versionId(getConfiguration().getVersionId()); } getMinioClient().removeObject(removeObjectRequest.build()); } private void copyObject(String srcBucketName, String srcObjectName) throws MinioException, IOException, InvalidKeyException, NoSuchAlgorithmException { String destinationBucketName = getConfiguration().getDestinationBucketName(); if (isEmpty(destinationBucketName)) { throw new IllegalArgumentException("Destination Bucket name must be specified to copy operation"); } // set destination object name as source object name, if not specified String destinationObjectName = (isNotEmpty(getConfiguration().getDestinationObjectName())) ? 
getConfiguration().getDestinationObjectName() : srcObjectName; LOG.trace("Copying object from bucket {} with objectName {} to bucket {}...", srcBucketName, srcObjectName, destinationBucketName); CopySource.Builder copySourceBuilder = CopySource.builder().bucket(srcBucketName).object(srcObjectName); MinioChecks.checkIfConfigIsNotEmptyAndSetAndConfig(getConfiguration()::getServerSideEncryptionCustomerKey, copySourceBuilder::ssec); MinioChecks.checkLengthAndSetConfig(getConfiguration()::getOffset, copySourceBuilder::offset); MinioChecks.checkLengthAndSetConfig(getConfiguration()::getLength, copySourceBuilder::length); MinioChecks.checkIfConfigIsNotEmptyAndSetAndConfig(getConfiguration()::getVersionId, copySourceBuilder::versionId); MinioChecks.checkIfConfigIsNotEmptyAndSetAndConfig(getConfiguration()::getMatchETag, copySourceBuilder::matchETag); MinioChecks.checkIfConfigIsNotEmptyAndSetAndConfig(getConfiguration()::getNotMatchETag, copySourceBuilder::notMatchETag); MinioChecks.checkIfConfigIsNotEmptyAndSetAndConfig(getConfiguration()::getModifiedSince, copySourceBuilder::modifiedSince); MinioChecks.checkIfConfigIsNotEmptyAndSetAndConfig(getConfiguration()::getUnModifiedSince, copySourceBuilder::unmodifiedSince); CopyObjectArgs.Builder copyObjectRequest = CopyObjectArgs.builder() .source(copySourceBuilder.build()) .bucket(getConfiguration().getDestinationBucketName()) .object(destinationObjectName); MinioChecks.checkIfConfigIsNotEmptyAndSetAndConfig(getConfiguration()::getServerSideEncryption, copyObjectRequest::sse); getMinioClient().copyObject(copyObjectRequest.build()); } /** * Strategy when processing the exchange failed. * * @param exchange the exchange */ protected void processRollback(Exchange exchange) { Exception cause = exchange.getException(); if (isNotEmpty(cause)) { LOG.warn("Exchange failed, so rolling back message status: {}", exchange, cause); } else { LOG.warn("Exchange failed, so rolling back message status: {}", exchange); } } protected MinioConfiguration getConfiguration() { return getEndpoint().getConfiguration(); } protected MinioClient getMinioClient() { return getEndpoint().getMinioClient(); } @Override public MinioEndpoint getEndpoint() { return (MinioEndpoint) super.getEndpoint(); } private Exchange createExchange(String objectName) throws Exception { LOG.trace("Getting object with objectName {} from bucket {}...", objectName, getConfiguration().getBucketName()); Exchange exchange = createExchange(true); exchange.setPattern(getEndpoint().getExchangePattern()); Message message = exchange.getIn(); LOG.trace("Got object!"); getEndpoint().getObjectStat(objectName, message); return exchange; } @Override public String toString() { if (isEmpty(minioConsumerToString)) { minioConsumerToString = "MinioConsumer[" + URISupport.sanitizeUri(getEndpoint().getEndpointUri()) + "]"; } return minioConsumerToString; } }
target: MinioConsumer

language: java
repo: hibernate__hibernate-orm
path: hibernate-core/src/main/java/org/hibernate/loader/NonUniqueDiscoveredSqlAliasException.java
class_span: { "start": 523, "end": 683 }
source:
class ____ extends HibernateException { public NonUniqueDiscoveredSqlAliasException(String message) { super( message ); } }
target: NonUniqueDiscoveredSqlAliasException

language: java
repo: elastic__elasticsearch
path: server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketTests.java
class_span: { "start": 973, "end": 2933 }
source:
class ____ extends AbstractBucketMetricsTestCase<AvgBucketPipelineAggregationBuilder> { @Override protected AvgBucketPipelineAggregationBuilder doCreateTestAggregatorFactory(String name, String bucketsPath) { return new AvgBucketPipelineAggregationBuilder(name, bucketsPath); } public void testValidate() { AggregationBuilder singleBucketAgg = new GlobalAggregationBuilder("global"); AggregationBuilder multiBucketAgg = new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.STRING); final Set<AggregationBuilder> aggBuilders = new HashSet<>(); aggBuilders.add(singleBucketAgg); aggBuilders.add(multiBucketAgg); // First try to point to a non-existent agg assertThat( validate(aggBuilders, new AvgBucketPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo( "Validation Failed: 1: " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + " aggregation does not exist for aggregation [name]: invalid_agg>metric;" ) ); // Now try to point to a single bucket agg assertThat( validate(aggBuilders, new AvgBucketPipelineAggregationBuilder("name", "global>metric")), equalTo( "Validation Failed: 1: Unable to find unqualified multi-bucket aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + ". Path must include a multi-bucket aggregation for aggregation [name] found :" + GlobalAggregationBuilder.class.getName() + " for buckets path: global>metric;" ) ); // Now try to point to a valid multi-bucket agg which is valid assertThat(validate(aggBuilders, new AvgBucketPipelineAggregationBuilder("name", "terms>metric")), nullValue()); } }
target: AvgBucketTests

language: java
repo: apache__hadoop
path: hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMROutputFormat.java
class_span: { "start": 3904, "end": 5750 }
source:
class ____ extends OutputFormat<IntWritable, IntWritable> implements Configurable { public static final String TEST_CONFIG_NAME = "mapred.test.jobsubmission"; private Configuration conf; @Override public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException { conf.setBoolean(TEST_CONFIG_NAME, true); } @Override public OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException, InterruptedException { return new OutputCommitter() { @Override public void abortTask(TaskAttemptContext taskContext) throws IOException { } @Override public void commitTask(TaskAttemptContext taskContext) throws IOException { } @Override public boolean needsTaskCommit(TaskAttemptContext taskContext) throws IOException { return false; } @Override public void setupJob(JobContext jobContext) throws IOException { } @Override public void setupTask(TaskAttemptContext taskContext) throws IOException { } }; } @Override public RecordWriter<IntWritable, IntWritable> getRecordWriter( TaskAttemptContext context) throws IOException, InterruptedException { assertTrue(context.getConfiguration().getBoolean(TEST_CONFIG_NAME, false)); return new RecordWriter<IntWritable, IntWritable>() { @Override public void close(TaskAttemptContext context) throws IOException, InterruptedException { } @Override public void write(IntWritable key, IntWritable value) throws IOException, InterruptedException { } }; } @Override public Configuration getConf() { return conf; } @Override public void setConf(Configuration conf) { this.conf = conf; } }
target: TestOutputFormat

language: java
repo: alibaba__fastjson
path: src/test/java/com/alibaba/json/bvt/issue_1100/Issue1140.java
class_span: { "start": 201, "end": 448 }
source:
class ____ extends TestCase { public void test_for_issue() throws Exception { String s = "\uD83C\uDDEB\uD83C\uDDF7"; ByteArrayOutputStream out = new ByteArrayOutputStream(); JSON.writeJSONString(out, s); } }
target: Issue1140

language: java
repo: apache__camel
path: components/camel-olingo4/camel-olingo4-component/src/test/java/org/apache/camel/component/olingo4/Olingo4ComponentProducerBatchTest.java
class_span: { "start": 2110, "end": 7242 }
source:
class ____ extends AbstractOlingo4TestSupport { private static final Logger LOG = LoggerFactory.getLogger(Olingo4ComponentProducerBatchTest.class); private static final String TEST_CREATE_KEY = "'lewisblack'"; private static final String TEST_CREATE_PEOPLE = PEOPLE + "(" + TEST_CREATE_KEY + ")"; private static final String TEST_CREATE_RESOURCE_CONTENT_ID = "1"; private static final String TEST_UPDATE_RESOURCE_CONTENT_ID = "2"; @Test public void testBatch() throws IOException { final List<Olingo4BatchRequest> batchParts = new ArrayList<>(); String resourceUri = ODATA_API_BASE_URL; // 1. Edm query batchParts.add(Olingo4BatchQueryRequest.resourcePath(Constants.METADATA).resourceUri(resourceUri) .headers(Map.of("Content-Disposition", "test")).headers(Map.of("Content-Disposition", "test")).build()); // 2. Read entities batchParts.add(Olingo4BatchQueryRequest.resourcePath(PEOPLE).resourceUri(resourceUri) .headers(Map.of("Content-Disposition", "test")).build()); // 3. Read entity batchParts.add(Olingo4BatchQueryRequest.resourcePath(TEST_PEOPLE).resourceUri(resourceUri) .headers(Map.of("Content-Disposition", "test")).build()); // 4. Read with $top final HashMap<String, String> queryParams = new HashMap<>(); queryParams.put(SystemQueryOptionKind.TOP.toString(), "5"); batchParts .add(Olingo4BatchQueryRequest.resourcePath(PEOPLE).resourceUri(resourceUri) .headers(Map.of("Content-Disposition", "test")).queryParams(queryParams) .build()); // 5. Create entity ClientEntity clientEntity = createEntity(); batchParts.add(Olingo4BatchChangeRequest.resourcePath(PEOPLE).resourceUri(resourceUri) .contentId(TEST_CREATE_RESOURCE_CONTENT_ID).operation(Operation.CREATE) .body(clientEntity).build()); // 6. Update middle name in created entry clientEntity.getProperties() .add(objFactory.newPrimitiveProperty("MiddleName", objFactory.newPrimitiveValueBuilder().buildString("Lewis"))); batchParts.add(Olingo4BatchChangeRequest.resourcePath(TEST_CREATE_PEOPLE).resourceUri(resourceUri) .contentId(TEST_UPDATE_RESOURCE_CONTENT_ID) .operation(Operation.UPDATE).body(clientEntity).build()); // 7. Delete entity batchParts.add(Olingo4BatchChangeRequest.resourcePath(TEST_CREATE_PEOPLE).resourceUri(resourceUri) .operation(Operation.DELETE).build()); // 8. 
Read deleted entity to verify delete batchParts.add(Olingo4BatchQueryRequest.resourcePath(TEST_CREATE_PEOPLE).resourceUri(resourceUri) .headers(Map.of("Content-Disposition", "test")).build()); // execute batch request final List<Olingo4BatchResponse> responseParts = requestBody("direct:batch", batchParts); assertNotNull(responseParts, "Batch response"); assertEquals(8, responseParts.size(), "Batch responses expected"); final Edm edm = (Edm) responseParts.get(0).getBody(); assertNotNull(edm); LOG.info("Edm entity sets: {}", edm.getEntityContainer().getEntitySets()); ClientEntitySet entitySet = (ClientEntitySet) responseParts.get(1).getBody(); assertNotNull(entitySet); LOG.info("Read entities: {}", entitySet.getEntities()); clientEntity = (ClientEntity) responseParts.get(2).getBody(); assertNotNull(clientEntity); LOG.info("Read entiry properties: {}", clientEntity.getProperties()); ClientEntitySet entitySetWithTop = (ClientEntitySet) responseParts.get(3).getBody(); assertNotNull(entitySetWithTop); assertEquals(5, entitySetWithTop.getEntities().size()); LOG.info("Read entities with $top=5: {}", entitySet.getEntities()); clientEntity = (ClientEntity) responseParts.get(4).getBody(); assertNotNull(clientEntity); LOG.info("Created entity: {}", clientEntity.getProperties()); int statusCode = responseParts.get(5).getStatusCode(); assertEquals(HttpStatusCode.NO_CONTENT.getStatusCode(), statusCode); LOG.info("Update MdiddleName status: {}", statusCode); statusCode = responseParts.get(6).getStatusCode(); assertEquals(HttpStatusCode.NO_CONTENT.getStatusCode(), statusCode); LOG.info("Delete entity status: {}", statusCode); assertEquals(HttpStatusCode.NOT_FOUND.getStatusCode(), responseParts.get(7).getStatusCode()); final ODataError error = (ODataError) responseParts.get(7).getBody(); assertNotNull(error); LOG.info("Read deleted entity error: {}", error.getMessage()); } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { public void configure() { // test route for batch from("direct:batch").to("olingo4://batch"); } }; } }
Olingo4ComponentProducerBatchTest
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/component/cascading/collection/Definition.java
{ "start": 268, "end": 569 }
class ____ { private Long id; private Set<Value> values = new HashSet<>(); public Long getId() { return id; } public void setId(Long id) { this.id = id; } public Set<Value> getValues() { return values; } public void setValues(Set<Value> values) { this.values = values; } }
Definition
java
FasterXML__jackson-databind
src/main/java/tools/jackson/databind/node/JsonNodeCreator.java
{ "start": 371, "end": 2624 }
interface ____ { // Enumerated/singleton types public ValueNode booleanNode(boolean v); public ValueNode nullNode(); /** * @since 3.0 */ public JsonNode missingNode(); // Numeric types. // // note! Cannot return `NumericNode` when passed wrapper since `null` will // return `NullNode` which is NOT a `NumericNode`! public ValueNode numberNode(byte v); public ValueNode numberNode(Byte value); public ValueNode numberNode(short v); public ValueNode numberNode(Short value); public ValueNode numberNode(int v); public ValueNode numberNode(Integer value); public ValueNode numberNode(long v); public ValueNode numberNode(Long value); public ValueNode numberNode(BigInteger v); public ValueNode numberNode(float v); public ValueNode numberNode(Float value); public ValueNode numberNode(double v); public ValueNode numberNode(Double value); public ValueNode numberNode(BigDecimal v); // Textual nodes public ValueNode stringNode(String text); /** * @deprecated since 3.0 Use {@link #stringNode(String)} instead */ @Deprecated // since 3.0 public default ValueNode textNode(String text) { return stringNode(text); } // Other value (non-structured) nodes public ValueNode binaryNode(byte[] data); public ValueNode binaryNode(byte[] data, int offset, int length); public ValueNode pojoNode(Object pojo); /** * Factory method to use for adding "raw values"; pre-encoded values * that are included exactly as-is when node is serialized. * This may be used, for example, to include fully serialized JSON * sub-trees. * Note that the concept may not work with all backends, and since * no translation of any kinds is done it will not work when converting * between data formats. */ public ValueNode rawValueNode(RawValue value); // Structured nodes: // (bit unkosher, due to forward references... but has to do for now) public ArrayNode arrayNode(); /** * Factory method for constructing a JSON Array node with an initial capacity */ public ArrayNode arrayNode(int capacity); public ObjectNode objectNode(); }
JsonNodeCreator
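The JsonNodeCreator entry above defines Jackson's node-factory contract. A minimal usage sketch follows; it assumes the Jackson 2.x package layout (com.fasterxml.jackson.databind.node, where JsonNodeFactory implements JsonNodeCreator and the textual factory method is still named textNode rather than the 3.0 stringNode):

import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class JsonNodeCreatorSketch {
    public static void main(String[] args) {
        JsonNodeFactory f = JsonNodeFactory.instance; // a JsonNodeCreator
        ObjectNode root = f.objectNode();
        root.set("name", f.textNode("alice"));        // stringNode(...) in 3.0
        root.set("age", f.numberNode(42));
        ArrayNode tags = f.arrayNode(2);              // initial capacity hint
        tags.add(f.booleanNode(true));
        tags.add(f.nullNode());
        root.set("tags", tags);
        System.out.println(root); // {"name":"alice","age":42,"tags":[true,null]}
    }
}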
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/NodesListManager.java
{ "start": 30733, "end": 31475 }
class ____ implements Node { private String host; public UnknownNode(String host) { this.host = host; } @Override public String getNetworkLocation() { return null; } @Override public void setNetworkLocation(String location) { } @Override public String getName() { return host; } @Override public Node getParent() { return null; } @Override public void setParent(Node parent) { } @Override public int getLevel() { return 0; } @Override public void setLevel(int i) { } public String getHost() { return host; } public void setHost(String hst) { this.host = hst; } } }
UnknownNode
java
square__javapoet
src/test/java/com/squareup/javapoet/MethodSpecTest.java
{ "start": 3961, "end": 4084 }
interface ____ extends Callable<Integer>, Comparable<ExtendsOthers>, Throws<IllegalStateException> { }
ExtendsOthers
java
apache__maven
compat/maven-model/src/main/java/org/apache/maven/model/merge/ModelMerger.java
{ "start": 93999, "end": 94256 }
class ____ implements KeyComputer<Developer> { @Override public Object key(Developer developer) { return getDeveloperKey(developer); } } /** * KeyComputer for Contributor */ private
DeveloperKeyComputer
java
assertj__assertj-core
assertj-core/src/main/java/org/assertj/core/api/AbstractObjectAssert.java
{ "start": 10250, "end": 11786 }
interface ____ by the given {@code type}</li> * </ol> * <p> * Example: * <pre><code class='java'> import static org.assertj.core.api.Assertions.from; * * Jedi yoda = new Jedi("Yoda"); * * // assertion succeeds * assertThat(yoda).usingComparatorForType(String.CASE_INSENSITIVE_ORDER, String.class) * .returns("YODA", from(Jedi::getName)); * * // assertion will fail * assertThat(yoda).usingComparatorForType(String.CASE_INSENSITIVE_ORDER, String.class) * .returns("LUKE", from(Jedi::getName));</code></pre> * * @param comparator the {@link Comparator} to use * @param type the {@link Class} of the type the comparator should be used for * @param <T> the type of objects that the comparator should be used for * @return {@code this} assertions object * @see #returns(Object, Function) * @see #doesNotReturn(Object, Function) */ @CheckReturnValue public <T> SELF usingComparatorForType(Comparator<? super T> comparator, Class<T> type) { getComparatorsByType().registerComparator(type, comparator); return myself; } /** * Asserts that the actual object has the specified field or property. Static and synthetic fields are ignored since 3.19.0. * <p> * Private fields are matched by default but this can be changed by calling {@link Assertions#setAllowExtractingPrivateFields(boolean) Assertions.setAllowExtractingPrivateFields(false)}. * <p> * Example: * <pre><code class='java'> public
implemented
java
elastic__elasticsearch
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/RequestTests.java
{ "start": 403, "end": 622 }
class ____ { public static Request mockRequest(String modelId) { var request = mock(Request.class); when(request.getInferenceEntityId()).thenReturn(modelId); return request; } }
RequestTests
java
elastic__elasticsearch
server/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java
{ "start": 5875, "end": 16102 }
class ____ extends IndexNameExpressionResolver { MyResolver() { super(new ThreadContext(Settings.EMPTY), EmptySystemIndices.INSTANCE, TestProjectResolvers.DEFAULT_PROJECT_ONLY); } @Override public String[] concreteIndexNames(ProjectMetadata project, IndicesRequest request) { return request.indices(); } } @BeforeClass public static void startThreadPool() { THREAD_POOL = new TestThreadPool(TransportInstanceSingleOperationActionTests.class.getSimpleName()); } @Override @Before public void setUp() throws Exception { super.setUp(); projectId = randomProjectIdOrDefault(); transport = new CapturingTransport(); clusterService = createClusterService(THREAD_POOL); projectResolver = TestProjectResolvers.singleProject(projectId); transportService = transport.createTransportService( clusterService.getSettings(), THREAD_POOL, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> clusterService.localNode(), null, Collections.emptySet() ); transportService.start(); transportService.acceptIncomingRequests(); action = new TestTransportInstanceSingleOperationAction( "indices:admin/test", transportService, new ActionFilters(new HashSet<>()), new MyResolver(), Request::new ); } @Override @After public void tearDown() throws Exception { super.tearDown(); clusterService.close(); transportService.close(); } @AfterClass public static void destroyThreadPool() { ThreadPool.terminate(THREAD_POOL, 30, TimeUnit.SECONDS); // since static must set to null to be eligible for collection THREAD_POOL = null; } public void testGlobalBlock() { Request request = new Request(); PlainActionFuture<Response> listener = new PlainActionFuture<>(); ClusterBlocks.Builder block = ClusterBlocks.builder() .addGlobalBlock(new ClusterBlock(1, "", false, true, false, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL)); setState( clusterService, ClusterState.builder(clusterService.state()).putProjectMetadata(ProjectMetadata.builder(projectId)).blocks(block) ); try { action.new AsyncSingleAction(request, listener).start(); listener.get(); fail("expected ClusterBlockException"); } catch (Exception e) { if (ExceptionsHelper.unwrap(e, ClusterBlockException.class) == null) { logger.info("expected ClusterBlockException but got ", e); fail("expected ClusterBlockException"); } } } public void testBasicRequestWorks() throws InterruptedException, ExecutionException, TimeoutException { Request request = new Request().index("test"); request.shardId = new ShardId("test", "_na_", 0); PlainActionFuture<Response> listener = new PlainActionFuture<>(); setState(clusterService, ClusterStateCreationUtils.state(projectId, "test", randomBoolean(), ShardRoutingState.STARTED)); action.new AsyncSingleAction(request, listener).start(); assertThat(transport.capturedRequests().length, equalTo(1)); transport.handleResponse(transport.capturedRequests()[0].requestId(), new Response()); listener.get(); } public void testFailureWithoutRetry() throws Exception { Request request = new Request().index("test"); request.shardId = new ShardId("test", "_na_", 0); PlainActionFuture<Response> listener = new PlainActionFuture<>(); setState(clusterService, ClusterStateCreationUtils.state(projectId, "test", randomBoolean(), ShardRoutingState.STARTED)); action.new AsyncSingleAction(request, listener).start(); assertThat(transport.capturedRequests().length, equalTo(1)); long requestId = transport.capturedRequests()[0].requestId(); transport.clear(); // this should not trigger retry or anything and the listener should report exception immediately transport.handleRemoteError( requestId, new 
TransportException("a generic transport exception", new Exception("generic test exception")) ); try { // result should return immediately assertTrue(listener.isDone()); listener.get(); fail("this should fail with a transport exception"); } catch (ExecutionException t) { if (ExceptionsHelper.unwrap(t, TransportException.class) == null) { logger.info("expected TransportException but got ", t); fail("expected and TransportException"); } } } public void testSuccessAfterRetryWithClusterStateUpdate() throws Exception { Request request = new Request().index("test"); request.shardId = new ShardId("test", "_na_", 0); PlainActionFuture<Response> listener = new PlainActionFuture<>(); boolean local = randomBoolean(); setState(clusterService, ClusterStateCreationUtils.state(projectId, "test", local, ShardRoutingState.INITIALIZING)); action.new AsyncSingleAction(request, listener).start(); // this should fail because primary not initialized assertThat(transport.capturedRequests().length, equalTo(0)); setState(clusterService, ClusterStateCreationUtils.state(projectId, "test", local, ShardRoutingState.STARTED)); // this time it should work assertThat(transport.capturedRequests().length, equalTo(1)); transport.handleResponse(transport.capturedRequests()[0].requestId(), new Response()); listener.get(); } public void testSuccessAfterRetryWithExceptionFromTransport() throws Exception { Request request = new Request().index("test"); request.shardId = new ShardId("test", "_na_", 0); PlainActionFuture<Response> listener = new PlainActionFuture<>(); boolean local = randomBoolean(); setState(clusterService, ClusterStateCreationUtils.state(projectId, "test", local, ShardRoutingState.STARTED)); action.new AsyncSingleAction(request, listener).start(); assertThat(transport.capturedRequests().length, equalTo(1)); long requestId = transport.capturedRequests()[0].requestId(); transport.clear(); DiscoveryNode node = clusterService.state().getNodes().getLocalNode(); transport.handleLocalError(requestId, new ConnectTransportException(node, "test exception")); // trigger cluster state observer setState(clusterService, ClusterStateCreationUtils.state(projectId, "test", local, ShardRoutingState.STARTED)); assertThat(transport.capturedRequests().length, equalTo(1)); transport.handleResponse(transport.capturedRequests()[0].requestId(), new Response()); listener.get(); } public void testRetryOfAnAlreadyTimedOutRequest() throws Exception { Request request = new Request().index("test").timeout(new TimeValue(0, TimeUnit.MILLISECONDS)); request.shardId = new ShardId("test", "_na_", 0); PlainActionFuture<Response> listener = new PlainActionFuture<>(); setState(clusterService, ClusterStateCreationUtils.state(projectId, "test", randomBoolean(), ShardRoutingState.STARTED)); action.new AsyncSingleAction(request, listener).start(); assertThat(transport.capturedRequests().length, equalTo(1)); long requestId = transport.capturedRequests()[0].requestId(); transport.clear(); DiscoveryNode node = clusterService.state().getNodes().getLocalNode(); transport.handleLocalError(requestId, new ConnectTransportException(node, "test exception")); // wait until the timeout was triggered and we actually tried to send for the second time assertBusy(() -> assertThat(transport.capturedRequests().length, equalTo(1))); // let it fail the second time too requestId = transport.capturedRequests()[0].requestId(); transport.handleLocalError(requestId, new ConnectTransportException(node, "test exception")); try { // result should return immediately 
assertTrue(listener.isDone()); listener.get(); fail("this should fail with a transport exception"); } catch (ExecutionException t) { if (ExceptionsHelper.unwrap(t, ConnectTransportException.class) == null) { logger.info("expected ConnectTransportException but got ", t); fail("expected a ConnectTransportException"); } } } public void testUnresolvableRequestDoesNotHang() throws InterruptedException, ExecutionException, TimeoutException { action = new TestTransportInstanceSingleOperationAction( "indices:admin/test_unresolvable", transportService, new ActionFilters(new HashSet<>()), new MyResolver(), Request::new ) { @Override protected void resolveRequest(ProjectState state, Request request) { throw new IllegalStateException("request cannot be resolved"); } }; Request request = new Request().index("test"); request.shardId = new ShardId("test", "_na_", 0); PlainActionFuture<Response> listener = new PlainActionFuture<>(); setState(clusterService, ClusterStateCreationUtils.state(projectId, "test", randomBoolean(), ShardRoutingState.STARTED)); action.new AsyncSingleAction(request, listener).start(); assertThat(transport.capturedRequests().length, equalTo(0)); try { listener.get(); } catch (Exception e) { if (ExceptionsHelper.unwrap(e, IllegalStateException.class) == null) { logger.info("expected IllegalStateException but got ", e); fail("expected an IllegalStateException"); } } } }
MyResolver
java
quarkusio__quarkus
extensions/arc/deployment/src/test/java/io/quarkus/arc/test/synthetic/typeClosure/SyntheticBeanBuildItemAddTypeClosureGenericsTest.java
{ "start": 4612, "end": 4852 }
class ____ implements BeanCreator<FooSubclass<Beta>> { @Override public FooSubclass<Beta> create(SyntheticCreationalContext<FooSubclass<Beta>> context) { return new FooSubclass<Beta>(); } } }
FooCreator
java
apache__kafka
server-common/src/main/java/org/apache/kafka/timeline/SnapshottableHashTable.java
{ "start": 3024, "end": 3366 }
class ____ looks like this: * <pre> * Revertable BaseHashTable * ↑ ↑ * SnapshottableHashTable → SnapshotRegistry → Snapshot * ↑ ↑ * TimelineHashSet TimelineHashMap * </pre> * BaseHashTable is a simple hash table that uses separate chaining. The
hierarchy
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/android/StaticOrDefaultInterfaceMethodTest.java
{ "start": 1738, "end": 2118 }
interface ____ { // BUG: Diagnostic contains: StaticOrDefaultInterfaceMethod static void test() { System.out.println(); } } """) .doTest(); } @Test public void negativeCaseNoBody() { compilationHelper .addSourceLines( "Test.java", """
Test
java
apache__hadoop
hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/secure/AbstractSecureRegistryTest.java
{ "start": 5612, "end": 11890 }
class ____ of the JVM properties were * not being picked up. This method addresses that by setting them * before every test case */ @BeforeEach public void beforeSecureRegistryTest() { } @AfterEach public void afterSecureRegistryTest() throws IOException { describe(LOG, "teardown of instance"); teardown.close(); stopSecureZK(); } protected static void addToClassTeardown(Service svc) { classTeardown.addService(svc); } protected void addToTeardown(Service svc) { teardown.addService(svc); } public static void teardownKDC() throws Exception { if (kdc != null) { kdc.stop(); kdc = null; } } /** * Sets up the KDC and a set of principals in the JAAS file * * @throws Exception */ public static void setupKDCAndPrincipals() throws Exception { // set up the KDC File target = new File(System.getProperty("test.dir", "target")); kdcWorkDir = new File(target, "kdc"); if (!kdcWorkDir.mkdirs()) { assertTrue(kdcWorkDir.isDirectory()); } kdcConf = MiniKdc.createConf(); kdcConf.setProperty(MiniKdc.DEBUG, "true"); kdc = new MiniKdc(kdcConf, kdcWorkDir); kdc.start(); keytab_zk = createKeytab(ZOOKEEPER, "zookeeper.keytab"); keytab_alice = createKeytab(ALICE, "alice.keytab"); keytab_bob = createKeytab(BOB, "bob.keytab"); zkServerPrincipal = Shell.WINDOWS ? ZOOKEEPER_1270001 : ZOOKEEPER_LOCALHOST; StringBuilder jaas = new StringBuilder(1024); jaas.append(registrySecurity.createJAASEntry(ZOOKEEPER_CLIENT_CONTEXT, ZOOKEEPER, keytab_zk)); jaas.append(registrySecurity.createJAASEntry(ZOOKEEPER_SERVER_CONTEXT, zkServerPrincipal, keytab_zk)); jaas.append(registrySecurity.createJAASEntry(ALICE_CLIENT_CONTEXT, ALICE_LOCALHOST, keytab_alice)); jaas.append(registrySecurity.createJAASEntry(BOB_CLIENT_CONTEXT, BOB_LOCALHOST, keytab_bob)); jaasFile = new File(kdcWorkDir, "jaas.txt"); FileUtils.write(jaasFile, jaas.toString(), StandardCharsets.UTF_8); LOG.info("\n" + jaas); RegistrySecurity.bindJVMtoJAASFile(jaasFile); } protected static final String kerberosRule = "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT"; /** * Init hadoop security by setting up the UGI config */ public static void initHadoopSecurity() { UserGroupInformation.setConfiguration(CONF); KerberosName.setRules(kerberosRule); } /** * Stop the secure ZK and log out the ZK account */ public synchronized void stopSecureZK() { ServiceOperations.stop(secureZK); secureZK = null; logout(zookeeperLogin); zookeeperLogin = null; } public static MiniKdc getKdc() { return kdc; } public static File getKdcWorkDir() { return kdcWorkDir; } public static Properties getKdcConf() { return kdcConf; } /** * Create a secure instance * @param name instance name * @return the instance * @throws Exception */ protected static MicroZookeeperService createSecureZKInstance(String name) throws Exception { String context = ZOOKEEPER_SERVER_CONTEXT; Configuration conf = new Configuration(); File testdir = new File(System.getProperty("test.dir", "target")); File workDir = new File(testdir, name); if (!workDir.mkdirs()) { assertTrue(workDir.isDirectory()); } System.setProperty( ZookeeperConfigOptions.PROP_ZK_SERVER_MAINTAIN_CONNECTION_DESPITE_SASL_FAILURE, "false"); RegistrySecurity.validateContext(context); conf.set(MicroZookeeperServiceKeys.KEY_REGISTRY_ZKSERVICE_JAAS_CONTEXT, context); MicroZookeeperService secureZK = new MicroZookeeperService(name); secureZK.init(conf); LOG.info(secureZK.getDiagnostics()); return secureZK; } /** * Create the keytab for the given principal, including * the raw principal and $principal/localhost * @param principal principal short name * 
@param filename filename of keytab * @return file of keytab * @throws Exception */ public static File createKeytab(String principal, String filename) throws Exception { assertNotEmpty("empty principal", principal); assertNotEmpty("empty filename", filename); assertNotNull(kdc, "Null KDC"); File keytab = new File(kdcWorkDir, filename); kdc.createPrincipal(keytab, principal, principal + "/localhost", principal + "/127.0.0.1"); return keytab; } public static String getPrincipalAndRealm(String principal) { return principal + "@" + getRealm(); } protected static String getRealm() { return kdc.getRealm(); } /** * Log in, defaulting to the client context * @param principal principal * @param context context * @param keytab keytab * @return the logged in context * @throws LoginException failure to log in * @throws FileNotFoundException no keytab */ protected LoginContext login(String principal, String context, File keytab) throws LoginException, FileNotFoundException { LOG.info("Logging in as {} in context {} with keytab {}", principal, context, keytab); if (!keytab.exists()) { throw new FileNotFoundException(keytab.getAbsolutePath()); } Set<Principal> principals = new HashSet<Principal>(); principals.add(new KerberosPrincipal(principal)); Subject subject = new Subject(false, principals, new HashSet<Object>(), new HashSet<Object>()); LoginContext login = new LoginContext(context, subject, null, KerberosConfiguration.createClientConfig(principal, keytab)); login.login(); return login; } /** * Start the secure ZK instance using the test method name as the path. * As the entry is saved to the {@link #secureZK} field, it * is automatically stopped after the test case. * @throws Exception on any failure */ protected synchronized void startSecureZK() throws Exception { assertNull(secureZK, "Zookeeper is already running"); zookeeperLogin = login(zkServerPrincipal, ZOOKEEPER_SERVER_CONTEXT, keytab_zk); secureZK = createSecureZKInstance("test-" + methodName.getMethodName()); secureZK.start(); } }
setting
java
spring-projects__spring-framework
spring-aop/src/main/java/org/springframework/aop/support/ControlFlowPointcut.java
{ "start": 1829, "end": 8338 }
class ____ which to match. * @since 6.1 */ protected final Class<?> clazz; /** * An immutable list of distinct method name patterns against which to match. * @since 6.1 */ protected final List<String> methodNamePatterns; private final AtomicInteger evaluationCount = new AtomicInteger(); /** * Construct a new pointcut that matches all control flows below the given class. * @param clazz the class */ public ControlFlowPointcut(Class<?> clazz) { this(clazz, (String) null); } /** * Construct a new pointcut that matches all calls below a method matching * the given method name pattern in the given class. * <p>If no method name pattern is given, the pointcut matches all control flows * below the given class. * @param clazz the class * @param methodNamePattern the method name pattern (may be {@code null}) */ public ControlFlowPointcut(Class<?> clazz, @Nullable String methodNamePattern) { Assert.notNull(clazz, "Class must not be null"); this.clazz = clazz; this.methodNamePatterns = (methodNamePattern != null ? Collections.singletonList(methodNamePattern) : Collections.emptyList()); } /** * Construct a new pointcut that matches all calls below a method matching * one of the given method name patterns in the given class. * <p>If no method name pattern is given, the pointcut matches all control flows * below the given class. * @param clazz the class * @param methodNamePatterns the method name patterns (potentially empty) * @since 6.1 */ public ControlFlowPointcut(Class<?> clazz, String... methodNamePatterns) { this(clazz, Arrays.asList(methodNamePatterns)); } /** * Construct a new pointcut that matches all calls below a method matching * one of the given method name patterns in the given class. * <p>If no method name pattern is given, the pointcut matches all control flows * below the given class. * @param clazz the class * @param methodNamePatterns the method name patterns (potentially empty) * @since 6.1 */ public ControlFlowPointcut(Class<?> clazz, List<String> methodNamePatterns) { Assert.notNull(clazz, "Class must not be null"); Assert.notNull(methodNamePatterns, "List of method name patterns must not be null"); Assert.noNullElements(methodNamePatterns, "List of method name patterns must not contain null elements"); this.clazz = clazz; this.methodNamePatterns = methodNamePatterns.stream().distinct().toList(); } /** * Subclasses can override this for greater filtering (and performance). * <p>The default implementation always returns {@code true}. */ @Override public boolean matches(Class<?> clazz) { return true; } /** * Subclasses can override this if it's possible to filter out some candidate classes. * <p>The default implementation always returns {@code true}. */ @Override public boolean matches(Method method, Class<?> targetClass) { return true; } @Override public boolean isRuntime() { return true; } @Override public boolean matches(Method method, Class<?> targetClass, @Nullable Object... args) { incrementEvaluationCount(); for (StackTraceElement element : new Throwable().getStackTrace()) { if (element.getClassName().equals(this.clazz.getName())) { if (this.methodNamePatterns.isEmpty()) { return true; } String methodName = element.getMethodName(); for (int i = 0; i < this.methodNamePatterns.size(); i++) { if (isMatch(methodName, i)) { return true; } } } } return false; } /** * Get the number of times {@link #matches(Method, Class, Object...)} has been * evaluated. * <p>Useful for optimization and testing purposes. 
*/ public int getEvaluations() { return this.evaluationCount.get(); } /** * Increment the {@link #getEvaluations() evaluation count}. * @since 6.1 * @see #matches(Method, Class, Object...) */ protected final void incrementEvaluationCount() { this.evaluationCount.incrementAndGet(); } /** * Determine if the given method name matches the method name pattern at the * specified index. * <p>This method is invoked by {@link #matches(Method, Class, Object...)}. * <p>The default implementation retrieves the method name pattern from * {@link #methodNamePatterns} and delegates to {@link #isMatch(String, String)}. * <p>Can be overridden in subclasses &mdash; for example, to support * regular expressions. * @param methodName the method name to check * @param patternIndex the index of the method name pattern * @return {@code true} if the method name matches the pattern at the specified * index * @since 6.1 * @see #methodNamePatterns * @see #isMatch(String, String) * @see #matches(Method, Class, Object...) */ protected boolean isMatch(String methodName, int patternIndex) { String methodNamePattern = this.methodNamePatterns.get(patternIndex); return isMatch(methodName, methodNamePattern); } /** * Determine if the given method name matches the method name pattern. * <p>This method is invoked by {@link #isMatch(String, int)}. * <p>The default implementation checks for direct equality as well as * {@code xxx*}, {@code *xxx}, {@code *xxx*}, and {@code xxx*yyy} matches. * <p>Can be overridden in subclasses &mdash; for example, to support a * different style of simple pattern matching. * @param methodName the method name to check * @param methodNamePattern the method name pattern * @return {@code true} if the method name matches the pattern * @since 6.1 * @see #isMatch(String, int) * @see PatternMatchUtils#simpleMatch(String, String) */ protected boolean isMatch(String methodName, String methodNamePattern) { return (methodName.equals(methodNamePattern) || PatternMatchUtils.simpleMatch(methodNamePattern, methodName)); } @Override public ClassFilter getClassFilter() { return this; } @Override public MethodMatcher getMethodMatcher() { return this; } @Override public boolean equals(@Nullable Object other) { return (this == other || (other instanceof ControlFlowPointcut that && this.clazz.equals(that.clazz)) && this.methodNamePatterns.equals(that.methodNamePatterns)); } @Override public int hashCode() { int code = this.clazz.hashCode(); code = 37 * code + this.methodNamePatterns.hashCode(); return code; } @Override public String toString() { return getClass().getName() + ": class = " + this.clazz.getName() + "; methodNamePatterns = " + this.methodNamePatterns; } }
against
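The ControlFlowPointcut entry above explains stack-based matching and the supported name patterns (xxx*, *xxx, *xxx*, xxx*yyy). The sketch below wires such a pointcut into a Spring AOP proxy; the pointcut constructor and the ProxyFactory/DefaultPointcutAdvisor calls follow the standard Spring AOP API shown in the snippet, while BatchJob, AccountService, and the interceptor body are hypothetical illustration code:

import org.springframework.aop.framework.ProxyFactory;
import org.springframework.aop.support.ControlFlowPointcut;
import org.springframework.aop.support.DefaultPointcutAdvisor;

public class ControlFlowSketch {
    public static void main(String[] args) {
        // Advice applies only when the target method is reached from BatchJob.run*
        ControlFlowPointcut pointcut = new ControlFlowPointcut(BatchJob.class, "run*");
        DefaultPointcutAdvisor advisor = new DefaultPointcutAdvisor(pointcut,
                (org.aopalliance.intercept.MethodInterceptor) invocation -> {
                    System.out.println("called under BatchJob.run*: " + invocation.getMethod().getName());
                    return invocation.proceed();
                });
        ProxyFactory factory = new ProxyFactory(new AccountService());
        factory.addAdvisor(advisor);
        AccountService proxy = (AccountService) factory.getProxy();
        new BatchJob(proxy).run(); // advice fires inside this control flow
        proxy.transfer();          // direct call: pointcut does not match
    }

    static class AccountService {
        public void transfer() { System.out.println("transferring"); }
    }

    static class BatchJob {
        private final AccountService service;
        BatchJob(AccountService service) { this.service = service; }
        public void run() { service.transfer(); }
    }
}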
java
quarkusio__quarkus
extensions/resteasy-reactive/rest-jsonb/deployment/src/main/java/io/quarkus/resteasy/reactive/jsonb/deployment/ResteasyReactiveJsonbProcessor.java
{ "start": 740, "end": 1613 }
class ____ { @BuildStep void feature(BuildProducer<FeatureBuildItem> feature) { feature.produce(new FeatureBuildItem(Feature.REST_JSONB)); } @BuildStep ServerDefaultProducesHandlerBuildItem jsonDefault() { return ServerDefaultProducesHandlerBuildItem.json(); } @BuildStep void additionalProviders(BuildProducer<MessageBodyReaderBuildItem> additionalReaders, BuildProducer<MessageBodyWriterBuildItem> additionalWriters) { ResteasyReactiveJsonbCommonProcessor.additionalProviders(additionalReaders, additionalWriters, RuntimeType.SERVER); } @BuildStep void reflection(BuildProducer<ReflectiveClassBuildItem> producer) { producer.produce(ReflectiveClassBuildItem.builder(Cookie.class).reason(getClass().getName()).methods().build()); } }
ResteasyReactiveJsonbProcessor
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/embeddable/NestedJsonEmbeddableTest.java
{ "start": 20535, "end": 21043 }
class ____ { private String stringField; public Leaf() { } public Leaf(String stringField) { this.stringField = stringField; } @Override public boolean equals(Object o) { if ( this == o ) { return true; } if ( o == null || getClass() != o.getClass() ) { return false; } Leaf leaf = (Leaf) o; return Objects.equals( stringField, leaf.stringField ); } @Override public int hashCode() { return stringField != null ? stringField.hashCode() : 0; } } }
Leaf
java
dropwizard__dropwizard
dropwizard-jackson/src/test/java/io/dropwizard/jackson/GuavaExtrasModuleTest.java
{ "start": 408, "end": 1697 }
class ____ { private final ObjectMapper mapper = new ObjectMapper(); @BeforeEach void setUp() throws Exception { mapper.registerModule(new GuavaModule()); mapper.registerModule(new GuavaExtrasModule()); } @Test void canDeserializeAHostAndPort() throws Exception { assertThat(mapper.readValue("\"example.com:8080\"", HostAndPort.class)) .isEqualTo(HostAndPort.fromParts("example.com", 8080)); } @Test void canDeserializeCacheBuilderSpecs() throws Exception { assertThat(mapper.readValue("\"maximumSize=30\"", CacheBuilderSpec.class)) .isEqualTo(CacheBuilderSpec.parse("maximumSize=30")); } @Test void canSerializeCacheBuilderSpecs() throws Exception { assertThat(mapper.writeValueAsString(CacheBuilderSpec.disableCaching())) .isEqualTo("\"maximumSize=0\""); } @Test void canDeserializeAbsentOptions() throws Exception { assertThat(mapper.readValue("null", Optional.class)) .isEqualTo(Optional.absent()); } @Test void canDeserializePresentOptions() throws Exception { assertThat(mapper.readValue("\"woo\"", Optional.class)) .isEqualTo(Optional.of("woo")); } }
GuavaExtrasModuleTest
java
alibaba__fastjson
src/test/java/com/alibaba/json/bvt/bug/Bug_for_xujin2.java
{ "start": 3451, "end": 3521 }
interface ____<E extends Enum<E>> { int getCode(); } }
IntEnum
java
elastic__elasticsearch
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/service/TransportDeleteServiceAccountTokenAction.java
{ "start": 1046, "end": 2178 }
class ____ extends HandledTransportAction< DeleteServiceAccountTokenRequest, DeleteServiceAccountTokenResponse> { private final ServiceAccountService serviceAccountService; @Inject public TransportDeleteServiceAccountTokenAction( TransportService transportService, ActionFilters actionFilters, ServiceAccountService serviceAccountService ) { super( DeleteServiceAccountTokenAction.NAME, transportService, actionFilters, DeleteServiceAccountTokenRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE ); this.serviceAccountService = serviceAccountService; } @Override protected void doExecute( Task task, DeleteServiceAccountTokenRequest request, ActionListener<DeleteServiceAccountTokenResponse> listener ) { serviceAccountService.deleteIndexToken( request, ActionListener.wrap(found -> listener.onResponse(new DeleteServiceAccountTokenResponse(found)), listener::onFailure) ); } }
TransportDeleteServiceAccountTokenAction
java
spring-projects__spring-boot
module/spring-boot-liquibase/src/test/java/org/springframework/boot/liquibase/LiquibaseChangelogMissingFailureAnalyzerTests.java
{ "start": 2409, "end": 2912 }
class ____ { @Bean DataSource dataSource() { HikariDataSource dataSource = new HikariDataSource(); dataSource.setJdbcUrl("jdbc:h2:mem:test"); dataSource.setUsername("sa"); return dataSource; } @Bean SpringLiquibase springLiquibase(DataSource dataSource) { SpringLiquibase liquibase = new SpringLiquibase(); liquibase.setChangeLog("classpath:/db/changelog/db.changelog-master.yaml"); liquibase.setDataSource(dataSource); return liquibase; } } }
LiquibaseConfiguration
java
spring-projects__spring-security
oauth2/oauth2-client/src/test/java/org/springframework/security/oauth2/client/web/DefaultReactiveOAuth2AuthorizedClientManagerTests.java
{ "start": 3516, "end": 31509 }
class ____ { private ReactiveClientRegistrationRepository clientRegistrationRepository; private ServerOAuth2AuthorizedClientRepository authorizedClientRepository; private ReactiveOAuth2AuthorizedClientProvider authorizedClientProvider; private Function contextAttributesMapper; private DefaultReactiveOAuth2AuthorizedClientManager authorizedClientManager; private ClientRegistration clientRegistration; private Authentication principal; private OAuth2AuthorizedClient authorizedClient; private MockServerWebExchange serverWebExchange; private Context context; private ArgumentCaptor<OAuth2AuthorizationContext> authorizationContextCaptor; private PublisherProbe<OAuth2AuthorizedClient> loadAuthorizedClientProbe; private PublisherProbe<Void> saveAuthorizedClientProbe; private PublisherProbe<Void> removeAuthorizedClientProbe; @SuppressWarnings("unchecked") @BeforeEach public void setup() { this.clientRegistrationRepository = mock(ReactiveClientRegistrationRepository.class); given(this.clientRegistrationRepository.findByRegistrationId(anyString())).willReturn(Mono.empty()); this.authorizedClientRepository = mock(ServerOAuth2AuthorizedClientRepository.class); this.loadAuthorizedClientProbe = PublisherProbe.empty(); given(this.authorizedClientRepository.loadAuthorizedClient(anyString(), any(Authentication.class), any(ServerWebExchange.class))) .willReturn(this.loadAuthorizedClientProbe.mono()); this.saveAuthorizedClientProbe = PublisherProbe.empty(); given(this.authorizedClientRepository.saveAuthorizedClient(any(OAuth2AuthorizedClient.class), any(Authentication.class), any(ServerWebExchange.class))) .willReturn(this.saveAuthorizedClientProbe.mono()); this.removeAuthorizedClientProbe = PublisherProbe.empty(); given(this.authorizedClientRepository.removeAuthorizedClient(any(String.class), any(Authentication.class), any(ServerWebExchange.class))) .willReturn(this.removeAuthorizedClientProbe.mono()); this.authorizedClientProvider = mock(ReactiveOAuth2AuthorizedClientProvider.class); given(this.authorizedClientProvider.authorize(any(OAuth2AuthorizationContext.class))).willReturn(Mono.empty()); this.contextAttributesMapper = mock(Function.class); given(this.contextAttributesMapper.apply(any())).willReturn(Mono.just(Collections.emptyMap())); this.authorizedClientManager = new DefaultReactiveOAuth2AuthorizedClientManager( this.clientRegistrationRepository, this.authorizedClientRepository); this.authorizedClientManager.setAuthorizedClientProvider(this.authorizedClientProvider); this.authorizedClientManager.setContextAttributesMapper(this.contextAttributesMapper); this.clientRegistration = TestClientRegistrations.clientRegistration().build(); this.principal = new TestingAuthenticationToken("principal", "password"); this.authorizedClient = new OAuth2AuthorizedClient(this.clientRegistration, this.principal.getName(), TestOAuth2AccessTokens.scopes("read", "write"), TestOAuth2RefreshTokens.refreshToken()); this.serverWebExchange = MockServerWebExchange.builder(MockServerHttpRequest.get("/")).build(); this.context = Context.of(ServerWebExchange.class, this.serverWebExchange); this.authorizationContextCaptor = ArgumentCaptor.forClass(OAuth2AuthorizationContext.class); } @Test public void constructorWhenClientRegistrationRepositoryIsNullThenThrowIllegalArgumentException() { assertThatIllegalArgumentException() .isThrownBy(() -> new DefaultReactiveOAuth2AuthorizedClientManager(null, this.authorizedClientRepository)) .withMessage("clientRegistrationRepository cannot be null"); } @Test public void 
constructorWhenOAuth2AuthorizedClientRepositoryIsNullThenThrowIllegalArgumentException() { assertThatIllegalArgumentException() .isThrownBy(() -> new DefaultReactiveOAuth2AuthorizedClientManager(this.clientRegistrationRepository, null)) .withMessage("authorizedClientRepository cannot be null"); } @Test public void setAuthorizedClientProviderWhenNullThenThrowIllegalArgumentException() { assertThatIllegalArgumentException() .isThrownBy(() -> this.authorizedClientManager.setAuthorizedClientProvider(null)) .withMessage("authorizedClientProvider cannot be null"); } @Test public void setAuthorizationSuccessHandlerWhenNullThenThrowIllegalArgumentException() { assertThatIllegalArgumentException() .isThrownBy(() -> this.authorizedClientManager.setAuthorizationSuccessHandler(null)) .withMessage("authorizationSuccessHandler cannot be null"); } @Test public void setAuthorizationFailureHandlerWhenNullThenThrowIllegalArgumentException() { assertThatIllegalArgumentException() .isThrownBy(() -> this.authorizedClientManager.setAuthorizationFailureHandler(null)) .withMessage("authorizationFailureHandler cannot be null"); } @Test public void setContextAttributesMapperWhenNullThenThrowIllegalArgumentException() { assertThatIllegalArgumentException() .isThrownBy(() -> this.authorizedClientManager.setContextAttributesMapper(null)) .withMessage("contextAttributesMapper cannot be null"); } @Test public void authorizeWhenRequestIsNullThenThrowIllegalArgumentException() { assertThatIllegalArgumentException().isThrownBy(() -> this.authorizedClientManager.authorize(null).block()) .withMessage("authorizeRequest cannot be null"); } @Test public void authorizeWhenExchangeIsNullThenThrowIllegalArgumentException() { OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest .withClientRegistrationId(this.clientRegistration.getRegistrationId()) .principal(this.principal) .build(); assertThatIllegalArgumentException() .isThrownBy(() -> this.authorizedClientManager.authorize(authorizeRequest).block()) .withMessage("serverWebExchange cannot be null"); } @Test public void authorizeWhenClientRegistrationNotFoundThenThrowIllegalArgumentException() { OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest .withClientRegistrationId("invalid-registration-id") .principal(this.principal) .build(); assertThatIllegalArgumentException() .isThrownBy( () -> this.authorizedClientManager.authorize(authorizeRequest).contextWrite(this.context).block()) .withMessage("Could not find ClientRegistration with id 'invalid-registration-id'"); } @SuppressWarnings("unchecked") @Test public void authorizeWhenNotAuthorizedAndUnsupportedProviderThenNotAuthorized() { given(this.clientRegistrationRepository.findByRegistrationId(eq(this.clientRegistration.getRegistrationId()))) .willReturn(Mono.just(this.clientRegistration)); OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest .withClientRegistrationId(this.clientRegistration.getRegistrationId()) .principal(this.principal) .build(); OAuth2AuthorizedClient authorizedClient = this.authorizedClientManager.authorize(authorizeRequest) .contextWrite(this.context) .block(); verify(this.authorizedClientProvider).authorize(this.authorizationContextCaptor.capture()); verify(this.contextAttributesMapper).apply(eq(authorizeRequest)); OAuth2AuthorizationContext authorizationContext = this.authorizationContextCaptor.getValue(); assertThat(authorizationContext.getClientRegistration()).isEqualTo(this.clientRegistration); assertThat(authorizationContext.getAuthorizedClient()).isNull(); 
assertThat(authorizationContext.getPrincipal()).isEqualTo(this.principal); assertThat(authorizedClient).isNull(); this.loadAuthorizedClientProbe.assertWasSubscribed(); this.saveAuthorizedClientProbe.assertWasNotSubscribed(); } @SuppressWarnings("unchecked") @Test public void authorizeWhenNotAuthorizedAndSupportedProviderThenAuthorized() { given(this.clientRegistrationRepository.findByRegistrationId(eq(this.clientRegistration.getRegistrationId()))) .willReturn(Mono.just(this.clientRegistration)); given(this.authorizedClientProvider.authorize(any(OAuth2AuthorizationContext.class))) .willReturn(Mono.just(this.authorizedClient)); OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest .withClientRegistrationId(this.clientRegistration.getRegistrationId()) .principal(this.principal) .build(); OAuth2AuthorizedClient authorizedClient = this.authorizedClientManager.authorize(authorizeRequest) .contextWrite(this.context) .block(); verify(this.authorizedClientProvider).authorize(this.authorizationContextCaptor.capture()); verify(this.contextAttributesMapper).apply(eq(authorizeRequest)); OAuth2AuthorizationContext authorizationContext = this.authorizationContextCaptor.getValue(); assertThat(authorizationContext.getClientRegistration()).isEqualTo(this.clientRegistration); assertThat(authorizationContext.getAuthorizedClient()).isNull(); assertThat(authorizationContext.getPrincipal()).isEqualTo(this.principal); assertThat(authorizedClient).isSameAs(this.authorizedClient); verify(this.authorizedClientRepository).saveAuthorizedClient(eq(this.authorizedClient), eq(this.principal), eq(this.serverWebExchange)); this.saveAuthorizedClientProbe.assertWasSubscribed(); verify(this.authorizedClientRepository, never()).removeAuthorizedClient(any(), any(), any()); } @SuppressWarnings("unchecked") @Test public void authorizeWhenNotAuthorizedAndSupportedProviderAndCustomSuccessHandlerThenInvokeCustomSuccessHandler() { given(this.clientRegistrationRepository.findByRegistrationId(eq(this.clientRegistration.getRegistrationId()))) .willReturn(Mono.just(this.clientRegistration)); given(this.authorizedClientProvider.authorize(any(OAuth2AuthorizationContext.class))) .willReturn(Mono.just(this.authorizedClient)); OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest .withClientRegistrationId(this.clientRegistration.getRegistrationId()) .principal(this.principal) .build(); PublisherProbe<Void> authorizationSuccessHandlerProbe = PublisherProbe.empty(); this.authorizedClientManager .setAuthorizationSuccessHandler((client, principal, attributes) -> authorizationSuccessHandlerProbe.mono()); OAuth2AuthorizedClient authorizedClient = this.authorizedClientManager.authorize(authorizeRequest) .contextWrite(this.context) .block(); verify(this.authorizedClientProvider).authorize(this.authorizationContextCaptor.capture()); verify(this.contextAttributesMapper).apply(eq(authorizeRequest)); OAuth2AuthorizationContext authorizationContext = this.authorizationContextCaptor.getValue(); assertThat(authorizationContext.getClientRegistration()).isEqualTo(this.clientRegistration); assertThat(authorizationContext.getAuthorizedClient()).isNull(); assertThat(authorizationContext.getPrincipal()).isEqualTo(this.principal); assertThat(authorizedClient).isSameAs(this.authorizedClient); authorizationSuccessHandlerProbe.assertWasSubscribed(); verify(this.authorizedClientRepository, never()).saveAuthorizedClient(any(), any(), any()); verify(this.authorizedClientRepository, never()).removeAuthorizedClient(any(), any(), any()); } 
@SuppressWarnings("unchecked") @Test public void authorizeWhenInvalidTokenThenRemoveAuthorizedClient() { given(this.clientRegistrationRepository.findByRegistrationId(eq(this.clientRegistration.getRegistrationId()))) .willReturn(Mono.just(this.clientRegistration)); OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest .withClientRegistrationId(this.clientRegistration.getRegistrationId()) .principal(this.principal) .build(); ClientAuthorizationException exception = new ClientAuthorizationException( new OAuth2Error(OAuth2ErrorCodes.INVALID_TOKEN, null, null), this.clientRegistration.getRegistrationId()); given(this.authorizedClientProvider.authorize(any(OAuth2AuthorizationContext.class))) .willReturn(Mono.error(exception)); assertThatExceptionOfType(ClientAuthorizationException.class) .isThrownBy( () -> this.authorizedClientManager.authorize(authorizeRequest).contextWrite(this.context).block()) .isEqualTo(exception); verify(this.authorizedClientProvider).authorize(this.authorizationContextCaptor.capture()); verify(this.contextAttributesMapper).apply(eq(authorizeRequest)); OAuth2AuthorizationContext authorizationContext = this.authorizationContextCaptor.getValue(); assertThat(authorizationContext.getClientRegistration()).isEqualTo(this.clientRegistration); assertThat(authorizationContext.getAuthorizedClient()).isNull(); assertThat(authorizationContext.getPrincipal()).isEqualTo(this.principal); verify(this.authorizedClientRepository).removeAuthorizedClient(eq(this.clientRegistration.getRegistrationId()), eq(this.principal), eq(this.serverWebExchange)); this.removeAuthorizedClientProbe.assertWasSubscribed(); verify(this.authorizedClientRepository, never()).saveAuthorizedClient(any(), any(), any()); } @SuppressWarnings("unchecked") @Test public void authorizeWhenInvalidGrantThenRemoveAuthorizedClient() { given(this.clientRegistrationRepository.findByRegistrationId(eq(this.clientRegistration.getRegistrationId()))) .willReturn(Mono.just(this.clientRegistration)); OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest .withClientRegistrationId(this.clientRegistration.getRegistrationId()) .principal(this.principal) .build(); ClientAuthorizationException exception = new ClientAuthorizationException( new OAuth2Error(OAuth2ErrorCodes.INVALID_GRANT, null, null), this.clientRegistration.getRegistrationId()); given(this.authorizedClientProvider.authorize(any(OAuth2AuthorizationContext.class))) .willReturn(Mono.error(exception)); assertThatExceptionOfType(ClientAuthorizationException.class) .isThrownBy( () -> this.authorizedClientManager.authorize(authorizeRequest).contextWrite(this.context).block()) .isEqualTo(exception); verify(this.authorizedClientProvider).authorize(this.authorizationContextCaptor.capture()); verify(this.contextAttributesMapper).apply(eq(authorizeRequest)); OAuth2AuthorizationContext authorizationContext = this.authorizationContextCaptor.getValue(); assertThat(authorizationContext.getClientRegistration()).isEqualTo(this.clientRegistration); assertThat(authorizationContext.getAuthorizedClient()).isNull(); assertThat(authorizationContext.getPrincipal()).isEqualTo(this.principal); verify(this.authorizedClientRepository).removeAuthorizedClient(eq(this.clientRegistration.getRegistrationId()), eq(this.principal), eq(this.serverWebExchange)); this.removeAuthorizedClientProbe.assertWasSubscribed(); verify(this.authorizedClientRepository, never()).saveAuthorizedClient(any(), any(), any()); } @SuppressWarnings("unchecked") @Test public void 
authorizeWhenServerErrorThenDoNotRemoveAuthorizedClient() { given(this.clientRegistrationRepository.findByRegistrationId(eq(this.clientRegistration.getRegistrationId()))) .willReturn(Mono.just(this.clientRegistration)); OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest .withClientRegistrationId(this.clientRegistration.getRegistrationId()) .principal(this.principal) .build(); ClientAuthorizationException exception = new ClientAuthorizationException( new OAuth2Error(OAuth2ErrorCodes.SERVER_ERROR, null, null), this.clientRegistration.getRegistrationId()); given(this.authorizedClientProvider.authorize(any(OAuth2AuthorizationContext.class))) .willReturn(Mono.error(exception)); assertThatExceptionOfType(ClientAuthorizationException.class) .isThrownBy( () -> this.authorizedClientManager.authorize(authorizeRequest).contextWrite(this.context).block()) .isEqualTo(exception); verify(this.authorizedClientProvider).authorize(this.authorizationContextCaptor.capture()); verify(this.contextAttributesMapper).apply(eq(authorizeRequest)); OAuth2AuthorizationContext authorizationContext = this.authorizationContextCaptor.getValue(); assertThat(authorizationContext.getClientRegistration()).isEqualTo(this.clientRegistration); assertThat(authorizationContext.getAuthorizedClient()).isNull(); assertThat(authorizationContext.getPrincipal()).isEqualTo(this.principal); verify(this.authorizedClientRepository, never()).removeAuthorizedClient(any(), any(), any()); verify(this.authorizedClientRepository, never()).saveAuthorizedClient(any(), any(), any()); } @SuppressWarnings("unchecked") @Test public void authorizeWhenOAuth2AuthorizationExceptionThenDoNotRemoveAuthorizedClient() { given(this.clientRegistrationRepository.findByRegistrationId(eq(this.clientRegistration.getRegistrationId()))) .willReturn(Mono.just(this.clientRegistration)); OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest .withClientRegistrationId(this.clientRegistration.getRegistrationId()) .principal(this.principal) .build(); OAuth2AuthorizationException exception = new OAuth2AuthorizationException( new OAuth2Error(OAuth2ErrorCodes.INVALID_GRANT, null, null)); given(this.authorizedClientProvider.authorize(any(OAuth2AuthorizationContext.class))) .willReturn(Mono.error(exception)); assertThatExceptionOfType(OAuth2AuthorizationException.class) .isThrownBy( () -> this.authorizedClientManager.authorize(authorizeRequest).contextWrite(this.context).block()) .isEqualTo(exception); verify(this.authorizedClientProvider).authorize(this.authorizationContextCaptor.capture()); verify(this.contextAttributesMapper).apply(eq(authorizeRequest)); OAuth2AuthorizationContext authorizationContext = this.authorizationContextCaptor.getValue(); assertThat(authorizationContext.getClientRegistration()).isEqualTo(this.clientRegistration); assertThat(authorizationContext.getAuthorizedClient()).isNull(); assertThat(authorizationContext.getPrincipal()).isEqualTo(this.principal); verify(this.authorizedClientRepository, never()).removeAuthorizedClient(any(), any(), any()); verify(this.authorizedClientRepository, never()).saveAuthorizedClient(any(), any(), any()); } @SuppressWarnings("unchecked") @Test public void authorizeWhenOAuth2AuthorizationExceptionAndCustomFailureHandlerThenInvokeCustomFailureHandler() { given(this.clientRegistrationRepository.findByRegistrationId(eq(this.clientRegistration.getRegistrationId()))) .willReturn(Mono.just(this.clientRegistration)); OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest 
.withClientRegistrationId(this.clientRegistration.getRegistrationId()) .principal(this.principal) .build(); OAuth2AuthorizationException exception = new OAuth2AuthorizationException( new OAuth2Error(OAuth2ErrorCodes.INVALID_GRANT, null, null)); given(this.authorizedClientProvider.authorize(any(OAuth2AuthorizationContext.class))) .willReturn(Mono.error(exception)); PublisherProbe<Void> authorizationFailureHandlerProbe = PublisherProbe.empty(); this.authorizedClientManager .setAuthorizationFailureHandler((client, principal, attributes) -> authorizationFailureHandlerProbe.mono()); assertThatExceptionOfType(OAuth2AuthorizationException.class) .isThrownBy( () -> this.authorizedClientManager.authorize(authorizeRequest).contextWrite(this.context).block()) .isEqualTo(exception); verify(this.authorizedClientProvider).authorize(this.authorizationContextCaptor.capture()); verify(this.contextAttributesMapper).apply(eq(authorizeRequest)); OAuth2AuthorizationContext authorizationContext = this.authorizationContextCaptor.getValue(); assertThat(authorizationContext.getClientRegistration()).isEqualTo(this.clientRegistration); assertThat(authorizationContext.getAuthorizedClient()).isNull(); assertThat(authorizationContext.getPrincipal()).isEqualTo(this.principal); authorizationFailureHandlerProbe.assertWasSubscribed(); verify(this.authorizedClientRepository, never()).removeAuthorizedClient(any(), any(), any()); verify(this.authorizedClientRepository, never()).saveAuthorizedClient(any(), any(), any()); } @SuppressWarnings("unchecked") @Test public void authorizeWhenAuthorizedAndSupportedProviderThenReauthorized() { given(this.clientRegistrationRepository.findByRegistrationId(eq(this.clientRegistration.getRegistrationId()))) .willReturn(Mono.just(this.clientRegistration)); this.loadAuthorizedClientProbe = PublisherProbe.of(Mono.just(this.authorizedClient)); given(this.authorizedClientRepository.loadAuthorizedClient(eq(this.clientRegistration.getRegistrationId()), eq(this.principal), eq(this.serverWebExchange))) .willReturn(this.loadAuthorizedClientProbe.mono()); OAuth2AuthorizedClient reauthorizedClient = new OAuth2AuthorizedClient(this.clientRegistration, this.principal.getName(), TestOAuth2AccessTokens.noScopes(), TestOAuth2RefreshTokens.refreshToken()); given(this.authorizedClientProvider.authorize(any(OAuth2AuthorizationContext.class))) .willReturn(Mono.just(reauthorizedClient)); OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest .withClientRegistrationId(this.clientRegistration.getRegistrationId()) .principal(this.principal) .build(); OAuth2AuthorizedClient authorizedClient = this.authorizedClientManager.authorize(authorizeRequest) .contextWrite(this.context) .block(); verify(this.authorizedClientProvider).authorize(this.authorizationContextCaptor.capture()); verify(this.contextAttributesMapper).apply(any()); OAuth2AuthorizationContext authorizationContext = this.authorizationContextCaptor.getValue(); assertThat(authorizationContext.getClientRegistration()).isEqualTo(this.clientRegistration); assertThat(authorizationContext.getAuthorizedClient()).isSameAs(this.authorizedClient); assertThat(authorizationContext.getPrincipal()).isEqualTo(this.principal); assertThat(authorizedClient).isSameAs(reauthorizedClient); verify(this.authorizedClientRepository).saveAuthorizedClient(eq(reauthorizedClient), eq(this.principal), eq(this.serverWebExchange)); this.saveAuthorizedClientProbe.assertWasSubscribed(); verify(this.authorizedClientRepository, never()).removeAuthorizedClient(any(), any(), any()); } 
@SuppressWarnings("unchecked") @Test public void reauthorizeWhenUnsupportedProviderThenNotReauthorized() { OAuth2AuthorizeRequest reauthorizeRequest = OAuth2AuthorizeRequest.withAuthorizedClient(this.authorizedClient) .principal(this.principal) .build(); OAuth2AuthorizedClient authorizedClient = this.authorizedClientManager.authorize(reauthorizeRequest) .contextWrite(this.context) .block(); verify(this.authorizedClientProvider).authorize(this.authorizationContextCaptor.capture()); verify(this.contextAttributesMapper).apply(eq(reauthorizeRequest)); OAuth2AuthorizationContext authorizationContext = this.authorizationContextCaptor.getValue(); assertThat(authorizationContext.getClientRegistration()).isEqualTo(this.clientRegistration); assertThat(authorizationContext.getAuthorizedClient()).isSameAs(this.authorizedClient); assertThat(authorizationContext.getPrincipal()).isEqualTo(this.principal); assertThat(authorizedClient).isSameAs(this.authorizedClient); this.saveAuthorizedClientProbe.assertWasNotSubscribed(); } @SuppressWarnings("unchecked") @Test public void reauthorizeWhenSupportedProviderThenReauthorized() { OAuth2AuthorizedClient reauthorizedClient = new OAuth2AuthorizedClient(this.clientRegistration, this.principal.getName(), TestOAuth2AccessTokens.noScopes(), TestOAuth2RefreshTokens.refreshToken()); given(this.authorizedClientProvider.authorize(any(OAuth2AuthorizationContext.class))) .willReturn(Mono.just(reauthorizedClient)); OAuth2AuthorizeRequest reauthorizeRequest = OAuth2AuthorizeRequest.withAuthorizedClient(this.authorizedClient) .principal(this.principal) .build(); OAuth2AuthorizedClient authorizedClient = this.authorizedClientManager.authorize(reauthorizeRequest) .contextWrite(this.context) .block(); verify(this.authorizedClientProvider).authorize(this.authorizationContextCaptor.capture()); verify(this.contextAttributesMapper).apply(eq(reauthorizeRequest)); OAuth2AuthorizationContext authorizationContext = this.authorizationContextCaptor.getValue(); assertThat(authorizationContext.getClientRegistration()).isEqualTo(this.clientRegistration); assertThat(authorizationContext.getAuthorizedClient()).isSameAs(this.authorizedClient); assertThat(authorizationContext.getPrincipal()).isEqualTo(this.principal); assertThat(authorizedClient).isSameAs(reauthorizedClient); verify(this.authorizedClientRepository).saveAuthorizedClient(eq(reauthorizedClient), eq(this.principal), eq(this.serverWebExchange)); this.saveAuthorizedClientProbe.assertWasSubscribed(); verify(this.authorizedClientRepository, never()).removeAuthorizedClient(any(), any(), any()); } @Test public void reauthorizeWhenRequestParameterScopeThenMappedToContext() { OAuth2AuthorizedClient reauthorizedClient = new OAuth2AuthorizedClient(this.clientRegistration, this.principal.getName(), TestOAuth2AccessTokens.noScopes(), TestOAuth2RefreshTokens.refreshToken()); given(this.authorizedClientProvider.authorize(any(OAuth2AuthorizationContext.class))) .willReturn(Mono.just(reauthorizedClient)); // Override the mock with the default this.authorizedClientManager.setContextAttributesMapper( new DefaultReactiveOAuth2AuthorizedClientManager.DefaultContextAttributesMapper()); this.serverWebExchange = MockServerWebExchange .builder(MockServerHttpRequest.get("/").queryParam(OAuth2ParameterNames.SCOPE, "read write")) .build(); this.context = Context.of(ServerWebExchange.class, this.serverWebExchange); OAuth2AuthorizeRequest reauthorizeRequest = OAuth2AuthorizeRequest.withAuthorizedClient(this.authorizedClient) .principal(this.principal) .build(); 
this.authorizedClientManager.authorize(reauthorizeRequest).contextWrite(this.context).block(); verify(this.authorizedClientProvider).authorize(this.authorizationContextCaptor.capture()); OAuth2AuthorizationContext authorizationContext = this.authorizationContextCaptor.getValue(); String[] requestScopeAttribute = authorizationContext .getAttribute(OAuth2AuthorizationContext.REQUEST_SCOPE_ATTRIBUTE_NAME); assertThat(requestScopeAttribute).contains("read", "write"); } @Test public void authorizeWhenBlockingExecutionAndContextPropagationEnabledThenContextPropagated() throws InterruptedException { Hooks.enableAutomaticContextPropagation(); given(this.clientRegistrationRepository.findByRegistrationId(eq(this.clientRegistration.getRegistrationId()))) .willReturn(Mono.just(this.clientRegistration)); given(this.authorizedClientProvider.authorize(any(OAuth2AuthorizationContext.class))) .willReturn(Mono.just(this.authorizedClient)); OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest .withClientRegistrationId(this.clientRegistration.getRegistrationId()) .principal(this.principal) .build(); CountDownLatch countDownLatch = new CountDownLatch(1); Runnable task = () -> { try { OAuth2AuthorizedClient authorizedClient = this.authorizedClientManager.authorize(authorizeRequest) .block(); assertThat(authorizedClient).isSameAs(this.authorizedClient); } finally { countDownLatch.countDown(); } }; try (SimpleAsyncTaskExecutor taskExecutor = new SimpleAsyncTaskExecutor()) { ContextSnapshotFactory contextSnapshotFactory = ContextSnapshotFactory.builder().build(); ExecutorService executorService = ContextExecutorService.wrap(new ExecutorServiceAdapter(taskExecutor), contextSnapshotFactory); Mono.fromRunnable(() -> executorService.execute(task)).contextWrite(this.context).block(); } countDownLatch.await(); verify(this.authorizedClientProvider).authorize(any(OAuth2AuthorizationContext.class)); } private Mono<ServerWebExchange> currentServerWebExchange() { return Mono.deferContextual(Mono::just) .filter((c) -> c.hasKey(ServerWebExchange.class)) .map((c) -> c.get(ServerWebExchange.class)); } }
DefaultReactiveOAuth2AuthorizedClientManagerTests
java
spring-projects__spring-framework
spring-context/src/test/java/org/springframework/context/annotation/configuration/ImportTests.java
{ "start": 11087, "end": 11223 }
class ____ { @Bean ImportedBean importedBean() { return new ImportedBean("imported"); } } @Configuration static
ImportedConfig
java
quarkusio__quarkus
extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/proxy/ViaHeaderReturningResource.java
{ "start": 162, "end": 345 }
class ____ { @GET public String getViaHeader(@HeaderParam("X-Via") String viaHeader) { return viaHeader == null ? "noProxy" : viaHeader; } }
ViaHeaderReturningResource
java
hibernate__hibernate-orm
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/collection/embeddable/Item.java
{ "start": 399, "end": 1388 }
class ____ { private String name; @ManyToOne @Audited(targetAuditMode = RelationTargetAuditMode.NOT_AUDITED) private Type type; Item() { } Item(String name, Type type) { this.name = name; this.type = type; } public String getName() { return name; } public void setName(String name) { this.name = name; } public Type getType() { return type; } public void setType(Type type) { this.type = type; } @Override public int hashCode() { int result = name != null ? name.hashCode() : 0; result = 31 * result + ( type != null ? type.hashCode() : 0 ); return result; } @Override public boolean equals(Object object) { if ( this == object ) { return true; } if ( object == null || getClass() != object.getClass() ) { return false; } Item that = (Item) object; if ( name != null ? !name.equals( that.name ) : that.name != null ) { return false; } return !( type != null ? !type.equals( that.type ) : that.type != null ); } }
Item
java
mapstruct__mapstruct
processor/src/test/java/org/mapstruct/ap/test/references/statics/Category.java
{ "start": 242, "end": 312 }
enum ____ { LIGHT, LAGER, STRONG, BARLEY_WINE }
Category
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/protocol/TestHdfsFileStatusMethods.java
{ "start": 2502, "end": 3544 }
class ____ { private final String name; private final Type rval; private final Type[] param; MethodSignature(Method m) { name = m.getName(); rval = m.getGenericReturnType(); param = m.getParameterTypes(); } @Override public int hashCode() { return name.hashCode(); } /** * Methods are equal iff they have the same name, return type, and params * (non-generic). */ @Override public boolean equals(Object o) { if (!(o instanceof MethodSignature)) { return false; } MethodSignature s = (MethodSignature) o; return name.equals(s.name) && rval.equals(s.rval) && Arrays.equals(param, s.param); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append(rval).append(" ").append(name).append("(") .append(Stream.of(param) .map(Type::toString).collect(joining(","))) .append(")"); return sb.toString(); } } }
MethodSignature
java
apache__camel
dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java
{ "start": 523272, "end": 535991 }
class ____ extends YamlDeserializerBase<JsonPathExpression> { public JsonPathExpressionDeserializer() { super(JsonPathExpression.class); } @Override protected JsonPathExpression newInstance() { return new JsonPathExpression(); } @Override protected JsonPathExpression newInstance(String value) { return new JsonPathExpression(value); } @Override protected boolean setProperty(JsonPathExpression target, String propertyKey, String propertyName, Node node) { propertyKey = org.apache.camel.util.StringHelper.dashToCamelCase(propertyKey); switch(propertyKey) { case "allowEasyPredicate": { String val = asText(node); target.setAllowEasyPredicate(val); break; } case "allowSimple": { String val = asText(node); target.setAllowSimple(val); break; } case "expression": { String val = asText(node); target.setExpression(val); break; } case "id": { String val = asText(node); target.setId(val); break; } case "option": { String val = asText(node); target.setOption(val); break; } case "resultType": { String val = asText(node); target.setResultTypeName(val); break; } case "source": { String val = asText(node); target.setSource(val); break; } case "suppressExceptions": { String val = asText(node); target.setSuppressExceptions(val); break; } case "trim": { String val = asText(node); target.setTrim(val); break; } case "unpackArray": { String val = asText(node); target.setUnpackArray(val); break; } case "writeAsString": { String val = asText(node); target.setWriteAsString(val); break; } default: { ExpressionDefinition ed = target.getExpressionType(); if (ed != null) { throw new org.apache.camel.dsl.yaml.common.exception.DuplicateFieldException(node, propertyName, "as an expression"); } ed = ExpressionDeserializers.constructExpressionType(propertyKey, node); if (ed != null) { target.setExpressionType(ed); } else { return false; } } } return true; } } @YamlType( nodes = "jtaTransactionErrorHandler", types = org.apache.camel.model.errorhandler.JtaTransactionErrorHandlerDefinition.class, order = org.apache.camel.dsl.yaml.common.YamlDeserializerResolver.ORDER_LOWEST - 1, displayName = "Jta Transaction Error Handler", description = "JTA based transactional error handler (requires camel-jta).", deprecated = false, properties = { @YamlProperty(name = "executorServiceRef", type = "string", description = "Sets a reference to a thread pool to be used by the error handler", displayName = "Executor Service Ref"), @YamlProperty(name = "id", type = "string", description = "The id of this node", displayName = "Id"), @YamlProperty(name = "level", type = "enum:TRACE,DEBUG,INFO,WARN,ERROR,OFF", defaultValue = "ERROR", description = "Logging level to use by error handler", displayName = "Level"), @YamlProperty(name = "logName", type = "string", description = "Name of the logger to use by the error handler", displayName = "Log Name"), @YamlProperty(name = "loggerRef", type = "string", description = "References to a logger to use as logger for the error handler", displayName = "Logger Ref"), @YamlProperty(name = "onExceptionOccurredRef", type = "string", description = "Sets a reference to a processor that should be processed just after an exception occurred. Can be used to perform custom logging about the occurred exception at the exact time it happened. 
Important: Any exception thrown from this processor will be ignored.", displayName = "On Exception Occurred Ref"), @YamlProperty(name = "onPrepareFailureRef", type = "string", description = "Sets a reference to a processor to prepare the org.apache.camel.Exchange before it is handled by the failure processor / dead letter channel. This allows, for example, enriching the message before sending it to a dead letter queue.", displayName = "On Prepare Failure Ref"), @YamlProperty(name = "onRedeliveryRef", type = "string", description = "Sets a reference to a processor that should be processed before a redelivery attempt. Can be used to change the org.apache.camel.Exchange before it's being redelivered.", displayName = "On Redelivery Ref"), @YamlProperty(name = "redeliveryPolicy", type = "object:org.apache.camel.model.RedeliveryPolicyDefinition", description = "Sets the redelivery settings", displayName = "Redelivery Policy"), @YamlProperty(name = "redeliveryPolicyRef", type = "string", description = "Sets a reference to a RedeliveryPolicy to be used for redelivery settings.", displayName = "Redelivery Policy Ref"), @YamlProperty(name = "retryWhileRef", type = "string", description = "Sets a retry while predicate. Will continue retrying until the predicate evaluates to false.", displayName = "Retry While Ref"), @YamlProperty(name = "rollbackLoggingLevel", type = "enum:TRACE,DEBUG,INFO,WARN,ERROR,OFF", defaultValue = "WARN", description = "Sets the logging level to use for logging transactional rollback. This option is default WARN.", displayName = "Rollback Logging Level"), @YamlProperty(name = "transactedPolicyRef", type = "string", description = "The transacted policy to use that is configured for either Spring or JTA based transactions. If no policy has been configured then Camel will attempt to auto-discover.", displayName = "Transacted Policy Ref"), @YamlProperty(name = "useOriginalBody", type = "boolean", defaultValue = "false", description = "Will use the original input org.apache.camel.Message body (original body only) when an org.apache.camel.Exchange is moved to the dead letter queue. Notice: this only applies when all redelivery attempts have failed and the org.apache.camel.Exchange is doomed for failure. Instead of using the current in-progress org.apache.camel.Exchange IN message we use the original IN message instead. This allows you to store the original input in the dead letter queue instead of the in-progress snapshot of the IN message. For instance, if your route transforms the IN body during routing and then fails, with the original exchange stored in the dead letter queue it might be easier to manually resubmit the org.apache.camel.Exchange again, as the IN message is the same as when Camel received it. So you should be able to send the org.apache.camel.Exchange to the same input. The difference between useOriginalMessage and useOriginalBody is that the former includes both the original body and headers, whereas the latter only includes the original body. You can use the latter to enrich the message with custom headers and include the original message body. The former won't let you do this, as it's using the original message body and headers as they are. You cannot enable both useOriginalMessage and useOriginalBody. 
The original input message is defensively copied, and the copied message body is converted to org.apache.camel.StreamCache if possible (stream caching is enabled, can be disabled globally or on the original route), to ensure the body can be read when the original message is being used later. If the body is converted to org.apache.camel.StreamCache then the message body on the current org.apache.camel.Exchange is replaced with the org.apache.camel.StreamCache body. If the body is not converted to org.apache.camel.StreamCache then the body will not be re-readable when accessed later. Important: The original input means the input message that is bound by the current org.apache.camel.spi.UnitOfWork. A unit of work typically spans one route, or multiple routes if they are connected using internal endpoints such as direct or seda. When messages are passed via external endpoints such as JMS or HTTP then the consumer will create a new unit of work, with the message it received as input as the original input. Also some EIP patterns such as splitter and multicast will create a new unit of work boundary for the messages in their sub-route (e.g. the split message); however these EIPs have an option named shareUnitOfWork which allows combining with the parent unit of work in regard to error handling and therefore using the parent original message. By default this feature is off.", displayName = "Use Original Body"), @YamlProperty(name = "useOriginalMessage", type = "boolean", defaultValue = "false", description = "Will use the original input org.apache.camel.Message (original body and headers) when an org.apache.camel.Exchange is moved to the dead letter queue. Notice: this only applies when all redelivery attempts have failed and the org.apache.camel.Exchange is doomed for failure. Instead of using the current in-progress org.apache.camel.Exchange IN message we use the original IN message instead. This allows you to store the original input in the dead letter queue instead of the in-progress snapshot of the IN message. For instance, if your route transforms the IN body during routing and then fails, with the original exchange stored in the dead letter queue it might be easier to manually resubmit the org.apache.camel.Exchange again, as the IN message is the same as when Camel received it. So you should be able to send the org.apache.camel.Exchange to the same input. The difference between useOriginalMessage and useOriginalBody is that the former includes both the original body and headers, whereas the latter only includes the original body. You can use the latter to enrich the message with custom headers and include the original message body. The former won't let you do this, as it's using the original message body and headers as they are. You cannot enable both useOriginalMessage and useOriginalBody. The original input message is defensively copied, and the copied message body is converted to org.apache.camel.StreamCache if possible (stream caching is enabled, can be disabled globally or on the original route), to ensure the body can be read when the original message is being used later. If the body is converted to org.apache.camel.StreamCache then the message body on the current org.apache.camel.Exchange is replaced with the org.apache.camel.StreamCache body. If the body is not converted to org.apache.camel.StreamCache then the body will not be re-readable when accessed later. Important: The original input means the input message that is bound by the current org.apache.camel.spi.UnitOfWork. 
A unit of work typically spans one route, or multiple routes if they are connected using internal endpoints such as direct or seda. When messages are passed via external endpoints such as JMS or HTTP then the consumer will create a new unit of work, with the message it received as input as the original input. Also some EIP patterns such as splitter and multicast will create a new unit of work boundary for the messages in their sub-route (e.g. the split message); however these EIPs have an option named shareUnitOfWork which allows combining with the parent unit of work in regard to error handling and therefore using the parent original message. By default this feature is off.", displayName = "Use Original Message") } ) public static
JsonPathExpressionDeserializer
java
quarkusio__quarkus
extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/mtls/MtlsRequestWithTlsRegistryTest.java
{ "start": 903, "end": 2679 }
class ____ { private static final String configuration = """ # Server needs both the key store and the trust store quarkus.tls.key-store.jks.path=target/certs/mtls-test-keystore.jks quarkus.tls.key-store.jks.password=secret quarkus.tls.trust-store.jks.path=target/certs/mtls-test-server-truststore.jks quarkus.tls.trust-store.jks.password=secret quarkus.http.ssl.client-auth=REQUEST quarkus.http.auth.permission.all.paths=/* quarkus.http.auth.permission.all.policy=authenticated """; @TestHTTPResource(value = "/mtls", tls = true) URL url; @RegisterExtension static final QuarkusUnitTest config = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar .addClasses(MyBean.class) .addAsResource(new StringAsset(configuration), "application.properties") .addAsResource(new File("target/certs/mtls-test-keystore.jks"), "server-keystore.jks") .addAsResource(new File("target/certs/mtls-test-server-truststore.jks"), "server-truststore.jks")); @Test public void testClientAuthentication() { RestAssured.given() .keyStore("target/certs/mtls-test-client-keystore.jks", "secret") .trustStore("target/certs/mtls-test-client-truststore.jks", "secret") .get(url).then().statusCode(200).body(is("CN=localhost")); } @Test public void testNoClientCert() { RestAssured.given() .trustStore("target/certs/mtls-test-client-truststore.jks", "secret") .get(url).then().statusCode(401); } @ApplicationScoped static
MtlsRequestWithTlsRegistryTest
java
elastic__elasticsearch
modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WhitespaceTokenizerFactory.java
{ "start": 939, "end": 1515 }
class ____ extends AbstractTokenizerFactory { static final String MAX_TOKEN_LENGTH = "max_token_length"; private Integer maxTokenLength; WhitespaceTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(name); maxTokenLength = settings.getAsInt(MAX_TOKEN_LENGTH, StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH); } @Override public Tokenizer create() { return new WhitespaceTokenizer(TokenStream.DEFAULT_TOKEN_ATTRIBUTE_FACTORY, maxTokenLength); } }
WhitespaceTokenizerFactory
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/EmbeddableInheritanceHierarchyOrderTest.java
{ "start": 3717, "end": 3927 }
class ____ extends Mammal { //private int mouse; // [...] public Cat() { super(); } public Cat(int age, String name, String mother) { super( age, name, mother ); } } @Embeddable static
Cat
java
quarkusio__quarkus
independent-projects/bootstrap/app-model/src/main/java/io/quarkus/paths/SharedArchivePathTree.java
{ "start": 2354, "end": 4682 }
class ____ extends OpenArchivePathTree { private final AtomicInteger users = new AtomicInteger(1); protected SharedOpenArchivePathTree(FileSystem fs) { super(fs); openCount.incrementAndGet(); } /** * Returns a new handle for this open archive tree to the caller * as long as this open archive tree is still open and is still * the last one that was open for this archive. Otherwise, the method * will return null. * * @return a new instance of {@link CallerOpenPathTree} or null, * if the current open archive tree has been closed or another open * archive tree has been created for this archive */ private CallerOpenPathTree acquire() { readLock().lock(); try { final boolean result = lastOpen == this && isOpen(); if (result) { users.incrementAndGet(); return new CallerOpenPathTree(this); } } finally { readLock().unlock(); } return null; } @Override public OpenPathTree open() { return SharedArchivePathTree.this.open(); } @Override public void close() throws IOException { writeLock().lock(); final boolean close = users.decrementAndGet() == 0; try { if (close) { if (lastOpen == this) { lastOpen = null; } if (openCount.decrementAndGet() == 0) { removeFromCache(archive); } super.close(); } } finally { writeLock().unlock(); } } @Override public String toString() { return SharedArchivePathTree.this.toString(); } } /** * This is a caller "view" of an underlying {@link OpenPathTree} instance that * delegates only the first {@link #close()} call by the caller to the underlying {@link OpenPathTree} instance * with subsequent {@link #close()} calls ignored. */ private static
SharedOpenArchivePathTree
java
spring-projects__spring-framework
spring-context/src/test/java/org/springframework/context/annotation/EnableAspectJAutoProxyTests.java
{ "start": 4100, "end": 4248 }
class ____ { } @Import({ServiceInvocationCounter.class, StubFooDao.class}) @EnableAspectJAutoProxy(exposeProxy = true) static
ConfigWithCglibProxy
java
apache__avro
lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyRecordReader.java
{ "start": 1212, "end": 2309 }
class ____<T> extends AvroRecordReaderBase<AvroKey<T>, NullWritable, T> { private static final Logger LOG = LoggerFactory.getLogger(AvroKeyRecordReader.class); /** A reusable object to hold records of the Avro container file. */ private final AvroKey<T> mCurrentRecord; /** * Constructor. * * @param readerSchema The reader schema to use for the records in the Avro * container file. */ public AvroKeyRecordReader(Schema readerSchema) { super(readerSchema); mCurrentRecord = new AvroKey<>(null); } /** {@inheritDoc} */ @Override public boolean nextKeyValue() throws IOException, InterruptedException { boolean hasNext = super.nextKeyValue(); mCurrentRecord.datum(getCurrentRecord()); return hasNext; } /** {@inheritDoc} */ @Override public AvroKey<T> getCurrentKey() throws IOException, InterruptedException { return mCurrentRecord; } /** {@inheritDoc} */ @Override public NullWritable getCurrentValue() throws IOException, InterruptedException { return NullWritable.get(); } }
AvroKeyRecordReader
java
alibaba__druid
core/src/main/java/com/alibaba/druid/sql/dialect/mysql/ast/statement/MySqlOptimizeStatement.java
{ "start": 867, "end": 1900 }
class ____ extends MySqlStatementImpl { private boolean noWriteToBinlog; private boolean local; protected final List<SQLExprTableSource> tableSources = new ArrayList<SQLExprTableSource>(); public void accept0(MySqlASTVisitor visitor) { if (visitor.visit(this)) { acceptChild(visitor, tableSources); } visitor.endVisit(this); } public boolean isNoWriteToBinlog() { return noWriteToBinlog; } public void setNoWriteToBinlog(boolean noWriteToBinlog) { this.noWriteToBinlog = noWriteToBinlog; } public boolean isLocal() { return local; } public void setLocal(boolean local) { this.local = local; } public List<SQLExprTableSource> getTableSources() { return tableSources; } public void addTableSource(SQLExprTableSource tableSource) { if (tableSource != null) { tableSource.setParent(this); } this.tableSources.add(tableSource); } }
MySqlOptimizeStatement
java
square__retrofit
retrofit/java-test/src/test/java/retrofit2/RequestFactoryTest.java
{ "start": 50619, "end": 51095 }
class ____ { @GET Call<ResponseBody> method(@Url String url) { return null; } } Request request = buildRequest(Example.class, "http://example.com/foo/bar/"); assertThat(request.method()).isEqualTo("GET"); assertThat(request.headers().size()).isEqualTo(0); assertThat(request.url().toString()).isEqualTo("http://example.com/foo/bar/"); assertThat(request.body()).isNull(); } @Test public void getWithHttpUrl() {
Example
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/sql/exec/internal/JdbcParameterBindingImpl.java
{ "start": 401, "end": 1353 }
class ____ implements JdbcParameterBinding { private final JdbcMapping jdbcMapping; private final Object bindValue; public JdbcParameterBindingImpl(JdbcMapping jdbcMapping, Object bindValue) { assert bindValue == null || jdbcMapping == null || jdbcMapping.getJdbcJavaType().isInstance( bindValue ) || jdbcMapping.getJdbcJavaType() instanceof BasicPluralJavaType<?> pluralJavaType && bindValue instanceof Object[] objects && Arrays.stream( objects ).allMatch( pluralJavaType.getElementJavaType()::isInstance ) : String.format( Locale.ROOT, "Unexpected value type (expected : %s) : %s (%s)", jdbcMapping.getJdbcJavaType().getJavaTypeClass().getName(), bindValue, bindValue.getClass().getName() ); this.jdbcMapping = jdbcMapping; this.bindValue = bindValue; } @Override public JdbcMapping getBindType() { return jdbcMapping; } @Override public Object getBindValue() { return bindValue; } }
JdbcParameterBindingImpl
java
apache__kafka
metadata/src/main/java/org/apache/kafka/metadata/placement/PartitionAssignment.java
{ "start": 1171, "end": 1239 }
class ____ immutable. Its internal state does not change. */ public
is
java
spring-projects__spring-boot
core/spring-boot/src/test/java/org/springframework/boot/context/properties/bind/BindConverterTests.java
{ "start": 10611, "end": 11379 }
class ____ implements ConversionService { @Override public boolean canConvert(@Nullable Class<?> sourceType, Class<?> targetType) { throw new AssertionError("Should not call conversion service"); } @Override public boolean canConvert(@Nullable TypeDescriptor sourceType, TypeDescriptor targetType) { throw new AssertionError("Should not call conversion service"); } @Override public <T> T convert(@Nullable Object source, Class<T> targetType) { throw new AssertionError("Should not call conversion service"); } @Override public Object convert(@Nullable Object source, @Nullable TypeDescriptor sourceType, TypeDescriptor targetType) { throw new AssertionError("Should not call conversion service"); } } }
ThrowingConversionService
java
assertj__assertj-core
assertj-core/src/test/java/org/assertj/core/internal/maps/Maps_assertHasValueSatisfying_Test.java
{ "start": 1445, "end": 2744 }
class ____ extends MapsBaseTest { private final Condition<String> isGreen = new Condition<String>("green color condition") { @Override public boolean matches(String value) { return "green".equals(value); } }; private final Condition<Object> isBlack = new Condition<Object>("black color condition") { @Override public boolean matches(Object value) { return "black".equals(value); } }; @Test void should_fail_if_condition_is_null() { assertThatNullPointerException().isThrownBy(() -> maps.assertHasValueSatisfying(INFO, actual, null)) .withMessage("The condition to evaluate should not be null"); } @Test void should_fail_if_actual_is_null() { assertThatAssertionErrorIsThrownBy(() -> maps.assertHasValueSatisfying(INFO, null, isGreen)).withMessage(actualIsNull()); } @Test void should_fail_if_actual_does_not_contain_value_matching_condition() { expectAssertionError(() -> maps.assertHasValueSatisfying(INFO, actual, isBlack)); verify(failures).failure(INFO, shouldContainValue(actual, isBlack)); } @Test void should_pass_if_actual_contains_a_value_matching_the_given_condition() { maps.assertHasValueSatisfying(INFO, actual, isGreen); } }
Maps_assertHasValueSatisfying_Test
java
apache__maven
impl/maven-impl/src/test/java/org/apache/maven/impl/model/DefaultModelInterpolatorTest.java
{ "start": 2505, "end": 25039 }
class ____ { Map<String, String> context; ModelInterpolator interpolator; Session session; AtomicReference<Path> rootDirectory; // used in TestRootLocator below @BeforeEach public void setUp() { context = new HashMap<>(); context.put("basedir", "myBasedir"); context.put("anotherdir", "anotherBasedir"); context.put("project.baseUri", "myBaseUri"); session = ApiRunner.createSession(injector -> { injector.bindInstance(DefaultModelInterpolatorTest.class, this); }); interpolator = session.getService(Lookup.class).lookup(DefaultModelInterpolator.class); } protected void assertProblemFree(SimpleProblemCollector collector) { assertEquals(0, collector.getErrors().size(), "Expected no errors"); assertEquals(0, collector.getWarnings().size(), "Expected no warnings"); assertEquals(0, collector.getFatals().size(), "Expected no fatals"); } @SuppressWarnings("SameParameterValue") protected void assertCollectorState( int numFatals, int numErrors, int numWarnings, SimpleProblemCollector collector) { assertEquals(numErrors, collector.getErrors().size(), "Errors"); assertEquals(numWarnings, collector.getWarnings().size(), "Warnings"); assertEquals(numFatals, collector.getFatals().size(), "Fatals"); } private ModelBuilderRequest.ModelBuilderRequestBuilder createModelBuildingRequest(Map<String, String> p) { ModelBuilderRequest.ModelBuilderRequestBuilder config = ModelBuilderRequest.builder() .session(session) .requestType(ModelBuilderRequest.RequestType.BUILD_PROJECT); if (p != null) { config.systemProperties(p); } return config; } @Test public void testDefaultBuildTimestampFormatShouldFormatTimeIn24HourFormat() { Calendar cal = Calendar.getInstance(); cal.setTimeZone(TimeZone.getTimeZone("Etc/UTC")); cal.set(Calendar.HOUR, 12); cal.set(Calendar.AM_PM, Calendar.AM); // just to make sure all the bases are covered... cal.set(Calendar.HOUR_OF_DAY, 0); cal.set(Calendar.MINUTE, 16); cal.set(Calendar.SECOND, 0); cal.set(Calendar.YEAR, 1976); cal.set(Calendar.MONTH, Calendar.NOVEMBER); cal.set(Calendar.DATE, 11); Instant firstTestDate = Instant.ofEpochMilli(cal.getTime().getTime()); cal.set(Calendar.HOUR, 11); cal.set(Calendar.AM_PM, Calendar.PM); // just to make sure all the bases are covered... 
cal.set(Calendar.HOUR_OF_DAY, 23); Instant secondTestDate = Instant.ofEpochMilli(cal.getTime().getTime()); DateTimeFormatter format = DateTimeFormatter.ofPattern(MavenBuildTimestamp.DEFAULT_BUILD_TIMESTAMP_FORMAT) .withZone(ZoneId.of("UTC")); assertEquals("1976-11-11T00:16:00Z", format.format(firstTestDate)); assertEquals("1976-11-11T23:16:00Z", format.format(secondTestDate)); } @Test public void testDefaultBuildTimestampFormatWithLocalTimeZoneMidnightRollover() { Calendar cal = Calendar.getInstance(); cal.setTimeZone(TimeZone.getTimeZone("Europe/Berlin")); cal.set(Calendar.HOUR_OF_DAY, 1); cal.set(Calendar.MINUTE, 16); cal.set(Calendar.SECOND, 0); cal.set(Calendar.YEAR, 2014); cal.set(Calendar.MONTH, Calendar.JUNE); cal.set(Calendar.DATE, 16); Instant firstTestDate = Instant.ofEpochMilli(cal.getTime().getTime()); cal.set(Calendar.MONTH, Calendar.NOVEMBER); Instant secondTestDate = Instant.ofEpochMilli(cal.getTime().getTime()); DateTimeFormatter format = DateTimeFormatter.ofPattern(MavenBuildTimestamp.DEFAULT_BUILD_TIMESTAMP_FORMAT) .withZone(ZoneId.of("UTC")); assertEquals("2014-06-15T23:16:00Z", format.format(firstTestDate)); assertEquals("2014-11-16T00:16:00Z", format.format(secondTestDate)); } @Test public void testShouldNotThrowExceptionOnReferenceToNonExistentValue() throws Exception { Scm scm = Scm.newBuilder().connection("${test}/somepath").build(); Model model = Model.newBuilder().scm(scm).build(); final SimpleProblemCollector collector = new SimpleProblemCollector(); Model out = interpolator.interpolateModel( model, Paths.get("."), createModelBuildingRequest(context).build(), collector); assertProblemFree(collector); assertEquals("${test}/somepath", out.getScm().getConnection()); } @Test public void testShouldThrowExceptionOnRecursiveScmConnectionReference() throws Exception { Scm scm = Scm.newBuilder() .connection("${project.scm.connection}/somepath") .build(); Model model = Model.newBuilder().scm(scm).build(); final SimpleProblemCollector collector = new SimpleProblemCollector(); interpolator.interpolateModel( model, null, createModelBuildingRequest(context).build(), collector); assertCollectorState(0, 1, 0, collector); } @Test public void testShouldNotThrowExceptionOnReferenceToValueContainingNakedExpression() throws Exception { Scm scm = Scm.newBuilder().connection("${test}/somepath").build(); Map<String, String> props = new HashMap<>(); props.put("test", "test"); Model model = Model.newBuilder().scm(scm).properties(props).build(); final SimpleProblemCollector collector = new SimpleProblemCollector(); Model out = interpolator.interpolateModel( model, Paths.get("."), createModelBuildingRequest(context).build(), collector); assertProblemFree(collector); assertEquals("test/somepath", out.getScm().getConnection()); } @Test void shouldInterpolateOrganizationNameCorrectly() throws Exception { String orgName = "MyCo"; Model model = Model.newBuilder() .name("${project.organization.name} Tools") .organization(Organization.newBuilder().name(orgName).build()) .build(); Model out = interpolator.interpolateModel( model, Paths.get("."), createModelBuildingRequest(context).build(), new SimpleProblemCollector()); assertEquals(orgName + " Tools", out.getName()); } @Test public void shouldInterpolateDependencyVersionToSetSameAsProjectVersion() throws Exception { Model model = Model.newBuilder() .version("3.8.1") .dependencies(Collections.singletonList( Dependency.newBuilder().version("${project.version}").build())) .build(); final SimpleProblemCollector collector = new 
SimpleProblemCollector(); Model out = interpolator.interpolateModel( model, Paths.get("."), createModelBuildingRequest(context).build(), collector); assertCollectorState(0, 0, 0, collector); assertEquals("3.8.1", (out.getDependencies().get(0)).getVersion()); } @Test public void testShouldNotInterpolateDependencyVersionWithInvalidReference() throws Exception { Model model = Model.newBuilder() .version("3.8.1") .dependencies(Collections.singletonList( Dependency.newBuilder().version("${something}").build())) .build(); final SimpleProblemCollector collector = new SimpleProblemCollector(); Model out = interpolator.interpolateModel( model, Paths.get("."), createModelBuildingRequest(context).build(), collector); assertProblemFree(collector); assertEquals("${something}", (out.getDependencies().get(0)).getVersion()); } @Test public void testTwoReferences() throws Exception { Model model = Model.newBuilder() .version("3.8.1") .artifactId("foo") .dependencies(Collections.singletonList(Dependency.newBuilder() .version("${project.artifactId}-${project.version}") .build())) .build(); final SimpleProblemCollector collector = new SimpleProblemCollector(); Model out = interpolator.interpolateModel( model, Paths.get("."), createModelBuildingRequest(context).build(), collector); assertCollectorState(0, 0, 0, collector); assertEquals("foo-3.8.1", (out.getDependencies().get(0)).getVersion()); } @Test public void testProperty() throws Exception { Model model = Model.newBuilder() .version("3.8.1") .artifactId("foo") .repositories(Collections.singletonList(Repository.newBuilder() .url("file://localhost/${anotherdir}/temp-repo") .build())) .build(); final SimpleProblemCollector collector = new SimpleProblemCollector(); Model out = interpolator.interpolateModel( model, Paths.get("projectBasedir"), createModelBuildingRequest(context).build(), collector); assertProblemFree(collector); assertEquals( "file://localhost/anotherBasedir/temp-repo", (out.getRepositories().get(0)).getUrl()); } @Test public void testBasedirUnx() throws Exception { FileSystem fs = Jimfs.newFileSystem(Configuration.unix()); Path projectBasedir = fs.getPath("projectBasedir"); Model model = Model.newBuilder() .version("3.8.1") .artifactId("foo") .repositories(Collections.singletonList( Repository.newBuilder().url("${basedir}/temp-repo").build())) .build(); final SimpleProblemCollector collector = new SimpleProblemCollector(); Model out = interpolator.interpolateModel( model, projectBasedir, createModelBuildingRequest(context).build(), collector); assertProblemFree(collector); assertEquals( projectBasedir.toAbsolutePath() + "/temp-repo", (out.getRepositories().get(0)).getUrl()); } @Test public void testBasedirWin() throws Exception { FileSystem fs = Jimfs.newFileSystem(Configuration.windows()); Path projectBasedir = fs.getPath("projectBasedir"); Model model = Model.newBuilder() .version("3.8.1") .artifactId("foo") .repositories(Collections.singletonList( Repository.newBuilder().url("${basedir}/temp-repo").build())) .build(); final SimpleProblemCollector collector = new SimpleProblemCollector(); Model out = interpolator.interpolateModel( model, projectBasedir, createModelBuildingRequest(context).build(), collector); assertProblemFree(collector); assertEquals( projectBasedir.toAbsolutePath() + "/temp-repo", (out.getRepositories().get(0)).getUrl()); } @Test public void testBaseUri() throws Exception { Path projectBasedir = Paths.get("projectBasedir"); Model model = Model.newBuilder() .version("3.8.1") .artifactId("foo") 
.repositories(Collections.singletonList(Repository.newBuilder() .url("${project.baseUri}/temp-repo") .build())) .build(); final SimpleProblemCollector collector = new SimpleProblemCollector(); Model out = interpolator.interpolateModel( model, projectBasedir, createModelBuildingRequest(context).build(), collector); assertProblemFree(collector); assertEquals( projectBasedir.resolve("temp-repo").toUri().toString(), (out.getRepositories().get(0)).getUrl()); } @Test void testRootDirectory() throws Exception { Path rootDirectory = Paths.get("myRootDirectory"); Model model = Model.newBuilder() .version("3.8.1") .artifactId("foo") .repositories(Collections.singletonList(Repository.newBuilder() .url("file:${project.rootDirectory}/temp-repo") .build())) .build(); final SimpleProblemCollector collector = new SimpleProblemCollector(); Model out = interpolator.interpolateModel( model, rootDirectory, createModelBuildingRequest(context).build(), collector); assertProblemFree(collector); assertEquals("file:myRootDirectory/temp-repo", (out.getRepositories().get(0)).getUrl()); } @Test void testRootDirectoryWithUri() throws Exception { Path rootDirectory = Paths.get("myRootDirectory"); Model model = Model.newBuilder() .version("3.8.1") .artifactId("foo") .repositories(Collections.singletonList(Repository.newBuilder() .url("${project.rootDirectory.uri}/temp-repo") .build())) .build(); final SimpleProblemCollector collector = new SimpleProblemCollector(); Model out = interpolator.interpolateModel( model, rootDirectory, createModelBuildingRequest(context).build(), collector); assertProblemFree(collector); assertEquals( rootDirectory.resolve("temp-repo").toUri().toString(), (out.getRepositories().get(0)).getUrl()); } @Test void testRootDirectoryWithNull() throws Exception { Path projectDirectory = Paths.get("myProjectDirectory"); this.rootDirectory = new AtomicReference<>(null); Model model = Model.newBuilder() .version("3.8.1") .artifactId("foo") .repositories(Collections.singletonList(Repository.newBuilder() .url("file:///${project.rootDirectory}/temp-repo") .build())) .build(); final SimpleProblemCollector collector = new SimpleProblemCollector(); IllegalStateException e = assertThrows( IllegalStateException.class, () -> interpolator.interpolateModel( model, projectDirectory, createModelBuildingRequest(context).build(), collector)); assertEquals(RootLocator.UNABLE_TO_FIND_ROOT_PROJECT_MESSAGE, e.getMessage()); } @Test public void testEnvars() throws Exception { context.put("env.HOME", "/path/to/home"); Map<String, String> modelProperties = new HashMap<>(); modelProperties.put("outputDirectory", "${env.HOME}"); Model model = Model.newBuilder().properties(modelProperties).build(); final SimpleProblemCollector collector = new SimpleProblemCollector(); Model out = interpolator.interpolateModel( model, Paths.get("."), createModelBuildingRequest(context).build(), collector); assertProblemFree(collector); assertEquals("/path/to/home", out.getProperties().get("outputDirectory")); } @Test public void envarExpressionThatEvaluatesToNullReturnsTheLiteralString() throws Exception { Map<String, String> modelProperties = new HashMap<>(); modelProperties.put("outputDirectory", "${env.DOES_NOT_EXIST}"); Model model = Model.newBuilder().properties(modelProperties).build(); final SimpleProblemCollector collector = new SimpleProblemCollector(); Model out = interpolator.interpolateModel( model, Paths.get("."), createModelBuildingRequest(context).build(), collector); assertProblemFree(collector); 
assertEquals("${env.DOES_NOT_EXIST}", out.getProperties().get("outputDirectory")); } @Test public void expressionThatEvaluatesToNullReturnsTheLiteralString() throws Exception { Map<String, String> modelProperties = new HashMap<>(); modelProperties.put("outputDirectory", "${DOES_NOT_EXIST}"); Model model = Model.newBuilder().properties(modelProperties).build(); final SimpleProblemCollector collector = new SimpleProblemCollector(); Model out = interpolator.interpolateModel( model, Paths.get("."), createModelBuildingRequest(context).build(), collector); assertProblemFree(collector); assertEquals("${DOES_NOT_EXIST}", out.getProperties().get("outputDirectory")); } @Test public void shouldInterpolateSourceDirectoryReferencedFromResourceDirectoryCorrectly() throws Exception { Model model = Model.newBuilder() .build(Build.newBuilder() .sourceDirectory("correct") .resources(List.of(Resource.newBuilder() .directory("${project.build.sourceDirectory}") .build())) .build()) .build(); final SimpleProblemCollector collector = new SimpleProblemCollector(); Model out = interpolator.interpolateModel( model, null, createModelBuildingRequest(context).build(), collector); assertCollectorState(0, 0, 0, collector); List<Resource> outResources = out.getBuild().getResources(); Iterator<Resource> resIt = outResources.iterator(); assertEquals(model.getBuild().getSourceDirectory(), resIt.next().getDirectory()); } @Test public void shouldInterpolateUnprefixedBasedirExpression() throws Exception { Path basedir = Paths.get("/test/path"); Model model = Model.newBuilder() .dependencies(Collections.singletonList(Dependency.newBuilder() .systemPath("${basedir}/artifact.jar") .build())) .build(); final SimpleProblemCollector collector = new SimpleProblemCollector(); Model result = interpolator.interpolateModel( model, basedir, createModelBuildingRequest(context).build(), collector); assertProblemFree(collector); List<Dependency> rDeps = result.getDependencies(); assertNotNull(rDeps); assertEquals(1, rDeps.size()); assertEquals( basedir.resolve("artifact.jar").toAbsolutePath(), Paths.get(rDeps.get(0).getSystemPath()).toAbsolutePath()); } @Test public void testRecursiveExpressionCycleNPE() throws Exception { Map<String, String> props = new HashMap<>(); props.put("aa", "${bb}"); props.put("bb", "${aa}"); Model model = Model.newBuilder().properties(props).build(); SimpleProblemCollector collector = new SimpleProblemCollector(); ModelBuilderRequest request = createModelBuildingRequest(Map.of()).build(); interpolator.interpolateModel(model, null, request, collector); assertCollectorState(0, 2, 0, collector); assertTrue(collector.getErrors().get(0).contains("recursive variable reference")); } @Disabled("per def cannot be recursive: ${basedir} is immediately going for project.basedir") @Test public void testRecursiveExpressionCycleBaseDir() throws Exception { Map<String, String> props = new HashMap<>(); props.put("basedir", "${basedir}"); ModelBuilderRequest request = createModelBuildingRequest(Map.of()).build(); Model model = Model.newBuilder().properties(props).build(); SimpleProblemCollector collector = new SimpleProblemCollector(); ModelInterpolator interpolator = this.interpolator; interpolator.interpolateModel(model, null, request, collector); assertCollectorState(0, 1, 0, collector); assertEquals( "recursive variable reference: basedir", collector.getErrors().get(0)); } @Test void shouldIgnorePropertiesWithPomPrefix() throws Exception { final String orgName = "MyCo"; final String uninterpolatedName = "${pom.organization.name} 
Tools"; Model model = Model.newBuilder() .name(uninterpolatedName) .organization(Organization.newBuilder().name(orgName).build()) .build(); SimpleProblemCollector collector = new SimpleProblemCollector(); Model out = interpolator.interpolateModel( model, null, createModelBuildingRequest(context).build(), // .validationLevel(ModelBuilderRequest.VALIDATION_LEVEL_MAVEN_4_0), collector); assertCollectorState(0, 0, 0, collector); assertEquals(uninterpolatedName, out.getName()); } @Test void testProjectUrlPropertyDoesNotCauseRecursion() throws Exception { // GH-11384: ${project.url} should resolve to the property "project.url" before // trying to resolve via model reflection, which would cause recursion Map<String, String> modelProperties = new HashMap<>(); modelProperties.put("project.url", "https://github.com/slackapi/java-slack-sdk"); Model model = Model.newBuilder() .url("${project.url}") .properties(modelProperties) .build(); SimpleProblemCollector collector = new SimpleProblemCollector(); Model out = interpolator.interpolateModel( model, null, createModelBuildingRequest(context).build(), collector); assertProblemFree(collector); assertEquals("https://github.com/slackapi/java-slack-sdk", out.getUrl()); } @Provides @Priority(10) @SuppressWarnings("unused") RootLocator testRootLocator() { return new RootLocator() { @Override public Path findRoot(Path basedir) { return rootDirectory != null ? rootDirectory.get() : basedir; } @Override public Path findMandatoryRoot(Path basedir) { return Optional.ofNullable(findRoot(basedir)) .orElseThrow(() -> new IllegalStateException(getNoRootMessage())); } }; } }
DefaultModelInterpolatorTest
java
apache__camel
components/camel-jetty/src/test/java/org/apache/camel/component/jetty/MultiPartFormTest.java
{ "start": 1714, "end": 5599 }
class ____ extends BaseJettyTest { private HttpEntity createMultipartRequestEntity() { File file = new File("src/test/resources/log4j2.properties"); return MultipartEntityBuilder.create().addTextBody("comment", "A binary file of some kind") .addBinaryBody(file.getName(), file).build(); } @Test public void testSendMultiPartForm() throws Exception { HttpPost post = new HttpPost("http://localhost:" + getPort() + "/test"); post.setEntity(createMultipartRequestEntity()); try (CloseableHttpClient client = HttpClients.createDefault(); CloseableHttpResponse response = client.execute(post)) { int status = response.getCode(); assertEquals(200, status, "Get a wrong response status"); String result = IOHelper.loadText(response.getEntity().getContent()).trim(); assertEquals("A binary file of some kind", result, "Get a wrong result"); } } @Test public void testSendMultiPartFormFromCamelHttpComponent() { String result = template.requestBody("http://localhost:" + getPort() + "/test", createMultipartRequestEntity(), String.class); assertEquals("A binary file of some kind", result, "Get a wrong result"); } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { public void configure() { // START SNIPPET: e1 // Set the jetty temp directory which stores the file for multi // part form // camel-jetty will clean up the file after it handled the // request. // The option works correctly since Camel 2.4.0 getContext().getGlobalOptions().put("CamelJettyTempDir", "target"); from("jetty://http://localhost:{{port}}/test").process(new Processor() { public void process(Exchange exchange) throws Exception { AttachmentMessage in = exchange.getIn(AttachmentMessage.class); assertEquals(2, in.getAttachments().size(), "Get a wrong attachment size"); // The file name is the attachment id DataHandler data = in.getAttachment("log4j2.properties"); assertNotNull(data, "Should get the DataHandler log4j2.properties"); // This assert is wrong, but the correct content-type // (application/octet-stream) // will not be returned until Jetty makes it available - // currently the content-type // returned is just the default for FileDataHandler (for // the implementation being used) // assertEquals("Get a wrong content type", // "text/plain", data.getContentType()); assertEquals("log4j2.properties", data.getName(), "Got the wrong name"); assertTrue(data.getDataSource().getInputStream().read() != -1, "We should get the data from the DataHandler"); // The other form data can be read from the message // header // For binary attachments, the header should also be // populated by a DataHandler but not the payload Object header = in.getHeader("log4j2.properties"); assertEquals(DataHandler.class, header.getClass()); assertEquals(data, header); exchange.getMessage().setBody(in.getHeader("comment")); } }); // END SNIPPET: e1 } }; } }
MultiPartFormTest
java
elastic__elasticsearch
server/src/test/java/org/elasticsearch/cluster/ClusterChangedEventTests.java
{ "start": 38145, "end": 38718 }
class ____ extends TestClusterCustomMetadata { protected CustomClusterMetadata1(String data) { super(data); } @Override public String getWriteableName() { return "c1"; } @Override public TransportVersion getMinimalSupportedVersion() { return TransportVersion.current(); } @Override public EnumSet<Metadata.XContentContext> context() { return EnumSet.of(Metadata.XContentContext.GATEWAY); } } private static
CustomClusterMetadata1
java
apache__flink
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/table/AsyncCalcITCase.java
{ "start": 2297, "end": 13397 }
class ____ extends StreamingTestBase { private TableEnvironment tEnv; @BeforeEach public void before() throws Exception { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setParallelism(2); tEnv = StreamTableEnvironment.create(env, EnvironmentSettings.inStreamingMode()); tEnv.getConfig() .set(ExecutionConfigOptions.TABLE_EXEC_ASYNC_SCALAR_MAX_CONCURRENT_OPERATIONS, 2); tEnv.getConfig() .set(ExecutionConfigOptions.TABLE_EXEC_ASYNC_SCALAR_TIMEOUT, Duration.ofMinutes(1)); } @Test public void testSimpleTableSelect() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select func(f1) from t1"); final List<Row> expectedRows = Arrays.asList(Row.of("val 1"), Row.of("val 2"), Row.of("val 3")); assertThat(results).containsSequence(expectedRows); } @Test public void testLiteralPlusTableSelect() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select 'foo', func(f1) from t1"); final List<Row> expectedRows = Arrays.asList( Row.of("foo", "val 1"), Row.of("foo", "val 2"), Row.of("foo", "val 3")); assertThat(results).containsSequence(expectedRows); } @Test public void testFieldPlusTableSelect() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select f1, func(f1) from t1"); final List<Row> expectedRows = Arrays.asList(Row.of(1, "val 1"), Row.of(2, "val 2"), Row.of(3, "val 3")); assertThat(results).containsSequence(expectedRows); } @Test public void testTwoCalls() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select func(f1), func(f1) from t1"); final List<Row> expectedRows = Arrays.asList( Row.of("val 1", "val 1"), Row.of("val 2", "val 2"), Row.of("val 3", "val 3")); assertThat(results).containsSequence(expectedRows); } @Test public void testThreeNestedCalls() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFuncAdd10()); final List<Row> results = executeSql("select func(func(f1)), func(func(func(f1))), func(f1) from t1"); final List<Row> expectedRows = Arrays.asList(Row.of(21, 31, 11), Row.of(22, 32, 12), Row.of(23, 33, 13)); assertThat(results).containsSequence(expectedRows); } @Test public void testPassedToOtherUDF() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select Concat(func(f1), 'foo') from t1"); final List<Row> expectedRows = Arrays.asList(Row.of("val 1foo"), Row.of("val 2foo"), Row.of("val 3foo")); assertThat(results).containsSequence(expectedRows); } @Test public void testJustCall() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select func(1)"); final List<Row> expectedRows = Collections.singletonList(Row.of("val 1")); assertThat(results).containsSequence(expectedRows); } @Test public void testJoin() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); Table t2 = 
tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporaryView("t2", t2); tEnv.createTemporarySystemFunction("func", new Sum()); final List<Row> results = executeSql( "select * from t1 right join t2 on t1.f1 = t2.f1 WHERE t1.f1 = t2.f1 AND " + "func(t1.f1, t2.f1) > 5"); final List<Row> expectedRows = Collections.singletonList(Row.of(3, 3)); assertThat(results).containsSequence(expectedRows); } @Test public void testWhereConditionAndProjection() { Table t1 = tEnv.fromValues(1, 2, 3).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFunc()); final List<Row> results = executeSql("select func(f1) from t1 where REGEXP(func(f1), 'val (2|3)')"); final List<Row> expectedRows = Arrays.asList(Row.of("val 2"), Row.of("val 3")); assertThat(results).containsSequence(expectedRows); } @Test public void testFieldAccessAfter() { Table t1 = tEnv.fromValues(2).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFuncRow()); final List<Row> results = executeSql("select func(f1).f0 from t1"); final List<Row> expectedRows = Collections.singletonList(Row.of(3)); assertThat(results).containsSequence(expectedRows); } @Test public void testFieldOperand() { Table t1 = tEnv.fromValues(2).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFuncRow()); tEnv.createTemporarySystemFunction("func2", new AsyncFuncAdd10()); Table structs = tEnv.sqlQuery("select func(f1) from t1"); tEnv.createTemporaryView("t2", structs); final List<Row> results = executeSql("select func2(t2.f0) from t2"); final List<Row> expectedRows = Collections.singletonList(Row.of(13)); assertThat(results).containsSequence(expectedRows); } @Test public void testOverload() { Table t1 = tEnv.fromValues(1).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFuncOverload()); final List<Row> results = executeSql("select func(f1), func(cast(f1 as String)) from t1"); final List<Row> expectedRows = Collections.singletonList(Row.of("int version 1", "string version 1")); assertThat(results).containsSequence(expectedRows); } @Test public void testMultiLayerGeneric() { Table t1 = tEnv.fromValues(1).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new LongAsyncFuncGeneric()); final List<Row> results = executeSql("select func(f1) from t1"); final List<Row> expectedRows = Collections.singletonList(Row.of((Object) new Long[] {11L})); assertThat(results).containsSequence(expectedRows); } @Test public void testMultiLayerMoreGeneric() { Table t1 = tEnv.fromValues(1).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new LongAsyncFuncMoreGeneric()); final List<Row> results = executeSql("select func(f1) from t1"); final List<Row> expectedRows = Collections.singletonList(Row.of((Object) new Long[] {11L})); assertThat(results).containsSequence(expectedRows); } @Test public void testFailures() { // If there is a failure after hitting the end of the input, then it doesn't retry. Having // the buffer = 1 triggers the end input only after completion. 
tEnv.getConfig() .set(ExecutionConfigOptions.TABLE_EXEC_ASYNC_SCALAR_MAX_CONCURRENT_OPERATIONS, 1); Table t1 = tEnv.fromValues(1).as("f1"); tEnv.createTemporaryView("t1", t1); AsyncFuncFail func = new AsyncFuncFail(2); tEnv.createTemporarySystemFunction("func", func); final List<Row> results = executeSql("select func(f1) from t1"); final List<Row> expectedRows = Collections.singletonList(Row.of(3)); assertThat(results).containsSequence(expectedRows); } @Test public void testTableFuncWithAsyncCalc() { Table t1 = tEnv.fromValues(1, 2).as("f1"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new RandomTableFunction()); tEnv.createTemporarySystemFunction("addTen", new AsyncFuncAdd10()); final List<Row> results = executeSql("select * FROM t1, LATERAL TABLE(func(addTen(f1)))"); final List<Row> expectedRows = Arrays.asList( Row.of(1, "blah 11"), Row.of(1, "foo 11"), Row.of(2, "blah 12"), Row.of(2, "foo 12")); assertThat(results).containsSequence(expectedRows); } @Test public void testMultiArgumentAsyncWithAdditionalProjection() { // This was the cause of a bug previously where the reference to the sync projection was // getting garbled by janino. See issue https://issues.apache.org/jira/browse/FLINK-37721 Table t1 = tEnv.fromValues(row("a1", "b1", "c1"), row("a2", "b2", "c2")).as("f1", "f2", "f3"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFuncThreeParams()); final List<Row> results = executeSql("select f1, func(f1, f2, f3) FROM t1"); final List<Row> expectedRows = Arrays.asList(Row.of("a1", "val a1b1c1"), Row.of("a2", "val a2b2c2")); assertThat(results).containsSequence(expectedRows); } @Test public void testGroupBy() { Table t1 = tEnv.fromValues(row(1, 1), row(2, 2), row(1, 3)).as("f1", "f2"); tEnv.createTemporaryView("t1", t1); tEnv.createTemporarySystemFunction("func", new AsyncFuncAdd10()); final List<Row> results = executeSql("select f1, func(SUM(f2)) FROM t1 group by f1"); final List<Row> expectedRows = Arrays.asList( Row.of(1, 11), Row.of(2, 12), Row.ofKind(RowKind.UPDATE_BEFORE, 1, 11), Row.ofKind(RowKind.UPDATE_AFTER, 1, 14)); assertThat(results).containsSequence(expectedRows); } private List<Row> executeSql(String sql) { TableResult result = tEnv.executeSql(sql); final List<Row> rows = new ArrayList<>(); result.collect().forEachRemaining(rows::add); return rows; } /** Test function. */ public static
AsyncCalcITCase
java
elastic__elasticsearch
x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/ManageServiceProviderRestIT.java
{ "start": 1109, "end": 4638 }
class ____ extends IdpRestTestCase { // From build.gradle private final String IDP_ENTITY_ID = "https://idp.test.es.elasticsearch.org/"; // From SAMLConstants private final String REDIRECT_BINDING = "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"; @Before public void defineApplicationPrivileges() throws IOException { super.createApplicationPrivileges( "elastic-cloud", Map.ofEntries(Map.entry("deployment_admin", Set.of("sso:superuser")), Map.entry("deployment_viewer", Set.of("sso:viewer"))) ); } public void testCreateAndDeleteServiceProvider() throws Exception { final String entityId = "ec:" + randomAlphaOfLength(8) + ":" + randomAlphaOfLength(12); final Map<String, Object> request = Map.ofEntries( Map.entry("name", "Test SP"), Map.entry("acs", "https://sp1.test.es.elasticsearch.org/saml/acs"), Map.entry("privileges", Map.ofEntries(Map.entry("resource", entityId), Map.entry("roles", Set.of("role:(\\w+)")))), Map.entry( "attributes", Map.ofEntries( Map.entry("principal", "https://idp.test.es.elasticsearch.org/attribute/principal"), Map.entry("name", "https://idp.test.es.elasticsearch.org/attribute/name"), Map.entry("email", "https://idp.test.es.elasticsearch.org/attribute/email"), Map.entry("roles", "https://idp.test.es.elasticsearch.org/attribute/roles") ) ) ); final DocumentVersion docVersion = createServiceProvider(entityId, request); checkIndexDoc(docVersion); ensureGreen(SamlServiceProviderIndex.INDEX_NAME); getMetadata(entityId); deleteServiceProvider(entityId, docVersion); expectThrows(ResponseException.class, () -> getMetadata(entityId)); expectThrows(ResponseException.class, () -> deleteServiceProvider(entityId, docVersion)); } private void deleteServiceProvider(String entityId, DocumentVersion version) throws IOException { final Response response = client().performRequest( new Request("DELETE", "/_idp/saml/sp/" + encode(entityId) + "?refresh=" + RefreshPolicy.IMMEDIATE.getValue()) ); final Map<String, Object> map = entityAsMap(response); assertThat(ObjectPath.eval("document._id", map), equalTo(version.id)); Long seqNo = asLong(ObjectPath.eval("document._seq_no", map)); Long primaryTerm = asLong(ObjectPath.eval("document._primary_term", map)); if (primaryTerm == version.primaryTerm) { assertThat(seqNo, greaterThanOrEqualTo(version.seqNo)); } else { assertThat(primaryTerm, greaterThanOrEqualTo(version.primaryTerm)); } assertThat(ObjectPath.eval("service_provider.entity_id", map), equalTo(entityId)); } private void getMetadata(String entityId) throws IOException { final Map<String, Object> map = getAsMap("/_idp/saml/metadata/" + encode(entityId)); assertThat(map, notNullValue()); assertThat(map.keySet(), containsInAnyOrder("metadata")); final Object metadata = map.get("metadata"); assertThat(metadata, notNullValue()); assertThat(metadata, instanceOf(String.class)); assertThat((String) metadata, containsString(IDP_ENTITY_ID)); assertThat((String) metadata, containsString(REDIRECT_BINDING)); } }
ManageServiceProviderRestIT
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/UnnecessaryBoxedVariableTest.java
{ "start": 16264, "end": 16632 }
class ____ { public static final Integer FOO = 42; public int foo() { return FOO; } } """) .doTest(); } @Test public void privateConstant_usedBoxed_notRefactored() { compilationTestHelper .addSourceLines( "Test.java", """
Test
java
apache__camel
core/camel-core-model/src/main/java/org/apache/camel/model/loadbalancer/CustomLoadBalancerDefinition.java
{ "start": 1435, "end": 2809 }
class ____ extends LoadBalancerDefinition { @XmlTransient private LoadBalancer loadBalancer; @XmlAttribute(required = true) private String ref; public CustomLoadBalancerDefinition() { } protected CustomLoadBalancerDefinition(CustomLoadBalancerDefinition source) { super(source); this.loadBalancer = source.loadBalancer; this.ref = source.ref; } public CustomLoadBalancerDefinition(String ref) { this.ref = ref; } @Override public CustomLoadBalancerDefinition copyDefinition() { return new CustomLoadBalancerDefinition(this); } public String getRef() { return ref; } /** * Refers to the custom load balancer to lookup from the registry */ public void setRef(String ref) { this.ref = ref; } public LoadBalancer getCustomLoadBalancer() { return loadBalancer; } /** * The custom load balancer to use. */ public void setCustomLoadBalancer(LoadBalancer loadBalancer) { this.loadBalancer = loadBalancer; } @Override public String toString() { if (getCustomLoadBalancer() != null) { return "CustomLoadBalancer[" + getCustomLoadBalancer() + "]"; } else { return "CustomLoadBalancer[" + ref + "]"; } } }
CustomLoadBalancerDefinition
java
google__error-prone
check_api/src/main/java/com/google/errorprone/matchers/MethodHasParameters.java
{ "start": 926, "end": 1302 }
class ____ extends ChildMultiMatcher<MethodTree, VariableTree> { public MethodHasParameters(MatchType matchType, Matcher<VariableTree> nodeMatcher) { super(matchType, nodeMatcher); } @Override protected Iterable<? extends VariableTree> getChildNodes( MethodTree methodTree, VisitorState state) { return methodTree.getParameters(); } }
MethodHasParameters
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/onetomany/OrderItemID.java
{ "start": 256, "end": 866 }
class ____ implements Serializable { String schoolId; Integer academicYear; Integer dayNo; @Column( name = "Academic_Yr" ) public Integer getAcademicYear() { return this.academicYear; } public void setAcademicYear(Integer academicYear) { this.academicYear = academicYear; } @Column( name = "Day_No" ) public Integer getDayNo() { return this.dayNo; } public void setDayNo(Integer dayNo) { this.dayNo = dayNo; } @Column( name = "School_Id" ) public String getSchoolId() { return this.schoolId; } public void setSchoolId(String schoolId) { this.schoolId = schoolId; } }
OrderItemID
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/state/StateSnapshotContext.java
{ "start": 1139, "end": 1489 }
interface ____ extends FunctionSnapshotContext { /** Returns an output stream for keyed state */ KeyedStateCheckpointOutputStream getRawKeyedOperatorStateOutput() throws Exception; /** Returns an output stream for operator state */ OperatorStateCheckpointOutputStream getRawOperatorStateOutput() throws Exception; }
StateSnapshotContext
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/CheckReturnValueTest.java
{ "start": 11579, "end": 12221 }
class ____ { void m() { // BUG: Diagnostic contains: CheckReturnValue lib.Lib.f(); } } """) .doTest(); } // Don't match void-returning methods in packages with @CRV @Test public void voidReturningMethodInAnnotatedPackage() { compilationHelper .addSourceLines( "package-info.java", """ @com.google.errorprone.annotations.CheckReturnValue package lib; """) .addSourceLines( "lib/Lib.java", """ package lib; public
Test
java
eclipse-vertx__vert.x
vertx-core/src/test/java/io/vertx/it/json/CustomJsonFactoryTest.java
{ "start": 697, "end": 1179 }
class ____ extends VertxTestBase { @Test public void testJsonObject() { assertSame(CustomJsonFactory.CODEC, Json.CODEC); JsonObject obj = new JsonObject(); obj.put("foo", "bar"); assertEquals("{\"foo\":\"bar\"}", obj.toString()); } @Test public void testJsonArray() { assertSame(CustomJsonFactory.CODEC, Json.CODEC); JsonArray array = new JsonArray(); array.add("foo"); assertEquals("[\"foo\"]", array.toString()); } }
CustomJsonFactoryTest
java
apache__camel
core/camel-core/src/test/java/org/apache/camel/component/bean/BeanInfoOverloadedTest.java
{ "start": 1192, "end": 1886 }
class ____ extends ContextTestSupport { @Test public void testBeanInfoOverloaded() { BeanInfo beanInfo = new BeanInfo(context, Bean.class); Message message = new DefaultMessage(context); message.setBody(new RequestB()); Exchange exchange = new DefaultExchange(context); exchange.setIn(message); MethodInvocation methodInvocation = beanInfo.createInvocation(new Bean(), exchange); Method method = methodInvocation.getMethod(); assertEquals("doSomething", method.getName()); assertEquals(RequestB.class, method.getGenericParameterTypes()[0]); } @SuppressWarnings("Unused") static
BeanInfoOverloadedTest
java
FasterXML__jackson-databind
src/test/java/tools/jackson/databind/jsontype/jdk/TypedContainerSerTest.java
{ "start": 1562, "end": 1734 }
class ____ { Animal animal; public Animal getAnimal() { return animal; } public void setAnimal(Animal animal) { this.animal = animal; } } static
Container1
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/nfs3/TestClientAccessPrivilege.java
{ "start": 1977, "end": 4501 }
class ____ { static MiniDFSCluster cluster = null; static NfsConfiguration config = new NfsConfiguration(); static DistributedFileSystem hdfs; static NameNode nn; static String testdir = "/tmp"; static SecurityHandler securityHandler; @BeforeAll public static void setup() throws Exception { String currentUser = System.getProperty("user.name"); config.set(DefaultImpersonationProvider.getTestProvider() .getProxySuperuserGroupConfKey(currentUser), "*"); config.set(DefaultImpersonationProvider.getTestProvider() .getProxySuperuserIpConfKey(currentUser), "*"); ProxyUsers.refreshSuperUserGroupsConfiguration(config); cluster = new MiniDFSCluster.Builder(config).numDataNodes(1).build(); cluster.waitActive(); hdfs = cluster.getFileSystem(); nn = cluster.getNameNode(); // Use ephemeral port in case tests are running in parallel config.setInt("nfs3.mountd.port", 0); config.setInt("nfs3.server.port", 0); securityHandler = mock(SecurityHandler.class); when(securityHandler.getUser()).thenReturn( System.getProperty("user.name")); } @AfterAll public static void shutdown() throws Exception { if (cluster != null) { cluster.shutdown(); } } @BeforeEach public void createFiles() throws IllegalArgumentException, IOException { hdfs.delete(new Path(testdir), true); hdfs.mkdirs(new Path(testdir)); DFSTestUtil.createFile(hdfs, new Path(testdir + "/f1"), 0, (short) 1, 0); } @Test @Timeout(value = 60) public void testClientAccessPrivilegeForRemove() throws Exception { // Configure ro access for nfs1 service config.set("dfs.nfs.exports.allowed.hosts", "* ro"); // Start nfs Nfs3 nfs = new Nfs3(config); nfs.startServiceInternal(false); RpcProgramNfs3 nfsd = (RpcProgramNfs3) nfs.getRpcProgram(); // Create a remove request HdfsFileStatus status = nn.getRpcServer().getFileInfo(testdir); long dirId = status.getFileId(); int namenodeId = Nfs3Utils.getNamenodeId(config); XDR xdr_req = new XDR(); FileHandle handle = new FileHandle(dirId, namenodeId); handle.serialize(xdr_req); xdr_req.writeString("f1"); // Remove operation REMOVE3Response response = nfsd.remove(xdr_req.asReadOnlyWrap(), securityHandler, new InetSocketAddress("localhost", 1234)); // Assert on return code assertEquals(Nfs3Status.NFS3ERR_ACCES, response.getStatus(), "Incorrect return code"); } }
TestClientAccessPrivilege
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/injection/guice/internal/FailableCache.java
{ "start": 904, "end": 2139 }
class ____<K, V> { private final ConcurrentHashMap<K, Object> cache = new ConcurrentHashMap<>(); protected abstract V create(K key, Errors errors) throws ErrorsException; public V get(K key, Errors errors) throws ErrorsException { Object resultOrError = cache.get(key); if (resultOrError == null) { synchronized (this) { resultOrError = load(key); // we can't use cache.computeIfAbsent since this might recursively call this API cache.putIfAbsent(key, resultOrError); } } if (resultOrError instanceof Errors) { errors.merge((Errors) resultOrError); throw errors.toException(); } else { @SuppressWarnings("unchecked") // create returned a non-error result, so this is safe V result = (V) resultOrError; return result; } } private Object load(K key) { Errors errors = new Errors(); V result = null; try { result = create(key, errors); } catch (ErrorsException e) { errors.merge(e.getErrors()); } return errors.hasErrors() ? errors : result; } }
FailableCache
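A minimal pure-JDK sketch of the result-or-failure memoization pattern used in the FailableCache source above (hypothetical names; the Errors/ErrorsException types in the row are internal to the vendored Guice code, so a plain RuntimeException stands in for them here):

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.function.Function;

    // Sketch only: caches either the computed value or the failure for a key,
    // mirroring the result-or-error storage in FailableCache above.
    final class ResultOrErrorCache<K, V> {
        private final ConcurrentHashMap<K, Object> cache = new ConcurrentHashMap<>();
        private final Function<K, V> loader;

        ResultOrErrorCache(Function<K, V> loader) {
            this.loader = loader;
        }

        @SuppressWarnings("unchecked")
        V get(K key) {
            Object r = cache.get(key);
            if (r == null) {
                synchronized (this) {
                    try {
                        // loader may call back into this cache, so computeIfAbsent is avoided
                        r = loader.apply(key);
                    } catch (RuntimeException e) {
                        r = e; // remember the failure as well, so it is not recomputed
                    }
                    cache.putIfAbsent(key, r);
                    r = cache.get(key);
                }
            }
            if (r instanceof RuntimeException) {
                throw (RuntimeException) r;
            }
            return (V) r;
        }
    }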
java
elastic__elasticsearch
libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java
{ "start": 3552, "end": 4260 }
class ____ implements Testable { public TestClassToInstrument() {} public TestClassToInstrument(int arg) {} public void someMethod(int arg) {} public void someMethod(int arg, String anotherArg) {} public static void someStaticMethod(int arg) {} public static void someStaticMethod(int arg, String anotherArg) {} public static void anotherStaticMethod(int arg) {} } /** * Interface to test specific, "synthetic" cases (e.g. overloaded methods, overloaded constructors, etc.) that * may not be present, or may be difficult to find or unclear in the production EntitlementChecker interface. * <p> * This
TestClassToInstrument
java
assertj__assertj-core
assertj-core/src/main/java/org/assertj/core/api/TemporalAssert.java
{ "start": 1149, "end": 1477 }
class ____ ZonedDateTimeAssert"); } @Override public TemporalAssert isCloseTo(String otherAsString, TemporalOffset<? super Temporal> offset) { throw new UnsupportedOperationException("This is not supported because there is no unique String representation of Temporal, this is available in concrete assertion temporal
like
java
apache__flink
flink-runtime/src/test/java/org/apache/flink/runtime/executiongraph/DefaultExecutionGraphDeploymentTest.java
{ "start": 4743, "end": 31397 }
class ____ { @RegisterExtension static final TestExecutorExtension<ScheduledExecutorService> EXECUTOR_EXTENSION = TestingUtils.defaultExecutorExtension(); /** BLOB server instance to use for the job graph. */ protected BlobWriter blobWriter = VoidBlobWriter.getInstance(); /** * Permanent BLOB cache instance to use for the actor gateway that handles the {@link * TaskDeploymentDescriptor} loading (may be <tt>null</tt>). */ protected PermanentBlobService blobCache = null; /** * Checks that the job information for the given ID has been offloaded successfully (if * offloading is used). * * @param eg the execution graph that was created */ protected void checkJobOffloaded(DefaultExecutionGraph eg) throws Exception { assertThat(eg.getTaskDeploymentDescriptorFactory().getSerializedJobInformation()) .isInstanceOf(TaskDeploymentDescriptor.NonOffloaded.class); } /** * Checks that the task information for the job vertex has been offloaded successfully (if * offloading is used). * * @param eg the execution graph that was created * @param jobVertexId job vertex ID */ protected void checkTaskOffloaded(ExecutionGraph eg, JobVertexID jobVertexId) throws Exception { assertThat(eg.getJobVertex(jobVertexId).getTaskInformationOrBlobKey().isLeft()).isTrue(); } @Test void testBuildDeploymentDescriptor() throws Exception { final JobVertexID jid1 = new JobVertexID(); final JobVertexID jid2 = new JobVertexID(); final JobVertexID jid3 = new JobVertexID(); final JobVertexID jid4 = new JobVertexID(); JobVertex v1 = new JobVertex("v1", jid1); JobVertex v2 = new JobVertex("v2", jid2); JobVertex v3 = new JobVertex("v3", jid3); JobVertex v4 = new JobVertex("v4", jid4); v1.setParallelism(10); v2.setParallelism(10); v3.setParallelism(10); v4.setParallelism(10); v1.setInvokableClass(BatchTask.class); v2.setInvokableClass(BatchTask.class); v3.setInvokableClass(BatchTask.class); v4.setInvokableClass(BatchTask.class); connectNewDataSetAsInput( v2, v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); connectNewDataSetAsInput( v3, v2, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); connectNewDataSetAsInput( v4, v2, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED); final JobGraph jobGraph = JobGraphTestUtils.batchJobGraph(v1, v2, v3, v4); final JobID jobId = jobGraph.getJobID(); DirectScheduledExecutorService executor = new DirectScheduledExecutorService(); DefaultExecutionGraph eg = TestingDefaultExecutionGraphBuilder.newBuilder() .setJobGraph(jobGraph) .setBlobWriter(blobWriter) .build(executor); eg.start(ComponentMainThreadExecutorServiceAdapter.forMainThread()); checkJobOffloaded(eg); ExecutionJobVertex ejv = eg.getAllVertices().get(jid2); ExecutionVertex vertex = ejv.getTaskVertices()[3]; final SimpleAckingTaskManagerGateway taskManagerGateway = new SimpleAckingTaskManagerGateway(); final CompletableFuture<TaskDeploymentDescriptor> tdd = new CompletableFuture<>(); taskManagerGateway.setSubmitConsumer( FunctionUtils.uncheckedConsumer( taskDeploymentDescriptor -> { taskDeploymentDescriptor.loadBigData( blobCache, new NoOpGroupCache<>(), new NoOpGroupCache<>(), new NoOpGroupCache<>()); tdd.complete(taskDeploymentDescriptor); })); final LogicalSlot slot = new TestingLogicalSlotBuilder() .setTaskManagerGateway(taskManagerGateway) .createTestingLogicalSlot(); assertThat(vertex.getExecutionState()).isEqualTo(ExecutionState.CREATED); vertex.getCurrentExecutionAttempt().transitionState(ExecutionState.SCHEDULED); vertex.getCurrentExecutionAttempt() 
.registerProducedPartitions(slot.getTaskManagerLocation()) .get(); vertex.deployToSlot(slot); assertThat(vertex.getExecutionState()).isEqualTo(ExecutionState.DEPLOYING); checkTaskOffloaded(eg, vertex.getJobvertexId()); TaskDeploymentDescriptor descr = tdd.get(); assertThat(descr).isNotNull(); JobInformation jobInformation = descr.getJobInformation(); TaskInformation taskInformation = descr.getTaskInformation(); assertThat(descr.getJobId()).isEqualTo(jobId); assertThat(jobInformation.getJobId()).isEqualTo(jobId); assertThat(taskInformation.getJobVertexId()).isEqualTo(jid2); assertThat(descr.getSubtaskIndex()).isEqualTo(3); assertThat(taskInformation.getNumberOfSubtasks()).isEqualTo(10); assertThat(taskInformation.getInvokableClassName()).isEqualTo(BatchTask.class.getName()); assertThat(taskInformation.getTaskName()).isEqualTo("v2"); Collection<ResultPartitionDeploymentDescriptor> producedPartitions = descr.getProducedPartitions(); Collection<InputGateDeploymentDescriptor> consumedPartitions = descr.getInputGates(); assertThat(producedPartitions).hasSize((2)); assertThat(consumedPartitions).hasSize(1); Iterator<ResultPartitionDeploymentDescriptor> iteratorProducedPartitions = producedPartitions.iterator(); Iterator<InputGateDeploymentDescriptor> iteratorConsumedPartitions = consumedPartitions.iterator(); assertThat(iteratorProducedPartitions.next().getNumberOfSubpartitions()).isEqualTo(10); assertThat(iteratorProducedPartitions.next().getNumberOfSubpartitions()).isEqualTo(10); ShuffleDescriptor[] shuffleDescriptors = iteratorConsumedPartitions.next().getShuffleDescriptors(); assertThat(shuffleDescriptors.length).isEqualTo(10); Iterator<ConsumedPartitionGroup> iteratorConsumedPartitionGroup = vertex.getAllConsumedPartitionGroups().iterator(); int idx = 0; for (IntermediateResultPartitionID partitionId : iteratorConsumedPartitionGroup.next()) { assertThat(shuffleDescriptors[idx++].getResultPartitionID().getPartitionId()) .isEqualTo(partitionId); } } @Test void testRegistrationOfExecutionsFinishing() throws Exception { final JobVertexID jid1 = new JobVertexID(); final JobVertexID jid2 = new JobVertexID(); JobVertex v1 = new JobVertex("v1", jid1); JobVertex v2 = new JobVertex("v2", jid2); SchedulerBase scheduler = setupScheduler(v1, 7650, v2, 2350); Collection<Execution> executions = new ArrayList<>(scheduler.getExecutionGraph().getRegisteredExecutions().values()); for (Execution e : executions) { e.markFinished(); } assertThat(scheduler.getExecutionGraph().getRegisteredExecutions()).isEmpty(); } @Test void testRegistrationOfExecutionsFailing() throws Exception { final JobVertexID jid1 = new JobVertexID(); final JobVertexID jid2 = new JobVertexID(); JobVertex v1 = new JobVertex("v1", jid1); JobVertex v2 = new JobVertex("v2", jid2); SchedulerBase scheduler = setupScheduler(v1, 7, v2, 6); Collection<Execution> executions = new ArrayList<>(scheduler.getExecutionGraph().getRegisteredExecutions().values()); for (Execution e : executions) { e.markFailed(null); } assertThat(scheduler.getExecutionGraph().getRegisteredExecutions()).isEmpty(); } @Test void testRegistrationOfExecutionsFailedExternally() throws Exception { final JobVertexID jid1 = new JobVertexID(); final JobVertexID jid2 = new JobVertexID(); JobVertex v1 = new JobVertex("v1", jid1); JobVertex v2 = new JobVertex("v2", jid2); SchedulerBase scheduler = setupScheduler(v1, 7, v2, 6); Collection<Execution> executions = new ArrayList<>(scheduler.getExecutionGraph().getRegisteredExecutions().values()); for (Execution e : executions) { 
e.fail(null); } assertThat(scheduler.getExecutionGraph().getRegisteredExecutions()).isEmpty(); } /** * Verifies that {@link SchedulerNG#updateTaskExecutionState(TaskExecutionState)} updates the * accumulators and metrics for an execution that failed or was canceled. */ @Test void testAccumulatorsAndMetricsForwarding() throws Exception { final JobVertexID jid1 = new JobVertexID(); final JobVertexID jid2 = new JobVertexID(); JobVertex v1 = new JobVertex("v1", jid1); JobVertex v2 = new JobVertex("v2", jid2); SchedulerBase scheduler = setupScheduler(v1, 1, v2, 1); ExecutionGraph graph = scheduler.getExecutionGraph(); Map<ExecutionAttemptID, Execution> executions = graph.getRegisteredExecutions(); // verify behavior for canceled executions Execution execution1 = executions.values().iterator().next(); IOMetrics ioMetrics = new IOMetrics(0, 0, 0, 0, 0, 0, 0); Map<String, Accumulator<?, ?>> accumulators = new HashMap<>(); accumulators.put("acc", new IntCounter(4)); AccumulatorSnapshot accumulatorSnapshot = new AccumulatorSnapshot(graph.getJobID(), execution1.getAttemptId(), accumulators); TaskExecutionState state = new TaskExecutionState( execution1.getAttemptId(), ExecutionState.CANCELED, null, accumulatorSnapshot, ioMetrics); scheduler.updateTaskExecutionState(state); assertIOMetricsEqual(execution1.getIOMetrics(), ioMetrics); assertThat(execution1.getUserAccumulators()).isNotNull(); assertThat(execution1.getUserAccumulators().get("acc").getLocalValue()).isEqualTo(4); // verify behavior for failed executions Execution execution2 = executions.values().iterator().next(); IOMetrics ioMetrics2 = new IOMetrics(0, 0, 0, 0, 0, 0, 0); Map<String, Accumulator<?, ?>> accumulators2 = new HashMap<>(); accumulators2.put("acc", new IntCounter(8)); AccumulatorSnapshot accumulatorSnapshot2 = new AccumulatorSnapshot(graph.getJobID(), execution2.getAttemptId(), accumulators2); TaskExecutionState state2 = new TaskExecutionState( execution2.getAttemptId(), ExecutionState.FAILED, null, accumulatorSnapshot2, ioMetrics2); scheduler.updateTaskExecutionState(state2); assertIOMetricsEqual(execution2.getIOMetrics(), ioMetrics2); assertThat(execution2.getUserAccumulators()).isNotNull(); assertThat(execution2.getUserAccumulators().get("acc").getLocalValue()).isEqualTo(8); } /** * Verifies that {@link Execution#completeCancelling(Map, IOMetrics, boolean)} and {@link * Execution#markFailed(Throwable, boolean, Map, IOMetrics, boolean, boolean)} store the given * accumulators and metrics correctly. 
*/ @Test void testAccumulatorsAndMetricsStorage() throws Exception { final JobVertexID jid1 = new JobVertexID(); final JobVertexID jid2 = new JobVertexID(); JobVertex v1 = new JobVertex("v1", jid1); JobVertex v2 = new JobVertex("v2", jid2); SchedulerBase scheduler = setupScheduler(v1, 1, v2, 1); Map<ExecutionAttemptID, Execution> executions = scheduler.getExecutionGraph().getRegisteredExecutions(); IOMetrics ioMetrics = new IOMetrics(0, 0, 0, 0, 0, 0, 0); Map<String, Accumulator<?, ?>> accumulators = Collections.emptyMap(); Execution execution1 = executions.values().iterator().next(); execution1.cancel(); execution1.completeCancelling(accumulators, ioMetrics, false); assertIOMetricsEqual(execution1.getIOMetrics(), ioMetrics); assertThat(execution1.getUserAccumulators()).isEqualTo(accumulators); Execution execution2 = executions.values().iterator().next(); execution2.markFailed(new Throwable(), false, accumulators, ioMetrics, false, true); assertIOMetricsEqual(execution2.getIOMetrics(), ioMetrics); assertThat(execution2.getUserAccumulators()).isEqualTo(accumulators); } @Test void testRegistrationOfExecutionsCanceled() throws Exception { final JobVertexID jid1 = new JobVertexID(); final JobVertexID jid2 = new JobVertexID(); JobVertex v1 = new JobVertex("v1", jid1); JobVertex v2 = new JobVertex("v2", jid2); SchedulerBase scheduler = setupScheduler(v1, 19, v2, 37); Collection<Execution> executions = new ArrayList<>(scheduler.getExecutionGraph().getRegisteredExecutions().values()); for (Execution e : executions) { e.cancel(); e.completeCancelling(); } assertThat(scheduler.getExecutionGraph().getRegisteredExecutions()).isEmpty(); } /** * Tests that a blocking batch job fails if there are not enough resources left to schedule the * succeeding tasks. This test case is related to [FLINK-4296] where finished producing tasks * swallow the fail exception when scheduling a consumer task. 
*/ @Test void testNoResourceAvailableFailure() throws Exception { JobVertex v1 = new JobVertex("source"); JobVertex v2 = new JobVertex("sink"); int dop1 = 2; int dop2 = 2; v1.setParallelism(dop1); v2.setParallelism(dop2); v1.setInvokableClass(BatchTask.class); v2.setInvokableClass(BatchTask.class); connectNewDataSetAsInput( v2, v1, DistributionPattern.POINTWISE, ResultPartitionType.BLOCKING); final JobGraph graph = JobGraphTestUtils.batchJobGraph(v1, v2); DirectScheduledExecutorService directExecutor = new DirectScheduledExecutorService(); // execution graph that executes actions synchronously final SchedulerBase scheduler = new DefaultSchedulerBuilder( graph, ComponentMainThreadExecutorServiceAdapter.forMainThread(), EXECUTOR_EXTENSION.getExecutor()) .setExecutionSlotAllocatorFactory( SchedulerTestingUtils.newSlotSharingExecutionSlotAllocatorFactory( TestingPhysicalSlotProvider .createWithLimitedAmountOfPhysicalSlots(1))) .setFutureExecutor(directExecutor) .setBlobWriter(blobWriter) .build(); final ExecutionGraph eg = scheduler.getExecutionGraph(); checkJobOffloaded((DefaultExecutionGraph) eg); // schedule, this triggers mock deployment scheduler.startScheduling(); ExecutionAttemptID attemptID = eg.getJobVertex(v1.getID()) .getTaskVertices()[0] .getCurrentExecutionAttempt() .getAttemptId(); scheduler.updateTaskExecutionState( new TaskExecutionState(attemptID, ExecutionState.RUNNING)); scheduler.updateTaskExecutionState( new TaskExecutionState(attemptID, ExecutionState.FINISHED, null)); assertThat(eg.getState()).isEqualTo(JobStatus.FAILED); } // ------------------------------------------------------------------------ // retained checkpoints config test // ------------------------------------------------------------------------ @Test void testSettingDefaultMaxNumberOfCheckpointsToRetain() throws Exception { final Configuration jobManagerConfig = new Configuration(); final ExecutionGraph eg = createExecutionGraph(jobManagerConfig); assertThat( eg.getCheckpointCoordinator() .getCheckpointStore() .getMaxNumberOfRetainedCheckpoints()) .isEqualTo(CheckpointingOptions.MAX_RETAINED_CHECKPOINTS.defaultValue().intValue()); } private SchedulerBase setupScheduler(JobVertex v1, int dop1, JobVertex v2, int dop2) throws Exception { v1.setParallelism(dop1); v2.setParallelism(dop2); v1.setInvokableClass(BatchTask.class); v2.setInvokableClass(BatchTask.class); DirectScheduledExecutorService executorService = new DirectScheduledExecutorService(); // execution graph that executes actions synchronously final SchedulerBase scheduler = new DefaultSchedulerBuilder( JobGraphTestUtils.streamingJobGraph(v1, v2), ComponentMainThreadExecutorServiceAdapter.forMainThread(), EXECUTOR_EXTENSION.getExecutor()) .setExecutionSlotAllocatorFactory( SchedulerTestingUtils.newSlotSharingExecutionSlotAllocatorFactory()) .setFutureExecutor(executorService) .setBlobWriter(blobWriter) .build(); final ExecutionGraph eg = scheduler.getExecutionGraph(); checkJobOffloaded((DefaultExecutionGraph) eg); // schedule, this triggers mock deployment scheduler.startScheduling(); Map<ExecutionAttemptID, Execution> executions = eg.getRegisteredExecutions(); assertThat(executions).hasSize(dop1 + dop2); return scheduler; } @Test void testSettingIllegalMaxNumberOfCheckpointsToRetain() throws Exception { final int negativeMaxNumberOfCheckpointsToRetain = -10; final Configuration jobManagerConfig = new Configuration(); jobManagerConfig.set( CheckpointingOptions.MAX_RETAINED_CHECKPOINTS, negativeMaxNumberOfCheckpointsToRetain); final ExecutionGraph eg = 
createExecutionGraph(jobManagerConfig); assertThat( eg.getCheckpointCoordinator() .getCheckpointStore() .getMaxNumberOfRetainedCheckpoints()) .isNotEqualTo(negativeMaxNumberOfCheckpointsToRetain); assertThat( eg.getCheckpointCoordinator() .getCheckpointStore() .getMaxNumberOfRetainedCheckpoints()) .isEqualTo(CheckpointingOptions.MAX_RETAINED_CHECKPOINTS.defaultValue().intValue()); } /** Tests that the {@link ExecutionGraph} is deployed in topological order. */ @Test void testExecutionGraphIsDeployedInTopologicalOrder() throws Exception { final int sourceParallelism = 2; final int sinkParallelism = 1; final JobVertex sourceVertex = new JobVertex("source"); sourceVertex.setInvokableClass(NoOpInvokable.class); sourceVertex.setParallelism(sourceParallelism); final JobVertex sinkVertex = new JobVertex("sink"); sinkVertex.setInvokableClass(NoOpInvokable.class); sinkVertex.setParallelism(sinkParallelism); connectNewDataSetAsInput( sinkVertex, sourceVertex, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); final int numberTasks = sourceParallelism + sinkParallelism; final ArrayBlockingQueue<ExecutionAttemptID> submittedTasksQueue = new ArrayBlockingQueue<>(numberTasks); TestingTaskExecutorGatewayBuilder testingTaskExecutorGatewayBuilder = new TestingTaskExecutorGatewayBuilder(); testingTaskExecutorGatewayBuilder.setSubmitTaskConsumer( (taskDeploymentDescriptor, jobMasterId) -> { submittedTasksQueue.offer(taskDeploymentDescriptor.getExecutionAttemptId()); return CompletableFuture.completedFuture(Acknowledge.get()); }); final TaskManagerLocation taskManagerLocation = new LocalTaskManagerLocation(); final TestingTaskExecutorGateway taskExecutorGateway = testingTaskExecutorGatewayBuilder.createTestingTaskExecutorGateway(); final RpcTaskManagerGateway taskManagerGateway = new RpcTaskManagerGateway(taskExecutorGateway, JobMasterId.generate()); final JobGraph jobGraph = JobGraphTestUtils.streamingJobGraph(sourceVertex, sinkVertex); final TestingPhysicalSlotProvider physicalSlotProvider = TestingPhysicalSlotProvider.createWithoutImmediatePhysicalSlotCreation(); final SchedulerBase scheduler = new DefaultSchedulerBuilder( jobGraph, ComponentMainThreadExecutorServiceAdapter.forMainThread(), EXECUTOR_EXTENSION.getExecutor()) .setExecutionSlotAllocatorFactory( SchedulerTestingUtils.newSlotSharingExecutionSlotAllocatorFactory( physicalSlotProvider)) .setFutureExecutor(new DirectScheduledExecutorService()) .build(); final ExecutionGraph executionGraph = scheduler.getExecutionGraph(); scheduler.startScheduling(); // change the order in which the futures are completed final List<CompletableFuture<TestingPhysicalSlot>> shuffledFutures = new ArrayList<>(physicalSlotProvider.getResponses().values()); Collections.shuffle(shuffledFutures); for (CompletableFuture<TestingPhysicalSlot> slotFuture : shuffledFutures) { slotFuture.complete( TestingPhysicalSlot.builder() .withTaskManagerLocation(taskManagerLocation) .withTaskManagerGateway(taskManagerGateway) .build()); } final List<ExecutionAttemptID> submittedTasks = new ArrayList<>(numberTasks); for (int i = 0; i < numberTasks; i++) { submittedTasks.add(submittedTasksQueue.take()); } final Collection<ExecutionAttemptID> firstStage = new ArrayList<>(sourceParallelism); for (ExecutionVertex taskVertex : executionGraph.getJobVertex(sourceVertex.getID()).getTaskVertices()) { firstStage.add(taskVertex.getCurrentExecutionAttempt().getAttemptId()); } final Collection<ExecutionAttemptID> secondStage = new ArrayList<>(sinkParallelism); for (ExecutionVertex taskVertex 
: executionGraph.getJobVertex(sinkVertex.getID()).getTaskVertices()) { secondStage.add(taskVertex.getCurrentExecutionAttempt().getAttemptId()); } assertThat( isDeployedInTopologicalOrder( submittedTasks, Arrays.asList(firstStage, secondStage))) .isTrue(); } private ExecutionGraph createExecutionGraph(Configuration configuration) throws Exception { final JobGraph jobGraph = JobGraphTestUtils.emptyJobGraph(); jobGraph.setSnapshotSettings( new JobCheckpointingSettings( new CheckpointCoordinatorConfiguration( 100, 10 * 60 * 1000, 0, 1, CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION, false, false, 0, 0), null)); return TestingDefaultExecutionGraphBuilder.newBuilder() .setJobGraph(jobGraph) .setJobMasterConfig(configuration) .setBlobWriter(blobWriter) .build(EXECUTOR_EXTENSION.getExecutor()); } private static boolean isDeployedInTopologicalOrder( List<ExecutionAttemptID> submissionOrder, List<Collection<ExecutionAttemptID>> executionStages) { final Iterator<ExecutionAttemptID> submissionIterator = submissionOrder.iterator(); for (Collection<ExecutionAttemptID> stage : executionStages) { final Collection<ExecutionAttemptID> currentStage = new ArrayList<>(stage); while (!currentStage.isEmpty() && submissionIterator.hasNext()) { if (!currentStage.remove(submissionIterator.next())) { return false; } } if (!currentStage.isEmpty()) { return false; } } return !submissionIterator.hasNext(); } private void assertIOMetricsEqual(IOMetrics ioMetrics1, IOMetrics ioMetrics2) { assertThat(ioMetrics1.numBytesIn).isEqualTo(ioMetrics2.numBytesIn); assertThat(ioMetrics1.numBytesOut).isEqualTo(ioMetrics2.numBytesOut); assertThat(ioMetrics1.numRecordsIn).isEqualTo(ioMetrics2.numRecordsIn); assertThat(ioMetrics1.numRecordsOut).isEqualTo(ioMetrics2.numRecordsOut); assertThat(ioMetrics1.accumulateIdleTime).isEqualTo(ioMetrics2.accumulateIdleTime); assertThat(ioMetrics1.accumulateBusyTime).isEqualTo(ioMetrics2.accumulateBusyTime); assertThat(ioMetrics1.accumulateBackPressuredTime) .isEqualTo(ioMetrics2.accumulateBackPressuredTime); assertThat(ioMetrics1.resultPartitionBytes).isEqualTo(ioMetrics2.resultPartitionBytes); } }
DefaultExecutionGraphDeploymentTest
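The isDeployedInTopologicalOrder helper at the end of the row above is a small, self-contained algorithm: walk the submission order and require that each pipeline stage is fully drained before any task from a later stage appears. A standalone sketch of the same check with plain strings (hypothetical data, not Flink APIs):

    import java.util.HashSet;
    import java.util.Iterator;
    import java.util.List;
    import java.util.Set;

    final class StageOrderCheck {
        // Returns true iff 'order' consumes every task of a stage before any task
        // from a later stage appears, and contains nothing else.
        static boolean inStageOrder(List<String> order, List<Set<String>> stages) {
            Iterator<String> it = order.iterator();
            for (Set<String> stage : stages) {
                Set<String> remaining = new HashSet<>(stage);
                while (!remaining.isEmpty() && it.hasNext()) {
                    if (!remaining.remove(it.next())) {
                        return false; // a later-stage task showed up too early
                    }
                }
                if (!remaining.isEmpty()) {
                    return false; // order ended before the stage finished
                }
            }
            return !it.hasNext(); // no unexpected trailing submissions
        }

        public static void main(String[] args) {
            List<Set<String>> stages = List.of(Set.of("src-0", "src-1"), Set.of("sink-0"));
            System.out.println(inStageOrder(List.of("src-1", "src-0", "sink-0"), stages)); // true
            System.out.println(inStageOrder(List.of("src-0", "sink-0", "src-1"), stages)); // false
        }
    }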
java
quarkusio__quarkus
extensions/quartz/deployment/src/test/java/io/quarkus/quartz/test/InvalidCronExpressionTest.java
{ "start": 635, "end": 739 }
class ____ { @Scheduled(cron = "0 0 0 ????") void wrong() { } } }
InvalidBean
java
google__error-prone
check_api/src/main/java/com/google/errorprone/bugpatterns/BugChecker.java
{ "start": 16897, "end": 17016 }
interface ____ extends Suppressible { Description matchIf(IfTree tree, VisitorState state); } public
IfTreeMatcher
java
apache__logging-log4j2
log4j-1.2-api/src/main/java/org/apache/log4j/xml/XmlConfiguration.java
{ "start": 34171, "end": 35031 }
class ____ implements org.xml.sax.ErrorHandler { private static final org.apache.logging.log4j.Logger LOGGER = StatusLogger.getLogger(); @Override public void error(final SAXParseException ex) { emitMessage("Continuable parsing error ", ex); } @Override public void fatalError(final SAXParseException ex) { emitMessage("Fatal parsing error ", ex); } @Override public void warning(final SAXParseException ex) { emitMessage("Parsing warning ", ex); } private static void emitMessage(final String msg, final SAXParseException ex) { LOGGER.warn("{} {} and column {}", msg, ex.getLineNumber(), ex.getColumnNumber()); LOGGER.warn(ex.getMessage(), ex.getException()); } } private static
SAXErrorHandler
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/notfound/CompositeForeignKeyNotFoundTest.java
{ "start": 2996, "end": 3415 }
class ____ { @Id @GeneratedValue Long id; @ManyToOne @NotFound(action = NotFoundAction.IGNORE) @JoinColumnOrFormula(column = @JoinColumn(name = "owner", referencedColumnName = "id", insertable = false, updatable = false)) @JoinColumnOrFormula(formula = @JoinFormula(value = "'fubar'", referencedColumnName = "name")) Person owner; } @Entity(name = "DocumentException") public static
DocumentIgnore
java
quarkusio__quarkus
core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/NativeImageAgentConfigDirectoryBuildItem.java
{ "start": 610, "end": 904 }
class ____ extends SimpleBuildItem { private final String directory; public NativeImageAgentConfigDirectoryBuildItem(String directory) { this.directory = directory; } public String getDirectory() { return directory; } }
NativeImageAgentConfigDirectoryBuildItem
java
FasterXML__jackson-databind
src/test/java/tools/jackson/databind/ser/RequireSetterForGetterSerTest.java
{ "start": 1324, "end": 1474 }
class ____ { @JsonProperty public int getA() { return 123; } } // for [databind#736] public static
GettersWithoutSetters2
java
apache__flink
flink-core/src/main/java/org/apache/flink/api/common/operators/util/UserCodeWrapper.java
{ "start": 2417, "end": 2585 }
class ____ just * returned. If the user code is provided as an object, {@link Object#getClass()} is called on * the user code object. * * @return The
is
java
quarkusio__quarkus
extensions/oidc/runtime/src/main/java/io/quarkus/oidc/AuthenticationContext.java
{ "start": 708, "end": 1068 }
interface ____ { /** * Required ACR ('acr') claim values. */ String[] value(); /** * Token age relative to the value of the 'auth_time' claim value. * * @see io.quarkus.runtime.configuration.DurationConverter#parseDuration(String) for supported duration values */ String maxAge() default ""; }
AuthenticationContext
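A hedged usage sketch for the AuthenticationContext annotation above: in Quarkus OIDC it is placed on a resource to require specific ACR claim values, optionally bounded by token age. The endpoint path, ACR value, and max age below are invented for illustration:

    import jakarta.ws.rs.GET;
    import jakarta.ws.rs.Path;

    @Path("/admin")
    public class AdminResource {

        // Hypothetical: requires the token to carry acr=mfa and an auth_time
        // no older than 30 minutes (ISO-8601 duration, per the javadoc above).
        @GET
        @AuthenticationContext(value = "mfa", maxAge = "PT30M")
        public String admin() {
            return "sensitive";
        }
    }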
java
apache__maven
impl/maven-impl/src/main/java/org/apache/maven/impl/MappedCollection.java
{ "start": 977, "end": 1700 }
class ____<U, V> extends AbstractCollection<U> { private final Collection<V> list; private final Function<V, U> mapper; public MappedCollection(Collection<V> list, Function<V, U> mapper) { this.list = list; this.mapper = mapper; } @Override public Iterator<U> iterator() { Iterator<V> it = list.iterator(); return new Iterator<U>() { @Override public boolean hasNext() { return it.hasNext(); } @Override public U next() { return mapper.apply(it.next()); } }; } @Override public int size() { return list.size(); } }
MappedCollection
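A quick usage sketch for the MappedCollection row above (hypothetical data; assumes the class from the row is on the classpath): the wrapper is a live view, so elements are mapped lazily during iteration instead of being copied.

    import java.util.Collection;
    import java.util.List;

    final class MappedCollectionDemo {
        public static void main(String[] args) {
            // Wrap a List<Integer> as a read-through Collection<String>; nothing is
            // copied, and the mapping function runs only when elements are iterated.
            List<Integer> numbers = List.of(1, 2, 3);
            Collection<String> labels = new MappedCollection<>(numbers, n -> "item-" + n);
            labels.forEach(System.out::println); // item-1, item-2, item-3
        }
    }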
java
google__auto
value/src/it/functional/src/test/java/com/google/auto/value/AutoValueTest.java
{ "start": 15661, "end": 15919 }
class ____ extends Super { public abstract int subInt(); public static Sub create(Object superObject, boolean superBoolean, int subInt) { return new AutoValue_AutoValueTest_Sub(superObject, superBoolean, subInt); } } // The @AutoValue
Sub
java
apache__maven
impl/maven-xml/src/main/java/org/apache/maven/internal/xml/DefaultXmlService.java
{ "start": 19507, "end": 22563 }
class ____ extends StreamWriterDelegate { int depth = 0; boolean hasChildren = false; boolean anew = true; IndentingXMLStreamWriter(XMLStreamWriter parent) { super(parent); } @Override public void writeStartDocument() throws XMLStreamException { super.writeStartDocument(); anew = false; } @Override public void writeStartDocument(String version) throws XMLStreamException { super.writeStartDocument(version); anew = false; } @Override public void writeStartDocument(String encoding, String version) throws XMLStreamException { super.writeStartDocument(encoding, version); anew = false; } @Override public void writeEmptyElement(String localName) throws XMLStreamException { indent(); super.writeEmptyElement(localName); hasChildren = true; anew = false; } @Override public void writeEmptyElement(String namespaceURI, String localName) throws XMLStreamException { indent(); super.writeEmptyElement(namespaceURI, localName); hasChildren = true; anew = false; } @Override public void writeEmptyElement(String prefix, String localName, String namespaceURI) throws XMLStreamException { indent(); super.writeEmptyElement(prefix, localName, namespaceURI); hasChildren = true; anew = false; } @Override public void writeStartElement(String localName) throws XMLStreamException { indent(); super.writeStartElement(localName); depth++; hasChildren = false; anew = false; } @Override public void writeStartElement(String namespaceURI, String localName) throws XMLStreamException { indent(); super.writeStartElement(namespaceURI, localName); depth++; hasChildren = false; anew = false; } @Override public void writeStartElement(String prefix, String localName, String namespaceURI) throws XMLStreamException { indent(); super.writeStartElement(prefix, localName, namespaceURI); depth++; hasChildren = false; anew = false; } @Override public void writeEndElement() throws XMLStreamException { depth--; if (hasChildren) { indent(); } super.writeEndElement(); hasChildren = true; anew = false; } private void indent() throws XMLStreamException { if (!anew) { super.writeCharacters("\n"); } for (int i = 0; i < depth; i++) { super.writeCharacters(" "); } } } }
IndentingXMLStreamWriter
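A hedged usage sketch for the indenting writer above: wrap any stock XMLStreamWriter and write as usual, and newlines plus two spaces per depth level are injected around elements. In Maven the class is an internal detail, so this assumes it were accessible; the demo class and document content are made up:

    import java.io.StringWriter;
    import javax.xml.stream.XMLOutputFactory;
    import javax.xml.stream.XMLStreamException;
    import javax.xml.stream.XMLStreamWriter;

    final class IndentingDemo {
        public static void main(String[] args) throws XMLStreamException {
            StringWriter out = new StringWriter();
            XMLStreamWriter w = new IndentingXMLStreamWriter(
                    XMLOutputFactory.newFactory().createXMLStreamWriter(out));
            w.writeStartDocument();
            w.writeStartElement("project");    // depth 0: newline, no indent
            w.writeStartElement("artifactId"); // depth 1: two-space indent
            w.writeCharacters("demo");
            w.writeEndElement();
            w.writeEndElement();
            w.writeEndDocument();
            w.close();
            System.out.println(out); // indented element tree
        }
    }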
java
apache__flink
flink-runtime/src/test/java/org/apache/flink/streaming/graph/StreamConfigTest.java
{ "start": 1315, "end": 2330 }
class ____ { @Test void testClearInitialConfigs() { int chainedTaskId = 3456; MockStreamConfig streamConfig = new MockStreamConfig( new Configuration(), 1, Collections.singletonMap( chainedTaskId, new MockStreamConfig(new Configuration(), 1))); ClassLoader classLoader = getClass().getClassLoader(); StreamOperatorFactory<?> streamOperatorFactory = streamConfig.getStreamOperatorFactory(classLoader); assertThat(streamOperatorFactory).isNotNull(); assertThat(streamConfig.getStreamOperatorFactoryClass(classLoader)).isNotNull(); assertThat(streamConfig.getTransitiveChainedTaskConfigs(classLoader)) .hasSize(1) .containsKey(chainedTaskId); // StreamOperatorFactory and ChainedTaskConfigs should be cleared after clearInitialConfigs, // but the factory
StreamConfigTest
java
spring-projects__spring-framework
spring-context/src/test/java/org/springframework/validation/beanvalidation/SpringValidatorAdapterTests.java
{ "start": 16604, "end": 16875 }
class ____ { @Valid private Map<@NotNull String, @NotNull String> property; public Map<String, String> getProperty() { return property; } public void setProperty(Map<String, String> property) { this.property = property; } } }
BeanWithMapEntryConstraint
java
FasterXML__jackson-databind
src/test/java/tools/jackson/databind/ext/javatime/misc/DeductionTypeSerialization296Test.java
{ "start": 525, "end": 2638 }
class ____ { @JsonTypeInfo(use = JsonTypeInfo.Id.DEDUCTION) public Object value; public Wrapper(Object value) { this.value = value; } } private final ObjectMapper MAPPER = mapperBuilder() .disable(DateTimeFeature.WRITE_DATES_AS_TIMESTAMPS) .build(); @Test public void testLocalDate() throws Exception { LocalDate date = LocalDate.of(1986, Month.JANUARY, 17); assertEquals(a2q("{'value':'1986-01-17'}"), MAPPER.writeValueAsString(new Wrapper(date))); } @Test public void testLocalDateTime() throws Exception { LocalDateTime datetime = LocalDateTime.of(2013, Month.AUGUST, 21, 9, 22, 0, 57); assertEquals(a2q("{'value':'2013-08-21T09:22:00.000000057'}"), MAPPER.writeValueAsString(new Wrapper(datetime))); } @Test public void testLocalTime() throws Exception { LocalTime time = LocalTime.of(9, 22, 57); assertEquals(a2q("{'value':'09:22:57'}"), MAPPER.writeValueAsString(new Wrapper(time))); } @Test public void testMonthDate() throws Exception { MonthDay date = MonthDay.of(Month.JANUARY, 17); assertEquals(a2q("{'value':'--01-17'}"), MAPPER.writeValueAsString(new Wrapper(date))); } @Test public void testOffsetTime() throws Exception { OffsetTime time = OffsetTime.of(15, 43, 0, 0, ZoneOffset.of("+0300")); assertEquals(a2q("{'value':'15:43+03:00'}"), MAPPER.writeValueAsString(new Wrapper(time))); } @Test public void testYearMonth() throws Exception { YearMonth date = YearMonth.of(1986, Month.JANUARY); assertEquals(a2q("{'value':'1986-01'}"), MAPPER.writeValueAsString(new Wrapper(date))); } @Test public void testZoneId() throws Exception { ZoneId zone = ZoneId.of("America/Denver"); assertEquals(a2q("{'value':'America/Denver'}"), MAPPER.writeValueAsString(new Wrapper(zone))); } }
Wrapper
java
quarkusio__quarkus
integration-tests/main/src/test/java/io/quarkus/it/main/TestMockTestCase.java
{ "start": 197, "end": 351 }
class ____ { @Test public void testMockService() { RestAssured.when().get("/test/service").then().body(is("mock")); } }
TestMockTestCase
java
elastic__elasticsearch
server/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java
{ "start": 4509, "end": 8963 }
class ____ { long ord; final int id; public OrdAndId(long ord, int id) { this.ord = ord; this.id = id; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + id; result = prime * result + (int) ord; return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } OrdAndId other = (OrdAndId) obj; if (id != other.id) { return false; } if (ord != other.ord) { return false; } return true; } } public void testOrdinals() throws Exception { int maxDoc = 7; long maxOrds = 32; OrdinalsBuilder builder = new OrdinalsBuilder(maxDoc); builder.nextOrdinal(); // 0 builder.addDoc(1).addDoc(4).addDoc(5).addDoc(6); builder.nextOrdinal(); // 1 builder.addDoc(0).addDoc(5).addDoc(6); builder.nextOrdinal(); // 2 builder.addDoc(2).addDoc(4).addDoc(5).addDoc(6); builder.nextOrdinal(); // 3 builder.addDoc(0).addDoc(4).addDoc(5).addDoc(6); builder.nextOrdinal(); // 4 builder.addDoc(4).addDoc(5).addDoc(6); builder.nextOrdinal(); // 5 builder.addDoc(4).addDoc(5).addDoc(6); while (builder.getValueCount() < maxOrds) { builder.nextOrdinal(); builder.addDoc(5).addDoc(6); } long[][] ordinalPlan = new long[][] { { 1, 3 }, { 0 }, { 2 }, {}, { 0, 2, 3, 4, 5 }, { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31 }, { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31 } }; Ordinals ordinals = creationMultiOrdinals(builder); SortedSetDocValues docs = ordinals.ordinals(); assertEquals(docs, ordinalPlan); } public void testMultiValuesDocsWithOverlappingStorageArrays() throws Exception { int maxDoc = 7; long maxOrds = 15; OrdinalsBuilder builder = new OrdinalsBuilder(maxDoc); for (int i = 0; i < maxOrds; i++) { builder.nextOrdinal(); if (i < 10) { builder.addDoc(0); } builder.addDoc(1); if (i == 0) { builder.addDoc(2); } if (i < 5) { builder.addDoc(3); } if (i < 6) { builder.addDoc(4); } if (i == 1) { builder.addDoc(5); } if (i < 10) { builder.addDoc(6); } } long[][] ordinalPlan = new long[][] { { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }, { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14 }, { 0 }, { 0, 1, 2, 3, 4 }, { 0, 1, 2, 3, 4, 5 }, { 1 }, { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 } }; Ordinals ordinals = new MultiOrdinals(builder, PackedInts.FASTEST); SortedSetDocValues docs = ordinals.ordinals(); assertEquals(docs, ordinalPlan); } private void assertEquals(SortedSetDocValues docs, long[][] ordinalPlan) throws IOException { long maxOrd = 0; for (int doc = 0; doc < ordinalPlan.length; ++doc) { if (ordinalPlan[doc].length > 0) { maxOrd = Math.max(maxOrd, 1 + ordinalPlan[doc][ordinalPlan[doc].length - 1]); } } assertThat(docs.getValueCount(), equalTo(maxOrd)); assertNull(DocValues.unwrapSingleton(docs)); for (int doc = 0; doc < ordinalPlan.length; ++doc) { long[] ords = ordinalPlan[doc]; assertEquals(ords.length > 0, docs.advanceExact(doc)); if (ords.length > 0) { for (long ord : ords) { assertThat(docs.nextOrd(), equalTo(ord)); } } } } }
OrdAndId
java
apache__kafka
streams/src/test/java/org/apache/kafka/streams/tests/RelationalSmokeTest.java
{ "start": 6707, "end": 7072 }
class ____ implements Serde<Article> { @Override public Serializer<Article> serializer() { return new ArticleSerializer(); } @Override public Deserializer<Article> deserializer() { return new ArticleDeserializer(); } } } public static
ArticleSerde
java
apache__flink
flink-end-to-end-tests/flink-end-to-end-tests-sql/src/test/java/org/apache/flink/table/sql/PlannerScalaFreeITCase.java
{ "start": 1687, "end": 1850 }
class ____ execution time, ClassNotFound exception will be thrown. ITCases in the table planner cannot cover it, so we should add an E2E test for these cases. */ public
in
java
google__dagger
java/dagger/example/atm/UserCommandsRouter.java
{ "start": 819, "end": 903 }
interface ____ { CommandRouter router(); @Subcomponent.Factory
UserCommandsRouter
java
quarkusio__quarkus
independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/QuarkusProjectHelper.java
{ "start": 816, "end": 10361 }
class ____ { private static QuarkusProject cachedProject; private static RegistriesConfig toolsConfig; private static MessageWriter log; private static MavenArtifactResolver artifactResolver; private static ExtensionCatalogResolver catalogResolver; private static boolean registryClientEnabled; static { initRegistryClientEnabled(); } private static void initRegistryClientEnabled() { String value = System.getProperty("quarkusRegistryClient"); if (value == null) { value = System.getenv("QUARKUS_REGISTRY_CLIENT"); } registryClientEnabled = value == null || value.isBlank() || Boolean.parseBoolean(value); } public static boolean isRegistryClientEnabled() { return registryClientEnabled; } public static BuildTool detectExistingBuildTool(Path projectDirPath) { return BuildTool.fromProject(projectDirPath); } public static QuarkusProject getCachedProject(Path projectDir) { if (cachedProject == null) { PrintStream nullPrintStream = new PrintStream(OutputStream.nullOutputStream()); log = MessageWriter.info(nullPrintStream); BuildTool buildTool = detectExistingBuildTool(projectDir); if (buildTool == null) { buildTool = BuildTool.MAVEN; } if (BuildTool.MAVEN.equals(buildTool)) { try { return MavenProjectBuildFile.getProject(projectDir, log, null); } catch (RegistryResolutionException e) { throw new RuntimeException("Failed to initialize the Quarkus Maven extension manager", e); } } final ExtensionCatalog catalog; try { catalog = resolveExtensionCatalog(); } catch (Exception e) { throw new RuntimeException("Failed to resolve the Quarkus extension catalog", e); } cachedProject = getProject(projectDir, catalog, buildTool, JavaVersion.NA, log); } return cachedProject; } public static QuarkusProject getProject(Path projectDir) { BuildTool buildTool = detectExistingBuildTool(projectDir); if (buildTool == null) { buildTool = BuildTool.MAVEN; } return getProject(projectDir, buildTool); } @Deprecated public static QuarkusProject getProject(Path projectDir, String quarkusVersion) { // TODO remove this method once the default registry becomes available BuildTool buildTool = detectExistingBuildTool(projectDir); if (buildTool == null) { buildTool = BuildTool.MAVEN; } return getProject(projectDir, buildTool, quarkusVersion); } @Deprecated public static QuarkusProject getProject(Path projectDir, BuildTool buildTool, String quarkusVersion) { // TODO remove this method once the default registry becomes available return QuarkusProjectHelper.getProject(projectDir, getExtensionCatalog(quarkusVersion), buildTool, JavaVersion.NA); } @Deprecated public static ExtensionCatalog getExtensionCatalog(String quarkusVersion) { // TODO remove this method once the default registry becomes available try { if (registryClientEnabled && getCatalogResolver().hasRegistries()) { return quarkusVersion == null ? 
catalogResolver.resolveExtensionCatalog() : catalogResolver.resolveExtensionCatalog(quarkusVersion); } else { return ToolsUtils.resolvePlatformDescriptorDirectly(null, null, quarkusVersion, artifactResolver(), messageWriter()); } } catch (Exception e) { throw new RuntimeException("Failed to resolve the Quarkus extension catalog", e); } } public static QuarkusProject getProject(Path projectDir, BuildTool buildTool) { if (BuildTool.MAVEN.equals(buildTool)) { try { return MavenProjectBuildFile.getProject(projectDir, messageWriter(), null); } catch (RegistryResolutionException e) { throw new RuntimeException("Failed to initialize the Quarkus Maven extension manager", e); } } final ExtensionCatalog catalog; try { catalog = resolveExtensionCatalog(); } catch (Exception e) { throw new RuntimeException("Failed to resolve the Quarkus extension catalog", e); } return getProject(projectDir, catalog, buildTool, JavaVersion.NA, messageWriter()); } public static QuarkusProject getProject(Path projectDir, ExtensionCatalog catalog, BuildTool buildTool, JavaVersion javaVersion) { return getProject(projectDir, catalog, buildTool, javaVersion, messageWriter()); } public static QuarkusProject getProject(Path projectDir, ExtensionCatalog catalog, BuildTool buildTool) { return getProject(projectDir, catalog, buildTool, JavaVersion.NA, messageWriter()); } public static QuarkusProject getProject(Path projectDir, ExtensionCatalog catalog, BuildTool buildTool, JavaVersion javaVersion, MessageWriter log) { return QuarkusProject.of(projectDir, catalog, getCodestartResourceLoaders(log, catalog), log, buildTool, javaVersion); } public static QuarkusProject getProject(Path projectDir, ExtensionManager extManager) throws RegistryResolutionException { return getProject(projectDir, resolveExtensionCatalog(), extManager, JavaVersion.NA, messageWriter()); } public static ExtensionCatalog resolveExtensionCatalog() throws RegistryResolutionException { return getCatalogResolver().resolveExtensionCatalog(); } public static QuarkusProject getProject(Path projectDir, ExtensionCatalog catalog, ExtensionManager extManager, JavaVersion javaVersion, MessageWriter log) { return QuarkusProject.of(projectDir, catalog, getCodestartResourceLoaders(log, catalog), log, extManager, javaVersion); } public static ExtensionCatalogResolver getCatalogResolver() throws RegistryResolutionException { return catalogResolver == null ? catalogResolver = getCatalogResolver(true, messageWriter()) : catalogResolver; } public static ExtensionCatalogResolver getCatalogResolver(MessageWriter log) throws RegistryResolutionException { return getCatalogResolver(true, log); } public static ExtensionCatalogResolver getCatalogResolver(boolean enableRegistryClient, MessageWriter log) throws RegistryResolutionException { if (catalogResolver == null) { if (enableRegistryClient) { catalogResolver = getCatalogResolver(artifactResolver(), log); } else { catalogResolver = ExtensionCatalogResolver.empty(); } } return catalogResolver; } public static ExtensionCatalogResolver getCatalogResolver(MavenArtifactResolver resolver, MessageWriter log) throws RegistryResolutionException { return ExtensionCatalogResolver.builder() .artifactResolver(resolver) .config(toolsConfig()) .messageWriter(log) .build(); } public static RegistriesConfig toolsConfig() { return toolsConfig == null ? 
toolsConfig = RegistriesConfig.resolveConfig() : toolsConfig; } public static void setToolsConfig(RegistriesConfig config) { toolsConfig = config; } public static void reset() { initRegistryClientEnabled(); toolsConfig = null; artifactResolver = null; catalogResolver = null; log = null; } public static void setMessageWriter(MessageWriter newLog) { if (log == null) { log = newLog; } } public static MessageWriter messageWriter() { return log == null ? log = toolsConfig().isDebug() ? MessageWriter.debug() : MessageWriter.info() : log; } public static void setArtifactResolver(MavenArtifactResolver resolver) { if (artifactResolver == null) { artifactResolver = resolver; } } public static MavenArtifactResolver artifactResolver() { if (artifactResolver == null) { try { artifactResolver = MavenArtifactResolver.builder() .setArtifactTransferLogging(toolsConfig().isDebug()) .setWorkspaceDiscovery(false) .build(); } catch (BootstrapMavenException e) { StringBuilder messages = new StringBuilder("Failed to initialize the Maven artifact resolver"); Throwable current = e; while (null != current) { if (null != current.getMessage() && !current.getMessage().isBlank()) { messages .append("\n") .append(current.getMessage()); } current = current.getCause(); } throw new IllegalStateException(messages.toString().trim(), e); } } return artifactResolver; } }
QuarkusProjectHelper