Dataset schema (column name, feature type, length or value range):

method                  stringlengths   13 – 441k
clean_method            stringlengths   7 – 313k
doc                     stringlengths   17 – 17.3k
comment                 stringlengths   3 – 1.42k
method_name             stringlengths   1 – 273
extra                   dict
imports                 sequence
imports_info            stringlengths   19 – 34.8k
cluster_imports_info    stringlengths   15 – 3.66k
libraries               sequence
libraries_info          stringlengths   6 – 661
id                      int64           0 – 2.92M
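Sample records follow, one field per line in the column order above. As a minimal sketch (not part of the dump) of how such a corpus could be inspected with the Hugging Face datasets library; the repository id "user/java-methods-corpus" is a hypothetical placeholder, and the column names follow the schema listed above:

from datasets import load_dataset

# Hypothetical repository id; substitute the actual dataset path.
ds = load_dataset("user/java-methods-corpus", split="train")

row = ds[0]
print(row["method_name"])   # e.g. "testMapCategoryToCategoryEntity"
print(row["doc"])           # original Javadoc block for the method
print(row["libraries"])     # top-level package prefixes, e.g. ["com.kumasi.journal", "org.junit"]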
@Test public void testMapCategoryToCategoryEntity() { // Given Category category = new Category(); category.setName(mockValues.nextString(150)); CategoryEntity categoryEntity = new CategoryEntity(); // When categoryServiceMapper.mapCategoryToCategoryEntity(category, categoryEntity); // Then assertEquals(category.getName(), categoryEntity.getName()); }
void function() { Category category = new Category(); category.setName(mockValues.nextString(150)); CategoryEntity categoryEntity = new CategoryEntity(); categoryServiceMapper.mapCategoryToCategoryEntity(category, categoryEntity); assertEquals(category.getName(), categoryEntity.getName()); }
/** * Test : Mapping from 'Category' to 'CategoryEntity' */
Test : Mapping from 'Category' to 'CategoryEntity'
testMapCategoryToCategoryEntity
{ "repo_name": "obasola/master", "path": "src/test/java/com/kumasi/journal/business/service/mapping/CategoryServiceMapperTest.java", "license": "mit", "size": 2012 }
[ "com.kumasi.journal.domain.Category", "com.kumasi.journal.domain.jpa.CategoryEntity", "org.junit.Assert" ]
import com.kumasi.journal.domain.Category; import com.kumasi.journal.domain.jpa.CategoryEntity; import org.junit.Assert;
import com.kumasi.journal.domain.*; import com.kumasi.journal.domain.jpa.*; import org.junit.*;
[ "com.kumasi.journal", "org.junit" ]
com.kumasi.journal; org.junit;
254,542
@Override public List<String> getGroups(String user) throws IOException { // parent gets unix groups List<String> groups = new LinkedList<String>(super.getGroups(user)); NetgroupCache.getNetgroups(user, groups); return groups; }
List<String> function(String user) throws IOException { List<String> groups = new LinkedList<String>(super.getGroups(user)); NetgroupCache.getNetgroups(user, groups); return groups; }
/** * Gets unix groups and netgroups for the user. * * It gets all unix groups as returned by id -Gn but it * only returns netgroups that are used in ACLs (there is * no way to get all netgroups for a given user, see * documentation for getent netgroup) */
Gets unix groups and netgroups for the user. It gets all unix groups as returned by id -Gn but it only returns netgroups that are used in ACLs (there is no way to get all netgroups for a given user, see documentation for getent netgroup)
getGroups
{ "repo_name": "bitmybytes/hadoop", "path": "hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.java", "license": "apache-2.0", "size": 4274 }
[ "java.io.IOException", "java.util.LinkedList", "java.util.List", "org.apache.hadoop.security.NetgroupCache" ]
import java.io.IOException; import java.util.LinkedList; import java.util.List; import org.apache.hadoop.security.NetgroupCache;
import java.io.*; import java.util.*; import org.apache.hadoop.security.*;
[ "java.io", "java.util", "org.apache.hadoop" ]
java.io; java.util; org.apache.hadoop;
2,250,740
protected void buildRequiredValidUntilFilterIfNeeded(final SamlRegisteredService service, final List<MetadataFilter> metadataFilterList) { if (service.getMetadataMaxValidity() > 0) { final RequiredValidUntilFilter requiredValidUntilFilter = new RequiredValidUntilFilter(service.getMetadataMaxValidity()); metadataFilterList.add(requiredValidUntilFilter); LOGGER.debug("Added metadata RequiredValidUntilFilter with max validity of [{}]", service.getMetadataMaxValidity()); } else { LOGGER.debug("No metadata maximum validity criteria is defined for [{}], so RequiredValidUntilFilter will not be invoked", service.getMetadataLocation()); } }
void function(final SamlRegisteredService service, final List<MetadataFilter> metadataFilterList) { if (service.getMetadataMaxValidity() > 0) { final RequiredValidUntilFilter requiredValidUntilFilter = new RequiredValidUntilFilter(service.getMetadataMaxValidity()); metadataFilterList.add(requiredValidUntilFilter); LOGGER.debug(STR, service.getMetadataMaxValidity()); } else { LOGGER.debug(STR, service.getMetadataLocation()); } }
/** * Build required valid until filter if needed. See {@link RequiredValidUntilFilter}. * * @param service the service * @param metadataFilterList the metadata filter list */
Build required valid until filter if needed. See <code>RequiredValidUntilFilter</code>
buildRequiredValidUntilFilterIfNeeded
{ "repo_name": "dodok1/cas", "path": "support/cas-server-support-saml-idp-metadata/src/main/java/org/apereo/cas/support/saml/services/idp/metadata/cache/resolver/BaseSamlRegisteredServiceMetadataResolver.java", "license": "apache-2.0", "size": 14522 }
[ "java.util.List", "org.apereo.cas.support.saml.services.SamlRegisteredService", "org.opensaml.saml.metadata.resolver.filter.MetadataFilter", "org.opensaml.saml.metadata.resolver.filter.impl.RequiredValidUntilFilter" ]
import java.util.List; import org.apereo.cas.support.saml.services.SamlRegisteredService; import org.opensaml.saml.metadata.resolver.filter.MetadataFilter; import org.opensaml.saml.metadata.resolver.filter.impl.RequiredValidUntilFilter;
import java.util.*; import org.apereo.cas.support.saml.services.*; import org.opensaml.saml.metadata.resolver.filter.*; import org.opensaml.saml.metadata.resolver.filter.impl.*;
[ "java.util", "org.apereo.cas", "org.opensaml.saml" ]
java.util; org.apereo.cas; org.opensaml.saml;
1,008,143
@Test public void testGetManagementNetworks() { List<Network> result = dao.getManagementNetworks(datacenter); assertEquals(NUM_OF_MANAGEMENT_NETWORKS, result.size()); }
void function() { List<Network> result = dao.getManagementNetworks(datacenter); assertEquals(NUM_OF_MANAGEMENT_NETWORKS, result.size()); }
/** * Ensures that all management networks are returned. */
Ensures that all management networks are returned
testGetManagementNetworks
{ "repo_name": "OpenUniversity/ovirt-engine", "path": "backend/manager/modules/dal/src/test/java/org/ovirt/engine/core/dao/network/NetworkDaoTest.java", "license": "apache-2.0", "size": 12422 }
[ "java.util.List", "org.junit.Assert", "org.ovirt.engine.core.common.businessentities.network.Network" ]
import java.util.List; import org.junit.Assert; import org.ovirt.engine.core.common.businessentities.network.Network;
import java.util.*; import org.junit.*; import org.ovirt.engine.core.common.businessentities.network.*;
[ "java.util", "org.junit", "org.ovirt.engine" ]
java.util; org.junit; org.ovirt.engine;
2,304,654
public static String getReadableIP(String ip) { // Convert the ip if its an ipv6 ip. For ipv4 no conversion is needed if (Inet6Util.isValidIP6Address(ip)) { try { return getConvertedIP(ip); } catch (UnknownHostException e) { // ignore this } } return ip; }
static String function(String ip) { if (Inet6Util.isValidIP6Address(ip)) { try { return getConvertedIP(ip); } catch (UnknownHostException e) { } } return ip; }
/** * This method try to covnert an ip address to an easy readable ip. See * http://java.sun.com/j2se/1.4.2/docs/api/java/net/Inet6Address.html for * the format it returns. For ipv4 it make no convertion * * @param ip * The ip which should be tried to convert * @return ip The converted ip */
This method try to covnert an ip address to an easy readable ip. See HREF for the format it returns. For ipv4 it make no convertion
getReadableIP
{ "repo_name": "chibenwa/james-jspf", "path": "resolver/src/main/java/org/apache/james/jspf/core/IPAddr.java", "license": "apache-2.0", "size": 15018 }
[ "java.net.UnknownHostException" ]
import java.net.UnknownHostException;
import java.net.*;
[ "java.net" ]
java.net;
858,229
public Set<Signal> getAllValuesOfsg(final SignalPropertyMultipleMatch partialMatch) { return rawAccumulateAllValuesOfsg(partialMatch.toArray()); }
Set<Signal> function(final SignalPropertyMultipleMatch partialMatch) { return rawAccumulateAllValuesOfsg(partialMatch.toArray()); }
/** * Retrieve the set of values that occur in matches for sg. * @return the Set of all values, null if no parameter with the given name exists, empty set if there are no matches * */
Retrieve the set of values that occur in matches for sg
getAllValuesOfsg
{ "repo_name": "ELTE-Soft/xUML-RT-Executor", "path": "plugins/hu.eltesoft.modelexecution.validation/src-gen/hu/eltesoft/modelexecution/validation/SignalPropertyMultipleMatcher.java", "license": "epl-1.0", "size": 13700 }
[ "hu.eltesoft.modelexecution.validation.SignalPropertyMultipleMatch", "java.util.Set", "org.eclipse.uml2.uml.Signal" ]
import hu.eltesoft.modelexecution.validation.SignalPropertyMultipleMatch; import java.util.Set; import org.eclipse.uml2.uml.Signal;
import hu.eltesoft.modelexecution.validation.*; import java.util.*; import org.eclipse.uml2.uml.*;
[ "hu.eltesoft.modelexecution", "java.util", "org.eclipse.uml2" ]
hu.eltesoft.modelexecution; java.util; org.eclipse.uml2;
2,237,982
private void writeConfigFile() { configLock.writeLock().lock(); try { File tempConfig = new File(configFile.getPath() + ".tmp"); RandomAccessFile raf = new RandomAccessFile(tempConfig, "rw"); raf.seek(0); raf.write(cipherManager.getDiskSalt()); raf.writeLong(storeSize); raf.writeLong(prevStoreSize); raf.writeLong(keyCount.get()); raf.writeInt(generation); raf.writeInt(flags); raf.writeInt(0); // bloomFilterK raf.writeInt(0); raf.writeLong(0); raf.writeLong(writes.get()); raf.writeLong(hits.get()); raf.writeLong(misses.get()); raf.writeLong(bloomFalsePos.get()); raf.getFD().sync(); raf.close(); FileUtil.renameTo(tempConfig, configFile); } catch (IOException ioe) { Logger.error(this, "error writing config file for " + name, ioe); } finally { configLock.writeLock().unlock(); } } // ------------- Store resizing private long prevStoreSize = 0; private Lock cleanerLock = new ReentrantLock(); // local to this datastore private Condition cleanerCondition = cleanerLock.newCondition(); private static Lock cleanerGlobalLock = new ReentrantLock(); // global across all datastore private Cleaner cleanerThread; private CleanerStatusUserAlert cleanerStatusUserAlert; private final Entry NOT_MODIFIED = new Entry();
void function() { configLock.writeLock().lock(); try { File tempConfig = new File(configFile.getPath() + ".tmp"); RandomAccessFile raf = new RandomAccessFile(tempConfig, "rw"); raf.seek(0); raf.write(cipherManager.getDiskSalt()); raf.writeLong(storeSize); raf.writeLong(prevStoreSize); raf.writeLong(keyCount.get()); raf.writeInt(generation); raf.writeInt(flags); raf.writeInt(0); raf.writeInt(0); raf.writeLong(0); raf.writeLong(writes.get()); raf.writeLong(hits.get()); raf.writeLong(misses.get()); raf.writeLong(bloomFalsePos.get()); raf.getFD().sync(); raf.close(); FileUtil.renameTo(tempConfig, configFile); } catch (IOException ioe) { Logger.error(this, STR + name, ioe); } finally { configLock.writeLock().unlock(); } } private long prevStoreSize = 0; private Lock cleanerLock = new ReentrantLock(); private Condition cleanerCondition = cleanerLock.newCondition(); private static Lock cleanerGlobalLock = new ReentrantLock(); private Cleaner cleanerThread; private CleanerStatusUserAlert cleanerStatusUserAlert; private final Entry NOT_MODIFIED = new Entry();
/** * Write config file */
Write config file
writeConfigFile
{ "repo_name": "NiteshBharadwaj/android-staging", "path": "src/freenet/store/saltedhash/SaltedHashFreenetStore.java", "license": "gpl-2.0", "size": 69331 }
[ "java.io.File", "java.io.IOException", "java.io.RandomAccessFile", "java.util.concurrent.locks.Condition", "java.util.concurrent.locks.Lock", "java.util.concurrent.locks.ReentrantLock" ]
import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock;
import java.io.*; import java.util.concurrent.locks.*;
[ "java.io", "java.util" ]
java.io; java.util;
2,086,950
public static List<Method> getMethods(Class clazz) { return DECLARED_METHODS.computeIfAbsent( clazz, c -> { return Arrays.stream(c.getDeclaredMethods()) .filter( m -> !m.isBridge()) // Covariant overloads insert bridge functions, which we must // ignore. .filter(m -> !Modifier.isPrivate(m.getModifiers())) .filter(m -> !Modifier.isProtected(m.getModifiers())) .filter(m -> !Modifier.isStatic(m.getModifiers())) .collect(Collectors.toList()); }); }
static List<Method> function(Class clazz) { return DECLARED_METHODS.computeIfAbsent( clazz, c -> { return Arrays.stream(c.getDeclaredMethods()) .filter( m -> !m.isBridge()) .filter(m -> !Modifier.isPrivate(m.getModifiers())) .filter(m -> !Modifier.isProtected(m.getModifiers())) .filter(m -> !Modifier.isStatic(m.getModifiers())) .collect(Collectors.toList()); }); }
/** * Returns the list of non private/protected, non-static methods in the class, caching the * results. */
Returns the list of non private/protected, non-static methods in the class, caching the results
getMethods
{ "repo_name": "iemejia/incubator-beam", "path": "sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/utils/ReflectUtils.java", "license": "apache-2.0", "size": 9464 }
[ "java.lang.reflect.Method", "java.lang.reflect.Modifier", "java.util.Arrays", "java.util.List", "java.util.stream.Collectors" ]
import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors;
import java.lang.reflect.*; import java.util.*; import java.util.stream.*;
[ "java.lang", "java.util" ]
java.lang; java.util;
1,047,615
public MultipleCurrencyAmount presentValue(final AnnuityCouponFixed annuity, final MulticurveProviderInterface multicurves) { ArgumentChecker.notNull(annuity, "Annuity"); ArgumentChecker.notNull(multicurves, "Multi-curves provider"); MultipleCurrencyAmount pv = MultipleCurrencyAmount.of(annuity.getCurrency(), 0); for (final CouponFixed cpn : annuity.getPayments()) { pv = pv.plus(METHOD_CPN_FIXED.presentValue(cpn, multicurves)); } return pv; }
MultipleCurrencyAmount function(final AnnuityCouponFixed annuity, final MulticurveProviderInterface multicurves) { ArgumentChecker.notNull(annuity, STR); ArgumentChecker.notNull(multicurves, STR); MultipleCurrencyAmount pv = MultipleCurrencyAmount.of(annuity.getCurrency(), 0); for (final CouponFixed cpn : annuity.getPayments()) { pv = pv.plus(METHOD_CPN_FIXED.presentValue(cpn, multicurves)); } return pv; }
/** * Computes the present value of an annuity of fixed coupons. * @param annuity The annuity. * @param multicurves The multi-curves provider. * @return The present value. */
Computes the present value of an annuity of fixed coupons
presentValue
{ "repo_name": "DevStreet/FinanceAnalytics", "path": "projects/OG-Analytics/src/main/java/com/opengamma/analytics/financial/interestrate/annuity/provider/AnnuityDiscountingMethod.java", "license": "apache-2.0", "size": 2805 }
[ "com.opengamma.analytics.financial.interestrate.annuity.derivative.AnnuityCouponFixed", "com.opengamma.analytics.financial.interestrate.payments.derivative.CouponFixed", "com.opengamma.analytics.financial.provider.description.interestrate.MulticurveProviderInterface", "com.opengamma.util.ArgumentChecker", "com.opengamma.util.money.MultipleCurrencyAmount" ]
import com.opengamma.analytics.financial.interestrate.annuity.derivative.AnnuityCouponFixed; import com.opengamma.analytics.financial.interestrate.payments.derivative.CouponFixed; import com.opengamma.analytics.financial.provider.description.interestrate.MulticurveProviderInterface; import com.opengamma.util.ArgumentChecker; import com.opengamma.util.money.MultipleCurrencyAmount;
import com.opengamma.analytics.financial.interestrate.annuity.derivative.*; import com.opengamma.analytics.financial.interestrate.payments.derivative.*; import com.opengamma.analytics.financial.provider.description.interestrate.*; import com.opengamma.util.*; import com.opengamma.util.money.*;
[ "com.opengamma.analytics", "com.opengamma.util" ]
com.opengamma.analytics; com.opengamma.util;
448,408
public KeyValue getValue(int index);
KeyValue function(int index);
/** * Get the KeyValue at the given index. * @param index the zero-based KeyValue index between 0 and {@link #size()} exclusive * @return the KeyValue at the given index * @throws IndexOutOfBoundsException if an invalid index is used */
Get the KeyValue at the given index
getValue
{ "repo_name": "ramkrish86/incubator-phoenix", "path": "phoenix-core/src/main/java/org/apache/phoenix/schema/tuple/Tuple.java", "license": "apache-2.0", "size": 3235 }
[ "org.apache.hadoop.hbase.KeyValue" ]
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.*;
[ "org.apache.hadoop" ]
org.apache.hadoop;
2,323,632
public long getLastModified(String path) { try { // this entry time can cause problems ... ZipEntry entry = getJarEntry(path); return entry != null ? entry.getTime() : -1; } catch (IOException e) { log.log(Level.FINE, e.toString(), e); } return -1; }
long function(String path) { try { ZipEntry entry = getJarEntry(path); return entry != null ? entry.getTime() : -1; } catch (IOException e) { log.log(Level.FINE, e.toString(), e); } return -1; }
/** * Returns the last-modified time of the entry in the jar file. * * @param path full path to the jar entry * @return the length of the entry */
Returns the last-modified time of the entry in the jar file
getLastModified
{ "repo_name": "CleverCloud/Bianca", "path": "bianca/src/main/java/com/clevercloud/vfs/Jar.java", "license": "gpl-2.0", "size": 21977 }
[ "java.io.IOException", "java.util.logging.Level", "java.util.zip.ZipEntry" ]
import java.io.IOException; import java.util.logging.Level; import java.util.zip.ZipEntry;
import java.io.*; import java.util.logging.*; import java.util.zip.*;
[ "java.io", "java.util" ]
java.io; java.util;
1,582,831
NodeRef getRootHome();
NodeRef getRootHome();
/** * Gets the root home of the company home store * * @return root node ref */
Gets the root home of the company home store
getRootHome
{ "repo_name": "Alfresco/alfresco-repository", "path": "src/main/java/org/alfresco/repo/virtual/config/NodeRefResolver.java", "license": "lgpl-3.0", "size": 4525 }
[ "org.alfresco.service.cmr.repository.NodeRef" ]
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.*;
[ "org.alfresco.service" ]
org.alfresco.service;
238,026
public String getFileName() { File f = new File(getRemotePath()); return f.getName().length() == 0 ? ROOT_PATH : f.getName(); }
String function() { File f = new File(getRemotePath()); return f.getName().length() == 0 ? ROOT_PATH : f.getName(); }
/** * Returns the filename and "/" for the root directory * * @return The name of the file */
Returns the filename and "/" for the root directory
getFileName
{ "repo_name": "Maysami/elenoon-drive", "path": "src/com/elenoondrive/android/datamodel/OCFile.java", "license": "gpl-2.0", "size": 15965 }
[ "java.io.File" ]
import java.io.File;
import java.io.*;
[ "java.io" ]
java.io;
895,251
public String getTokenDisplayName(int ttype) { // inside any target's char range and is lexer grammar? if ( isLexer() && ttype >= Lexer.MIN_CHAR_VALUE && ttype <= Lexer.MAX_CHAR_VALUE ) { return CharSupport.getANTLRCharLiteralForChar(ttype); } if ( ttype==Token.EOF ) { return "EOF"; } if ( ttype==Token.INVALID_TYPE ) { return INVALID_TOKEN_NAME; } if (ttype >= 0 && ttype < typeToStringLiteralList.size() && typeToStringLiteralList.get(ttype) != null) { return typeToStringLiteralList.get(ttype); } if (ttype >= 0 && ttype < typeToTokenList.size() && typeToTokenList.get(ttype) != null) { return typeToTokenList.get(ttype); } return String.valueOf(ttype); }
String function(int ttype) { if ( isLexer() && ttype >= Lexer.MIN_CHAR_VALUE && ttype <= Lexer.MAX_CHAR_VALUE ) { return CharSupport.getANTLRCharLiteralForChar(ttype); } if ( ttype==Token.EOF ) { return "EOF"; } if ( ttype==Token.INVALID_TYPE ) { return INVALID_TOKEN_NAME; } if (ttype >= 0 && ttype < typeToStringLiteralList.size() && typeToStringLiteralList.get(ttype) != null) { return typeToStringLiteralList.get(ttype); } if (ttype >= 0 && ttype < typeToTokenList.size() && typeToTokenList.get(ttype) != null) { return typeToTokenList.get(ttype); } return String.valueOf(ttype); }
/** Given a token type, get a meaningful name for it such as the ID * or string literal. If this is a lexer and the ttype is in the * char vocabulary, compute an ANTLR-valid (possibly escaped) char literal. */
Given a token type, get a meaningful name for it such as the ID or string literal. If this is a lexer and the ttype is in the char vocabulary, compute an ANTLR-valid (possibly escaped) char literal
getTokenDisplayName
{ "repo_name": "Pursuit92/antlr4", "path": "tool/src/org/antlr/v4/tool/Grammar.java", "license": "bsd-3-clause", "size": 44518 }
[ "org.antlr.v4.misc.CharSupport", "org.antlr.v4.runtime.Lexer", "org.antlr.v4.runtime.Token" ]
import org.antlr.v4.misc.CharSupport; import org.antlr.v4.runtime.Lexer; import org.antlr.v4.runtime.Token;
import org.antlr.v4.misc.*; import org.antlr.v4.runtime.*;
[ "org.antlr.v4" ]
org.antlr.v4;
1,015,053
interface WithCustomHostNames { WithCreate withCustomHostNames(List<String> customHostNames); }
interface WithCustomHostNames { WithCreate withCustomHostNames(List<String> customHostNames); }
/** * Specifies customHostNames. * @param customHostNames The custom host names of the StreamingEndpoint * @return the next definition stage */
Specifies customHostNames
withCustomHostNames
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/mediaservices/mgmt-v2018_06_01_preview/src/main/java/com/microsoft/azure/management/mediaservices/v2018_06_01_preview/StreamingEndpoint.java", "license": "mit", "size": 15644 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,725,341
public static String getExtraByName(@NonNull String experimentName, String key, String defaultValue) { return getInstance().getBucket(experimentName).getExtraByName(key, defaultValue); }
static String function(@NonNull String experimentName, String key, String defaultValue) { return getInstance().getBucket(experimentName).getExtraByName(key, defaultValue); }
/** * get extra attributes stored in bucket, this handy when we we have specific value returned from server * @param experimentName experiment name which this bucket are contained * @param key name of extra value inside this bucket for example : buttonColor or buttonTitle * @param defaultValue in case this bucket doesn't contains this key this value are returned, this is handy in case response still didn't retrieved for any reason. * @return extra value in a form of String, then you have to parse it depend on expected value is */
get extra attributes stored in bucket, this handy when we we have specific value returned from server
getExtraByName
{ "repo_name": "rocket-internet-berlin/RocketBucket", "path": "android/RIBucket/src/main/java/de/rocketinternet/android/bucket/RocketBucket.java", "license": "mit", "size": 8810 }
[ "android.support.annotation.NonNull" ]
import android.support.annotation.NonNull;
import android.support.annotation.*;
[ "android.support" ]
android.support;
1,919,164
private void initUploader() { uploader = new ArrayList<>(); //Setup Imgur Uploader upImgur = new Uploader("Imgur"); upImgur.setHost("https://imgur.com"); upImgur.setPathUpload("https://api.imgur.com/3/upload"); upImgur.getHeader().add(new BasicHeader("Authorization", "Client-ID e6de7d8d5a3bd1c")); uploader.add(upImgur); }
void function() { uploader = new ArrayList<>(); Uploader upImgur = new Uploader("Imgur"); upImgur.setHost(STRhttps: upImgur.getHeader().add(new BasicHeader(STR, STR)); uploader.add(upImgur); }
/** * Initialize the uploader */
Initialize the uploader
initUploader
{ "repo_name": "fanor79/EasyTake", "path": "src/main/java/fr/mvinet/easyTake/EasyTake.java", "license": "lgpl-2.1", "size": 3286 }
[ "java.util.ArrayList", "org.apache.http.message.BasicHeader" ]
import java.util.ArrayList; import org.apache.http.message.BasicHeader;
import java.util.*; import org.apache.http.message.*;
[ "java.util", "org.apache.http" ]
java.util; org.apache.http;
1,785,338
private void evaluateExpressions() throws JspException { String string = null; Boolean bool = null; if ((string = EvalHelper.evalString("accesskey", getAccesskeyExpr(), this, pageContext)) != null) { setAccesskey(string); } if ((string = EvalHelper.evalString("alt", getAltExpr(), this, pageContext)) != null) { setAlt(string); } if ((string = EvalHelper.evalString("altKey", getAltKeyExpr(), this, pageContext)) != null) { setAltKey(string); } if ((string = EvalHelper.evalString("bundle", getBundleExpr(), this, pageContext)) != null) { setBundle(string); } if ((string = EvalHelper.evalString("dir", getDirExpr(), this, pageContext)) != null) { setDir(string); } if ((bool = EvalHelper.evalBoolean("disabled", getDisabledExpr(), this, pageContext)) != null) { setDisabled(bool.booleanValue()); } if ((string = EvalHelper.evalString("errorKey", getErrorKeyExpr(), this, pageContext)) != null) { setErrorKey(string); } if ((string = EvalHelper.evalString("errorStyle", getErrorStyleExpr(), this, pageContext)) != null) { setErrorStyle(string); } if ((string = EvalHelper.evalString("errorStyleClass", getErrorStyleClassExpr(), this, pageContext)) != null) { setErrorStyleClass(string); } if ((string = EvalHelper.evalString("errorStyleId", getErrorStyleIdExpr(), this, pageContext)) != null) { setErrorStyleId(string); } if ((bool = EvalHelper.evalBoolean("indexed", getIndexedExpr(), this, pageContext)) != null) { setIndexed(bool.booleanValue()); } if ((string = EvalHelper.evalString("lang", getLangExpr(), this, pageContext)) != null) { setLang(string); } if ((string = EvalHelper.evalString("name", getNameExpr(), this, pageContext)) != null) { setName(string); } if ((string = EvalHelper.evalString("onblur", getOnblurExpr(), this, pageContext)) != null) { setOnblur(string); } if ((string = EvalHelper.evalString("onchange", getOnchangeExpr(), this, pageContext)) != null) { setOnchange(string); } if ((string = EvalHelper.evalString("onclick", getOnclickExpr(), this, pageContext)) != null) { setOnclick(string); } if ((string = EvalHelper.evalString("ondblclick", getOndblclickExpr(), this, pageContext)) != null) { setOndblclick(string); } if ((string = EvalHelper.evalString("onfocus", getOnfocusExpr(), this, pageContext)) != null) { setOnfocus(string); } if ((string = EvalHelper.evalString("onkeydown", getOnkeydownExpr(), this, pageContext)) != null) { setOnkeydown(string); } if ((string = EvalHelper.evalString("onkeypress", getOnkeypressExpr(), this, pageContext)) != null) { setOnkeypress(string); } if ((string = EvalHelper.evalString("onkeyup", getOnkeyupExpr(), this, pageContext)) != null) { setOnkeyup(string); } if ((string = EvalHelper.evalString("onmousedown", getOnmousedownExpr(), this, pageContext)) != null) { setOnmousedown(string); } if ((string = EvalHelper.evalString("onmousemove", getOnmousemoveExpr(), this, pageContext)) != null) { setOnmousemove(string); } if ((string = EvalHelper.evalString("onmouseout", getOnmouseoutExpr(), this, pageContext)) != null) { setOnmouseout(string); } if ((string = EvalHelper.evalString("onmouseover", getOnmouseoverExpr(), this, pageContext)) != null) { setOnmouseover(string); } if ((string = EvalHelper.evalString("onmouseup", getOnmouseupExpr(), this, pageContext)) != null) { setOnmouseup(string); } if ((string = EvalHelper.evalString("property", getPropertyExpr(), this, pageContext)) != null) { setProperty(string); } if ((string = EvalHelper.evalString("style", getStyleExpr(), this, pageContext)) != null) { setStyle(string); } if ((string = 
EvalHelper.evalString("styleClass", getStyleClassExpr(), this, pageContext)) != null) { setStyleClass(string); } if ((string = EvalHelper.evalString("styleId", getStyleIdExpr(), this, pageContext)) != null) { setStyleId(string); } if ((string = EvalHelper.evalString("tabindex", getTabindexExpr(), this, pageContext)) != null) { setTabindex(string); } if ((string = EvalHelper.evalString("title", getTitleExpr(), this, pageContext)) != null) { setTitle(string); } if ((string = EvalHelper.evalString("titleKey", getTitleKeyExpr(), this, pageContext)) != null) { setTitleKey(string); } if ((string = EvalHelper.evalString("value", getValueExpr(), this, pageContext)) != null) { setValue(string); } }
void function() throws JspException { String string = null; Boolean bool = null; if ((string = EvalHelper.evalString(STR, getAccesskeyExpr(), this, pageContext)) != null) { setAccesskey(string); } if ((string = EvalHelper.evalString("alt", getAltExpr(), this, pageContext)) != null) { setAlt(string); } if ((string = EvalHelper.evalString(STR, getAltKeyExpr(), this, pageContext)) != null) { setAltKey(string); } if ((string = EvalHelper.evalString(STR, getBundleExpr(), this, pageContext)) != null) { setBundle(string); } if ((string = EvalHelper.evalString("dir", getDirExpr(), this, pageContext)) != null) { setDir(string); } if ((bool = EvalHelper.evalBoolean(STR, getDisabledExpr(), this, pageContext)) != null) { setDisabled(bool.booleanValue()); } if ((string = EvalHelper.evalString(STR, getErrorKeyExpr(), this, pageContext)) != null) { setErrorKey(string); } if ((string = EvalHelper.evalString(STR, getErrorStyleExpr(), this, pageContext)) != null) { setErrorStyle(string); } if ((string = EvalHelper.evalString(STR, getErrorStyleClassExpr(), this, pageContext)) != null) { setErrorStyleClass(string); } if ((string = EvalHelper.evalString(STR, getErrorStyleIdExpr(), this, pageContext)) != null) { setErrorStyleId(string); } if ((bool = EvalHelper.evalBoolean(STR, getIndexedExpr(), this, pageContext)) != null) { setIndexed(bool.booleanValue()); } if ((string = EvalHelper.evalString("lang", getLangExpr(), this, pageContext)) != null) { setLang(string); } if ((string = EvalHelper.evalString("name", getNameExpr(), this, pageContext)) != null) { setName(string); } if ((string = EvalHelper.evalString(STR, getOnblurExpr(), this, pageContext)) != null) { setOnblur(string); } if ((string = EvalHelper.evalString(STR, getOnchangeExpr(), this, pageContext)) != null) { setOnchange(string); } if ((string = EvalHelper.evalString(STR, getOnclickExpr(), this, pageContext)) != null) { setOnclick(string); } if ((string = EvalHelper.evalString(STR, getOndblclickExpr(), this, pageContext)) != null) { setOndblclick(string); } if ((string = EvalHelper.evalString(STR, getOnfocusExpr(), this, pageContext)) != null) { setOnfocus(string); } if ((string = EvalHelper.evalString(STR, getOnkeydownExpr(), this, pageContext)) != null) { setOnkeydown(string); } if ((string = EvalHelper.evalString(STR, getOnkeypressExpr(), this, pageContext)) != null) { setOnkeypress(string); } if ((string = EvalHelper.evalString(STR, getOnkeyupExpr(), this, pageContext)) != null) { setOnkeyup(string); } if ((string = EvalHelper.evalString(STR, getOnmousedownExpr(), this, pageContext)) != null) { setOnmousedown(string); } if ((string = EvalHelper.evalString(STR, getOnmousemoveExpr(), this, pageContext)) != null) { setOnmousemove(string); } if ((string = EvalHelper.evalString(STR, getOnmouseoutExpr(), this, pageContext)) != null) { setOnmouseout(string); } if ((string = EvalHelper.evalString(STR, getOnmouseoverExpr(), this, pageContext)) != null) { setOnmouseover(string); } if ((string = EvalHelper.evalString(STR, getOnmouseupExpr(), this, pageContext)) != null) { setOnmouseup(string); } if ((string = EvalHelper.evalString(STR, getPropertyExpr(), this, pageContext)) != null) { setProperty(string); } if ((string = EvalHelper.evalString("style", getStyleExpr(), this, pageContext)) != null) { setStyle(string); } if ((string = EvalHelper.evalString(STR, getStyleClassExpr(), this, pageContext)) != null) { setStyleClass(string); } if ((string = EvalHelper.evalString(STR, getStyleIdExpr(), this, pageContext)) != null) { setStyleId(string); } if ((string = 
EvalHelper.evalString(STR, getTabindexExpr(), this, pageContext)) != null) { setTabindex(string); } if ((string = EvalHelper.evalString("title", getTitleExpr(), this, pageContext)) != null) { setTitle(string); } if ((string = EvalHelper.evalString(STR, getTitleKeyExpr(), this, pageContext)) != null) { setTitleKey(string); } if ((string = EvalHelper.evalString("value", getValueExpr(), this, pageContext)) != null) { setValue(string); } }
/** * Processes all attribute values which use the JSTL expression evaluation * engine to determine their values. * * @throws JspException if a JSP exception has occurred */
Processes all attribute values which use the JSTL expression evaluation engine to determine their values
evaluateExpressions
{ "repo_name": "shuliangtao/struts-1.3.10", "path": "src/el/src/main/java/org/apache/strutsel/taglib/html/ELCheckboxTag.java", "license": "apache-2.0", "size": 29059 }
[ "javax.servlet.jsp.JspException", "org.apache.strutsel.taglib.utils.EvalHelper" ]
import javax.servlet.jsp.JspException; import org.apache.strutsel.taglib.utils.EvalHelper;
import javax.servlet.jsp.*; import org.apache.strutsel.taglib.utils.*;
[ "javax.servlet", "org.apache.strutsel" ]
javax.servlet; org.apache.strutsel;
1,354,458
public JTextField getTxtBarabasi() { if (txtBarabasi == null) { txtBarabasi = new JTextField(); txtBarabasi.setText("5"); txtBarabasi.setBounds(new Rectangle(125, 15, 85, 20)); } return txtBarabasi; }
JTextField function() { if (txtBarabasi == null) { txtBarabasi = new JTextField(); txtBarabasi.setText("5"); txtBarabasi.setBounds(new Rectangle(125, 15, 85, 20)); } return txtBarabasi; }
/** * This method initializes txtBarabasi * * @return javax.swing.JTextField */
This method initializes txtBarabasi
getTxtBarabasi
{ "repo_name": "emrahcem/profid", "path": "src/example/popularItems/gui/content/NetworkParamsPanel.java", "license": "apache-2.0", "size": 43194 }
[ "java.awt.Rectangle", "javax.swing.JTextField" ]
import java.awt.Rectangle; import javax.swing.JTextField;
import java.awt.*; import javax.swing.*;
[ "java.awt", "javax.swing" ]
java.awt; javax.swing;
779,634
if (!fgAvailabilityChecked) { try { Browser browser= new Browser(parent, SWT.NONE); browser.dispose(); fgIsAvailable= true; Slider sliderV= new Slider(parent, SWT.VERTICAL); Slider sliderH= new Slider(parent, SWT.HORIZONTAL); int width= sliderV.computeSize(SWT.DEFAULT, SWT.DEFAULT).x; int height= sliderH.computeSize(SWT.DEFAULT, SWT.DEFAULT).y; fgScrollBarSize= new Point(width, height); sliderV.dispose(); sliderH.dispose(); } catch (SWTError er) { fgIsAvailable= false; } finally { fgAvailabilityChecked= true; } } return fgIsAvailable; } private static final int MIN_WIDTH = 80; private static final int MIN_HEIGHT = 50; private static boolean fgIsAvailable = false; private static boolean fgAvailabilityChecked = false; private static Point fgScrollBarSize; private Browser fBrowser; private boolean fBrowserHasContent; private TextLayout fTextLayout; private TextStyle fBoldStyle; private eu.hyvar.mspl.manifest.resource.hymanifest.ui.HymanifestDocBrowserInformationControlInput fInput; private boolean fCompleted = false; private IInputChangedListener fDelayedInputChangeListener; private ListenerList fInputChangeListeners = new ListenerList(ListenerList.IDENTITY); private final String fSymbolicFontName; public HymanifestBrowserInformationControl(Shell parent, String symbolicFontName, boolean resizable) { super(parent, resizable); fSymbolicFontName= symbolicFontName; create(); } public HymanifestBrowserInformationControl(Shell parent, String symbolicFontName, String statusFieldText) { super(parent, statusFieldText); fSymbolicFontName= symbolicFontName; create(); } public HymanifestBrowserInformationControl(Shell parent, String symbolicFontName, ToolBarManager toolBarManager) { super(parent, toolBarManager); fSymbolicFontName= symbolicFontName; create(); }
if (!fgAvailabilityChecked) { try { Browser browser= new Browser(parent, SWT.NONE); browser.dispose(); fgIsAvailable= true; Slider sliderV= new Slider(parent, SWT.VERTICAL); Slider sliderH= new Slider(parent, SWT.HORIZONTAL); int width= sliderV.computeSize(SWT.DEFAULT, SWT.DEFAULT).x; int height= sliderH.computeSize(SWT.DEFAULT, SWT.DEFAULT).y; fgScrollBarSize= new Point(width, height); sliderV.dispose(); sliderH.dispose(); } catch (SWTError er) { fgIsAvailable= false; } finally { fgAvailabilityChecked= true; } } return fgIsAvailable; } private static final int MIN_WIDTH = 80; private static final int MIN_HEIGHT = 50; private static boolean fgIsAvailable = false; private static boolean fgAvailabilityChecked = false; private static Point fgScrollBarSize; private Browser fBrowser; private boolean fBrowserHasContent; private TextLayout fTextLayout; private TextStyle fBoldStyle; private eu.hyvar.mspl.manifest.resource.hymanifest.ui.HymanifestDocBrowserInformationControlInput fInput; private boolean fCompleted = false; private IInputChangedListener fDelayedInputChangeListener; private ListenerList fInputChangeListeners = new ListenerList(ListenerList.IDENTITY); private final String fSymbolicFontName; public HymanifestBrowserInformationControl(Shell parent, String symbolicFontName, boolean resizable) { super(parent, resizable); fSymbolicFontName= symbolicFontName; create(); } public HymanifestBrowserInformationControl(Shell parent, String symbolicFontName, String statusFieldText) { super(parent, statusFieldText); fSymbolicFontName= symbolicFontName; create(); } public HymanifestBrowserInformationControl(Shell parent, String symbolicFontName, ToolBarManager toolBarManager) { super(parent, toolBarManager); fSymbolicFontName= symbolicFontName; create(); }
/** * <p> * Tells whether the SWT Browser widget and hence this information control is * available. * </p> * * @param parent the parent component used for checking or <code>null</code> if * none * * @return <code>true</code> if this control is available */
Tells whether the SWT Browser widget and hence this information control is available.
isAvailable
{ "repo_name": "HyVar/DarwinSPL", "path": "plugins/eu.hyvar.mspl.manifest.resource.hymanifest.ui/src-gen/eu/hyvar/mspl/manifest/resource/hymanifest/ui/HymanifestBrowserInformationControl.java", "license": "apache-2.0", "size": 18018 }
[ "org.eclipse.core.runtime.ListenerList", "org.eclipse.jface.action.ToolBarManager", "org.eclipse.jface.text.IInputChangedListener", "org.eclipse.swt.SWTError", "org.eclipse.swt.browser.Browser", "org.eclipse.swt.graphics.Point", "org.eclipse.swt.graphics.TextLayout", "org.eclipse.swt.graphics.TextStyle", "org.eclipse.swt.widgets.Shell", "org.eclipse.swt.widgets.Slider" ]
import org.eclipse.core.runtime.ListenerList; import org.eclipse.jface.action.ToolBarManager; import org.eclipse.jface.text.IInputChangedListener; import org.eclipse.swt.SWTError; import org.eclipse.swt.browser.Browser; import org.eclipse.swt.graphics.Point; import org.eclipse.swt.graphics.TextLayout; import org.eclipse.swt.graphics.TextStyle; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Slider;
import org.eclipse.core.runtime.*; import org.eclipse.jface.action.*; import org.eclipse.jface.text.*; import org.eclipse.swt.*; import org.eclipse.swt.browser.*; import org.eclipse.swt.graphics.*; import org.eclipse.swt.widgets.*;
[ "org.eclipse.core", "org.eclipse.jface", "org.eclipse.swt" ]
org.eclipse.core; org.eclipse.jface; org.eclipse.swt;
1,818,867
public void dismissMessageDelivery(NotificationMessageDelivery messageDelivery, String user, String cause) throws NotificationMessageDismissalException;
void function(NotificationMessageDelivery messageDelivery, String user, String cause) throws NotificationMessageDismissalException;
/** * This method dismisses/removes the NotificationMessageDelivery so that it is no longer being presented to the user * via this deliverer. Note, whether this action is meaningful is dependent on the deliverer implementation. If the * deliverer cannot control the presentation of the message, then this method need not do anything. * @param messageDelivery the messageDelivery to dismiss * @param the user that caused the dismissal; in the case of end-user actions, this will most likely be the user to * which the message was delivered (user recipient in the NotificationMessageDelivery object) * @param cause the reason the message was dismissed */
This method dismisses/removes the NotificationMessageDelivery so that it is no longer being presented to the user via this deliverer. Note, whether this action is meaningful is dependent on the deliverer implementation. If the deliverer cannot control the presentation of the message, then this method need not do anything
dismissMessageDelivery
{ "repo_name": "mztaylor/rice-git", "path": "rice-middleware/impl/src/main/java/org/kuali/rice/ken/deliverer/NotificationMessageDeliverer.java", "license": "apache-2.0", "size": 2943 }
[ "org.kuali.rice.ken.bo.NotificationMessageDelivery", "org.kuali.rice.ken.exception.NotificationMessageDismissalException" ]
import org.kuali.rice.ken.bo.NotificationMessageDelivery; import org.kuali.rice.ken.exception.NotificationMessageDismissalException;
import org.kuali.rice.ken.bo.*; import org.kuali.rice.ken.exception.*;
[ "org.kuali.rice" ]
org.kuali.rice;
1,265,948
private void preLoadBeanPool() { final boolean isTraceOn = TraceComponent.isAnyTracingEnabled(); if (isTraceOn && tc.isEntryEnabled()) Tr.debug(tc, "preLoadBeanPool: " + j2eeName); synchronized (beanPool) { Object oldClassLoader = ThreadContextAccessor.UNCHANGED; try { // To support injection, etc. we must put the bmd and classloader on the thread. ivCMDAccessor.beginContext(beanMetaData); oldClassLoader = EJBThreadData.svThreadContextAccessor.pushContextClassLoaderForUnprivileged(beanMetaData.ivContextClassLoader); for (int i = ivNumberBeansCreated; i < beanMetaData.ivInitialPoolSize; i++) { BeanO beanO = beanOFactory.create(container, this, false); beanPool.put(beanO); if (beanMetaData.ivMaxCreation > 0) { ++ivNumberBeansCreated; if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, "Pre-load BeanPool(" + ivNumberBeansCreated + "/" + beanMetaData.ivMaxCreation + ")"); } } } catch (Throwable ex) { FFDCFilter.processException(ex, CLASS_NAME + ".preLoadBeanPool", "561", this); if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, "Pre-load of BeanPool failed: exception ignored: " + ex); if (ex instanceof InvocationTargetException) { ex = ex.getCause(); } Tr.warning(tc, "IGNORING_UNEXPECTED_EXCEPTION_CNTR0033E", ex); } finally { EJBThreadData.svThreadContextAccessor.popContextClassLoaderForUnprivileged(oldClassLoader); ivCMDAccessor.endContext(); } } if (isTraceOn && tc.isEntryEnabled()) Tr.exit(tc, "preLoadBeanPool"); }
void function() { final boolean isTraceOn = TraceComponent.isAnyTracingEnabled(); if (isTraceOn && tc.isEntryEnabled()) Tr.debug(tc, STR + j2eeName); synchronized (beanPool) { Object oldClassLoader = ThreadContextAccessor.UNCHANGED; try { ivCMDAccessor.beginContext(beanMetaData); oldClassLoader = EJBThreadData.svThreadContextAccessor.pushContextClassLoaderForUnprivileged(beanMetaData.ivContextClassLoader); for (int i = ivNumberBeansCreated; i < beanMetaData.ivInitialPoolSize; i++) { BeanO beanO = beanOFactory.create(container, this, false); beanPool.put(beanO); if (beanMetaData.ivMaxCreation > 0) { ++ivNumberBeansCreated; if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, STR + ivNumberBeansCreated + "/" + beanMetaData.ivMaxCreation + ")"); } } } catch (Throwable ex) { FFDCFilter.processException(ex, CLASS_NAME + STR, "561", this); if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, STR + ex); if (ex instanceof InvocationTargetException) { ex = ex.getCause(); } Tr.warning(tc, STR, ex); } finally { EJBThreadData.svThreadContextAccessor.popContextClassLoaderForUnprivileged(oldClassLoader); ivCMDAccessor.endContext(); } } if (isTraceOn && tc.isEntryEnabled()) Tr.exit(tc, STR); }
/** * Pre-load the BeanPool if the BeanPool min value has been configured as a * 'hard' limit. Any exceptions encountered will be ignored, and the pool * just not pre-loaded. Currently only supported for Stateless Session * beans. */
Pre-load the BeanPool if the BeanPool min value has been configured as a 'hard' limit. Any exceptions encountered will be ignored, and the pool just not pre-loaded. Currently only supported for Stateless Session beans
preLoadBeanPool
{ "repo_name": "OpenLiberty/open-liberty", "path": "dev/com.ibm.ws.ejbcontainer.core/src/com/ibm/ejs/container/EJSHome.java", "license": "epl-1.0", "size": 155847 }
[ "com.ibm.websphere.ras.Tr", "com.ibm.websphere.ras.TraceComponent", "com.ibm.ws.ffdc.FFDCFilter", "com.ibm.ws.util.ThreadContextAccessor", "java.lang.reflect.InvocationTargetException" ]
import com.ibm.websphere.ras.Tr; import com.ibm.websphere.ras.TraceComponent; import com.ibm.ws.ffdc.FFDCFilter; import com.ibm.ws.util.ThreadContextAccessor; import java.lang.reflect.InvocationTargetException;
import com.ibm.websphere.ras.*; import com.ibm.ws.ffdc.*; import com.ibm.ws.util.*; import java.lang.reflect.*;
[ "com.ibm.websphere", "com.ibm.ws", "java.lang" ]
com.ibm.websphere; com.ibm.ws; java.lang;
1,389,847
private void bind(Task task) { task.bindToOwner(this); task.init(); }
void function(Task task) { task.bindToOwner(this); task.init(); }
/** * Bind and initialise a task * @param task task to bind */
Bind and initialise a task
bind
{ "repo_name": "Mayo-WE01051879/mayosapp", "path": "Build/src/main/org/apache/tools/ant/taskdefs/optional/testing/Funtest.java", "license": "mit", "size": 17947 }
[ "org.apache.tools.ant.Task" ]
import org.apache.tools.ant.Task;
import org.apache.tools.ant.*;
[ "org.apache.tools" ]
org.apache.tools;
1,759,199
public void setEarliestReconciledTransaction(LocalDate earliestReconciledTransaction) { this.earliestReconciledTransaction = earliestReconciledTransaction; }
void function(LocalDate earliestReconciledTransaction) { this.earliestReconciledTransaction = earliestReconciledTransaction; }
/** * UTC Date which is the earliest transaction date of a statement line for which the reconciled * flag is set to TRUE. This date is represented in ISO 8601 format. * * @param earliestReconciledTransaction LocalDate */
UTC Date which is the earliest transaction date of a statement line for which the reconciled flag is set to TRUE. This date is represented in ISO 8601 format
setEarliestReconciledTransaction
{ "repo_name": "XeroAPI/Xero-Java", "path": "src/main/java/com/xero/models/finance/StatementLinesResponse.java", "license": "mit", "size": 30674 }
[ "org.threeten.bp.LocalDate" ]
import org.threeten.bp.LocalDate;
import org.threeten.bp.*;
[ "org.threeten.bp" ]
org.threeten.bp;
123,259
// From API 19 public static String actionToString(int action) { switch (action) { case MotionEvent.ACTION_DOWN: return "ACTION_DOWN"; case MotionEvent.ACTION_UP: return "ACTION_UP"; case MotionEvent.ACTION_CANCEL: return "ACTION_CANCEL"; case MotionEvent.ACTION_OUTSIDE: return "ACTION_OUTSIDE"; case MotionEvent.ACTION_MOVE: return "ACTION_MOVE"; case MotionEvent.ACTION_HOVER_MOVE: return "ACTION_HOVER_MOVE"; case MotionEvent.ACTION_SCROLL: return "ACTION_SCROLL"; case MotionEvent.ACTION_HOVER_ENTER: return "ACTION_HOVER_ENTER"; case MotionEvent.ACTION_HOVER_EXIT: return "ACTION_HOVER_EXIT"; } int index = (action & MotionEvent.ACTION_POINTER_INDEX_MASK) >> MotionEvent.ACTION_POINTER_INDEX_SHIFT; switch (action & MotionEvent.ACTION_MASK) { case MotionEvent.ACTION_POINTER_DOWN: return "ACTION_POINTER_DOWN(" + index + ")"; case MotionEvent.ACTION_POINTER_UP: return "ACTION_POINTER_UP(" + index + ")"; default: return Integer.toString(action); } }
static String function(int action) { switch (action) { case MotionEvent.ACTION_DOWN: return STR; case MotionEvent.ACTION_UP: return STR; case MotionEvent.ACTION_CANCEL: return STR; case MotionEvent.ACTION_OUTSIDE: return STR; case MotionEvent.ACTION_MOVE: return STR; case MotionEvent.ACTION_HOVER_MOVE: return STR; case MotionEvent.ACTION_SCROLL: return STR; case MotionEvent.ACTION_HOVER_ENTER: return STR; case MotionEvent.ACTION_HOVER_EXIT: return STR; } int index = (action & MotionEvent.ACTION_POINTER_INDEX_MASK) >> MotionEvent.ACTION_POINTER_INDEX_SHIFT; switch (action & MotionEvent.ACTION_MASK) { case MotionEvent.ACTION_POINTER_DOWN: return STR + index + ")"; case MotionEvent.ACTION_POINTER_UP: return STR + index + ")"; default: return Integer.toString(action); } }
/** * Returns a string that represents the symbolic name of the specified unmasked action * such as "ACTION_DOWN", "ACTION_POINTER_DOWN(3)" or an equivalent numeric constant * such as "35" if unknown. * * @param action The unmasked action. * @return The symbolic name of the specified action. * @see android.view.MotionEvent#getAction() */
Returns a string that represents the symbolic name of the specified unmasked action such as "ACTION_DOWN", "ACTION_POINTER_DOWN(3)" or an equivalent numeric constant such as "35" if unknown
actionToString
{ "repo_name": "arnotixe/androidbible", "path": "Alkitab/src/main/java/yuku/alkitab/base/widget/TwofingerLinearLayout.java", "license": "apache-2.0", "size": 8688 }
[ "android.view.MotionEvent" ]
import android.view.MotionEvent;
import android.view.*;
[ "android.view" ]
android.view;
437,800
try { final File dir = Files.createTempDirectory("test").toFile(); dir.mkdirs(); dir.deleteOnExit();
try { final File dir = Files.createTempDirectory("test").toFile(); dir.mkdirs(); dir.deleteOnExit();
/** * Create a new temporary directory that will be cleaned up automatically upon shutdown. * @return the new directory that will exist; never null */
Create a new temporary directory that will be cleaned up automatically upon shutdown
tempDir
{ "repo_name": "fluetm/kafka", "path": "streams/src/test/java/org/apache/kafka/streams/state/StateTestUtils.java", "license": "apache-2.0", "size": 2897 }
[ "java.io.File", "java.nio.file.Files" ]
import java.io.File; import java.nio.file.Files;
import java.io.*; import java.nio.file.*;
[ "java.io", "java.nio" ]
java.io; java.nio;
2,751,909
private void showExtCgeoDirChooser(final long usedBytes) { final List<File> extDirs = LocalStorage.getAvailableExternalPrivateCgeoDirectories(); final String currentExtDir = LocalStorage.getExternalPrivateCgeoDirectory().getAbsolutePath(); final List<CharSequence> directories = new ArrayList<>(); final List<Long> freeSpaces = new ArrayList<>(); int selectedDirIndex = -1; for (final File dir : extDirs) { if (StringUtils.equals(currentExtDir, dir.getAbsolutePath())) { selectedDirIndex = directories.size(); } final long freeSpace = FileUtils.getFreeDiskSpace(dir); freeSpaces.add(freeSpace); directories.add(dir.getAbsolutePath()); }
void function(final long usedBytes) { final List<File> extDirs = LocalStorage.getAvailableExternalPrivateCgeoDirectories(); final String currentExtDir = LocalStorage.getExternalPrivateCgeoDirectory().getAbsolutePath(); final List<CharSequence> directories = new ArrayList<>(); final List<Long> freeSpaces = new ArrayList<>(); int selectedDirIndex = -1; for (final File dir : extDirs) { if (StringUtils.equals(currentExtDir, dir.getAbsolutePath())) { selectedDirIndex = directories.size(); } final long freeSpace = FileUtils.getFreeDiskSpace(dir); freeSpaces.add(freeSpace); directories.add(dir.getAbsolutePath()); }
/** * Shows a list of available mount points. */
Shows a list of available mount points
showExtCgeoDirChooser
{ "repo_name": "pstorch/cgeo", "path": "main/src/cgeo/geocaching/settings/SettingsActivity.java", "license": "apache-2.0", "size": 46048 }
[ "java.io.File", "java.util.ArrayList", "java.util.List", "org.apache.commons.lang3.StringUtils" ]
import java.io.File; import java.util.ArrayList; import java.util.List; import org.apache.commons.lang3.StringUtils;
import java.io.*; import java.util.*; import org.apache.commons.lang3.*;
[ "java.io", "java.util", "org.apache.commons" ]
java.io; java.util; org.apache.commons;
874,665
Map<Collection<String>, InstrumentationDefinition> getInitialInstrumentationResults() throws StorageException; /** * Returns the {@link RetransformationStrategy} specified by the current environment. * * @return The {@link RetransformationStrategy}
Map<Collection<String>, InstrumentationDefinition> getInitialInstrumentationResults() throws StorageException; /** * Returns the {@link RetransformationStrategy} specified by the current environment. * * @return The {@link RetransformationStrategy}
/** * Set of known {@link InstrumentationDefinition} for the agent that can be used by the Agent * right away. Each {@link InstrumentationDefinition} is mapped to the collection of the class * hashes it relates to. * * @return Set of known {@link InstrumentationDefinition} for the agent that can be used by the * Agent right away. Each {@link InstrumentationDefinition} is mapped to the collection * of the class hashes it relates to. * @throws StorageException * If agent configuration is not set. */
Set of known <code>InstrumentationDefinition</code> for the agent that can be used by the Agent right away. Each <code>InstrumentationDefinition</code> is mapped to the collection of the class hashes it relates to
getInitialInstrumentationResults
{ "repo_name": "inspectIT/inspectIT", "path": "inspectit.agent.java/src/main/java/rocks/inspectit/agent/java/config/IConfigurationStorage.java", "license": "agpl-3.0", "size": 6867 }
[ "java.util.Collection", "java.util.Map", "rocks.inspectit.shared.all.instrumentation.config.impl.InstrumentationDefinition", "rocks.inspectit.shared.all.instrumentation.config.impl.RetransformationStrategy" ]
import java.util.Collection; import java.util.Map; import rocks.inspectit.shared.all.instrumentation.config.impl.InstrumentationDefinition; import rocks.inspectit.shared.all.instrumentation.config.impl.RetransformationStrategy;
import java.util.*; import rocks.inspectit.shared.all.instrumentation.config.impl.*;
[ "java.util", "rocks.inspectit.shared" ]
java.util; rocks.inspectit.shared;
1,791,139
StringBuilder format = new StringBuilder (PIPE).append(REG) .append(PIPE).append(TextUtil.checkSize(COD_ENT_REF, 255)) .append(PIPE).append(TextUtil.toNumeric(COD_INSCR)) .append(PIPE); return (TextUtil.removeEOL(format).append(EOL)).toString(); }
StringBuilder format = new StringBuilder (PIPE).append(REG) .append(PIPE).append(TextUtil.checkSize(COD_ENT_REF, 255)) .append(PIPE).append(TextUtil.toNumeric(COD_INSCR)) .append(PIPE); return (TextUtil.removeEOL(format).append(EOL)).toString(); }
/** * Formata o Bloco 0 Registro 007 * * @return */
Formata o Bloco 0 Registro 007
toString
{ "repo_name": "mgrigioni/oseb", "path": "sped/src/org/adempierelbr/sped/ecd/beans/R0007.java", "license": "gpl-2.0", "size": 2144 }
[ "org.adempierelbr.util.TextUtil" ]
import org.adempierelbr.util.TextUtil;
import org.adempierelbr.util.*;
[ "org.adempierelbr.util" ]
org.adempierelbr.util;
2,012,232
protected I2CPMessageReader.I2CPMessageEventListener createListener() { return new ClientMessageEventListener(_context, this, true); }
I2CPMessageReader.I2CPMessageEventListener function() { return new ClientMessageEventListener(_context, this, true); }
/** * Allow override for testing * @since 0.9.8 */
Allow override for testing
createListener
{ "repo_name": "oakes/Nightweb", "path": "common/java/router/net/i2p/router/client/ClientConnectionRunner.java", "license": "unlicense", "size": 31378 }
[ "net.i2p.data.i2cp.I2CPMessageReader" ]
import net.i2p.data.i2cp.I2CPMessageReader;
import net.i2p.data.i2cp.*;
[ "net.i2p.data" ]
net.i2p.data;
597,015
public void deleteIntent(com.google.cloud.dialogflow.v2.DeleteIntentRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) { asyncUnimplementedUnaryCall(getDeleteIntentMethodHelper(), responseObserver); }
void function(com.google.cloud.dialogflow.v2.DeleteIntentRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) { asyncUnimplementedUnaryCall(getDeleteIntentMethodHelper(), responseObserver); }
/** * <pre> * Deletes the specified intent. * </pre> */
<code> Deletes the specified intent. </code>
deleteIntent
{ "repo_name": "pongad/api-client-staging", "path": "generated/java/grpc-google-cloud-dialogflow-v2/src/main/java/com/google/cloud/dialogflow/v2/IntentsGrpc.java", "license": "bsd-3-clause", "size": 48285 }
[ "io.grpc.stub.ServerCalls" ]
import io.grpc.stub.ServerCalls;
import io.grpc.stub.*;
[ "io.grpc.stub" ]
io.grpc.stub;
2,293,449
T visitDimensionPointListLabel(@NotNull wcpsParser.DimensionPointListLabelContext ctx);
T visitDimensionPointListLabel(@NotNull wcpsParser.DimensionPointListLabelContext ctx);
/** * Visit a parse tree produced by {@link wcpsParser#DimensionPointListLabel}. * * @param ctx the parse tree * @return the visitor result */
Visit a parse tree produced by <code>wcpsParser#DimensionPointListLabel</code>
visitDimensionPointListLabel
{ "repo_name": "diogo-andrade/DataHubSystem", "path": "petascope/src/main/java/petascope/wcps2/parser/wcpsVisitor.java", "license": "agpl-3.0", "size": 28497 }
[ "org.antlr.v4.runtime.misc.NotNull" ]
import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.runtime.misc.*;
[ "org.antlr.v4" ]
org.antlr.v4;
2,329,939
public String toString(Hashtable ht, int level) { String s = new String(""); int i = 0; for (i=0; i<level; i++) s = s + " "; if (ht == null) s = s + "["+tag+" "+length+"] "; else s = s + ht.get(tag) + " "; if (tag.isConstructed()) { s = s + "\n"; for (i=0; i<level; i++) s = s + " "; } s = s + "( "; if (tag.isConstructed()) { s = s + "\n"; s = s + child.toString(ht, level+2); for (i=0; i<level; i++) s = s + " "; s = s + ")\n"; } else { boolean fPrintable = true; if (value != null) { for (i = 0; i < value.length; i++) if (value[i] < 32) fPrintable = false; if (fPrintable) s = s + "\""+new String(value)+"\""; else { s = s + "'"; for (i = 0; i < value.length; i++) s = s + HexString.hexify(value[i]); s = s + "'"; } } s = s + " )\n"; } if (sibling != null) s = s + sibling.toString(ht, level); return s; }
String function(Hashtable ht, int level) { String s = new String(STR STR["+tag+" STR] STR STR\nSTR STR( STR\nSTR STR)\nSTR\STR\STR'STR'STR )\n"; } if (sibling != null) s = s + sibling.toString(ht, level); return s; }
/** Convert a TLV to a string.<p> * * @param ht * A <tt>Hashtable</tt> object mapping <tt>Tag</tt> objects to * <tt>String</tt> objects. * @param level * An integer value giving the indentation level to be used. * @return A <tt>String</tt> object representing this <tt>TLV</tt> object. */
Convert a TLV to a string
toString
{ "repo_name": "zeroDenial/CNSReader", "path": "ocf/opencard/opt/util/TLV.java", "license": "gpl-2.0", "size": 21054 }
[ "java.util.Hashtable" ]
import java.util.Hashtable;
import java.util.*;
[ "java.util" ]
java.util;
2,073,528
public Transition getSharedElementEnterTransition() { return mSharedElementEnterTransition; }
Transition function() { return mSharedElementEnterTransition; }
/** * Returns the Transition that will be used for shared elements transferred into the content * Scene. Typical Transitions will affect size and location, such as * {@link android.transition.ChangeBounds}. A null * value will cause transferred shared elements to blink to the final position. * * @return The Transition to use for shared elements transferred into the content * Scene. * @attr ref android.R.styleable#Fragment_fragmentSharedElementEnterTransition */
Returns the Transition that will be used for shared elements transferred into the content Scene. Typical Transitions will affect size and location, such as <code>android.transition.ChangeBounds</code>. A null value will cause transferred shared elements to blink to the final position
getSharedElementEnterTransition
{ "repo_name": "OmniEvo/android_frameworks_base", "path": "core/java/android/app/Fragment.java", "license": "gpl-3.0", "size": 99025 }
[ "android.transition.Transition" ]
import android.transition.Transition;
import android.transition.*;
[ "android.transition" ]
android.transition;
1,117,628
@Test public void testNodeRemoved4() { OvsdbBridgeAugmentation ovbr = newBridgeAugmentation("ff:ee:dd:cc:bb:aa:99:88"); OvsdbNodeAugmentation ovnode = new OvsdbNodeAugmentationBuilder(). build(); List<TerminationPoint> tps = new ArrayList<>(); Map<String, PortAttr> pattrs = new HashMap<>(); for (long id = 1L; id <= 10L; id++) { String uuid = uniqueUuid(); PortAttr pattr = new PortAttr(uuid, id, "port-" + id); assertNull(pattrs.put(uuid, pattr)); tps.add(pattr.getTerminationPoint()); pattr.prepare(); } // Below termination points should be ignored. // No OVSDB termination point augmentation. tps.add(new TerminationPointBuilder().build()); // No port ID in the OVSDB termination point augmentation. tps.add(newTerminationPoint(null, 1000L, null)); // Invalid port UUID. tps.add(newTerminationPoint("invalid-uuid", 2000L, "port-2000")); // The target port not found. PortAttr pattr = new PortAttr(uniqueUuid(), 3000L, "port-3000"); assertNull(pattrs.put(pattr.getUuid(), pattr)); tps.add(pattr.getTerminationPoint()); pattr.prepareNotFound(); // The target port is not readable. pattr = new PortAttr(uniqueUuid(), 4000L, "port-4000"); assertNull(pattrs.put(pattr.getUuid(), pattr)); tps.add(pattr.getTerminationPoint()); pattr.prepareFailure(); Node node = new NodeBuilder(). setNodeId(new NodeId("ovsdb:node:removed4")). setTerminationPoint(tps). addAugmentation(OvsdbNodeAugmentation.class, ovnode). addAugmentation(OvsdbBridgeAugmentation.class, ovbr). build(); OvsdbNodeChange ovchg = OvsdbNodeChange.nodeRemoved(ovsdbHandler, txHolder, node); assertNotNull(ovchg); verify(ovsdbHandler).getOvsdbBridgeName(); for (PortAttr pa: pattrs.values()) { pa.checkRead(); } verifyNoMoreInteractions(ovsdbHandler, readTx); ovchg.apply(); verify(ovsdbHandler).nodeRemoved(node); for (PortAttr pa: pattrs.values()) { pa.checkUnmapped(); } verifyNoMoreInteractions(ovsdbHandler, readTx); }
void function() { OvsdbBridgeAugmentation ovbr = newBridgeAugmentation(STR); OvsdbNodeAugmentation ovnode = new OvsdbNodeAugmentationBuilder(). build(); List<TerminationPoint> tps = new ArrayList<>(); Map<String, PortAttr> pattrs = new HashMap<>(); for (long id = 1L; id <= 10L; id++) { String uuid = uniqueUuid(); PortAttr pattr = new PortAttr(uuid, id, "port-" + id); assertNull(pattrs.put(uuid, pattr)); tps.add(pattr.getTerminationPoint()); pattr.prepare(); } tps.add(new TerminationPointBuilder().build()); tps.add(newTerminationPoint(null, 1000L, null)); tps.add(newTerminationPoint(STR, 2000L, STR)); PortAttr pattr = new PortAttr(uniqueUuid(), 3000L, STR); assertNull(pattrs.put(pattr.getUuid(), pattr)); tps.add(pattr.getTerminationPoint()); pattr.prepareNotFound(); pattr = new PortAttr(uniqueUuid(), 4000L, STR); assertNull(pattrs.put(pattr.getUuid(), pattr)); tps.add(pattr.getTerminationPoint()); pattr.prepareFailure(); Node node = new NodeBuilder(). setNodeId(new NodeId(STR)). setTerminationPoint(tps). addAugmentation(OvsdbNodeAugmentation.class, ovnode). addAugmentation(OvsdbBridgeAugmentation.class, ovbr). build(); OvsdbNodeChange ovchg = OvsdbNodeChange.nodeRemoved(ovsdbHandler, txHolder, node); assertNotNull(ovchg); verify(ovsdbHandler).getOvsdbBridgeName(); for (PortAttr pa: pattrs.values()) { pa.checkRead(); } verifyNoMoreInteractions(ovsdbHandler, readTx); ovchg.apply(); verify(ovsdbHandler).nodeRemoved(node); for (PortAttr pa: pattrs.values()) { pa.checkUnmapped(); } verifyNoMoreInteractions(ovsdbHandler, readTx); }
/** * Test case for * {@link OvsdbNodeChange#nodeRemoved(OVSDBEventHandler,ReadTransactionHolder,Node)}. * * <ul> * <li>OVSDB node augmentation is present.</li> * <li>Port mappings are unconfigured for neutron ports.</li> * </ul> */
Test case for <code>OvsdbNodeChange#nodeRemoved(OVSDBEventHandler,ReadTransactionHolder,Node)</code>. OVSDB node augmentation is present. Port mappings are unconfigured for neutron ports.
testNodeRemoved4
{ "repo_name": "opendaylight/vtn", "path": "manager/neutron/src/test/java/org/opendaylight/vtn/manager/neutron/impl/OvsdbNodeChangeTest.java", "license": "epl-1.0", "size": 76494 }
[ "java.util.ArrayList", "java.util.HashMap", "java.util.List", "java.util.Map", "org.mockito.Mockito", "org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.ovsdb.rev150105.OvsdbBridgeAugmentation", "org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.ovsdb.rev150105.OvsdbNodeAugmentation", "org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.ovsdb.rev150105.OvsdbNodeAugmentationBuilder", "org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.NodeId", "org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Node", "org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.NodeBuilder", "org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.node.TerminationPoint", "org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.node.TerminationPointBuilder" ]
import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.mockito.Mockito; import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.ovsdb.rev150105.OvsdbBridgeAugmentation; import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.ovsdb.rev150105.OvsdbNodeAugmentation; import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.ovsdb.rev150105.OvsdbNodeAugmentationBuilder; import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.NodeId; import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Node; import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.NodeBuilder; import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.node.TerminationPoint; import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.node.TerminationPointBuilder;
import java.util.*; import org.mockito.*; import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.ovsdb.rev150105.*; import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.*; import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.*; import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.node.*;
[ "java.util", "org.mockito", "org.opendaylight.yang" ]
java.util; org.mockito; org.opendaylight.yang;
536,936
@Test public void testTrimCommentFalse() { assertEquals("Wrong trimmed comment", "# Comment with" + CR + " ! some mixed " + CR + "#comment" + CR + "# lines", PropertiesConfigurationLayout.trimComment("Comment with" + CR + " ! some mixed " + CR + "#comment" + CR + "lines", true)); }
void function() { assertEquals(STR, STR + CR + STR + CR + STR + CR + STR, PropertiesConfigurationLayout.trimComment(STR + CR + STR + CR + STR + CR + "lines", true)); }
/** * Tests enforcing comment characters in a comment. */
Tests enforcing comment characters in a comment
testTrimCommentFalse
{ "repo_name": "apache/commons-configuration", "path": "src/test/java/org/apache/commons/configuration2/TestPropertiesConfigurationLayout.java", "license": "apache-2.0", "size": 29381 }
[ "org.junit.Assert" ]
import org.junit.Assert;
import org.junit.*;
[ "org.junit" ]
org.junit;
2,004,753
public T selectRanked (Comparator<T> comparator, int kthLowest) { if (kthLowest < 1) { throw new GdxRuntimeException("nth_lowest must be greater than 0, 1 = first, 2 = second..."); } return Select.instance().select(items, comparator, kthLowest, size); }
T function (Comparator<T> comparator, int kthLowest) { if (kthLowest < 1) { throw new GdxRuntimeException(STR); } return Select.instance().select(items, comparator, kthLowest, size); }
/** Selects the nth-lowest element from the Array according to Comparator ranking. This might partially sort the Array. The * array must have a size greater than 0, or a {@link com.badlogic.gdx.utils.GdxRuntimeException} will be thrown. * @see Select * @param comparator used for comparison * @param kthLowest rank of desired object according to comparison, n is based on ordinal numbers, not array indices. for min * value use 1, for max value use size of array, using 0 results in runtime exception. * @return the value of the Nth lowest ranked object. */
Selects the nth-lowest element from the Array according to Comparator ranking. This might partially sort the Array. The array must have a size greater than 0, or a <code>com.badlogic.gdx.utils.GdxRuntimeException</code> will be thrown
selectRanked
{ "repo_name": "sarkanyi/libgdx", "path": "gdx/src/com/badlogic/gdx/utils/Array.java", "license": "apache-2.0", "size": 22310 }
[ "java.util.Comparator" ]
import java.util.Comparator;
import java.util.*;
[ "java.util" ]
java.util;
2,431,194
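A minimal usage sketch for the ranked selection shown above; the array contents and comparator are hypothetical, and kthLowest is 1-based, so 1 selects the minimum and a value equal to the array's size selects the maximum:
// imports: com.badlogic.gdx.utils.Array, java.util.Comparator
Array<Integer> values = new Array<>(new Integer[]{5, 2, 9, 1});
Integer secondLowest = values.selectRanked(Comparator.naturalOrder(), 2); // -> 2
Integer largest = values.selectRanked(Comparator.naturalOrder(), values.size); // -> 9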
public FileMetadata uploadPutResumable(String uploadId) throws ApiException { return uploadPutResumableWithHttpInfo(uploadId).getData(); }
FileMetadata function(String uploadId) throws ApiException { return uploadPutResumableWithHttpInfo(uploadId).getData(); }
/** * Upload a chunk to the resumable upload. Use Content-Length and Content-Range to describe the chunk size and offset. Use Content-Length &#x3D; 0 and Content-Range &#x3D; *_/Length to query upload status. * * @param uploadId The uploadId (required) * @return FileMetadata * @throws ApiException if fails to make API call */
Upload a chunk to the resumable upload. Use Content-Length and Content-Range to describe the chunk size and offset. Use Content-Length = 0 and Content-Range = */Length to query upload status
uploadPutResumable
{ "repo_name": "iterate-ch/cyberduck", "path": "storegate/src/main/java/ch/cyberduck/core/storegate/io/swagger/client/api/UploadApi.java", "license": "gpl-3.0", "size": 13928 }
[ "ch.cyberduck.core.storegate.io.swagger.client.ApiException", "ch.cyberduck.core.storegate.io.swagger.client.model.FileMetadata" ]
import ch.cyberduck.core.storegate.io.swagger.client.ApiException; import ch.cyberduck.core.storegate.io.swagger.client.model.FileMetadata;
import ch.cyberduck.core.storegate.io.swagger.client.*; import ch.cyberduck.core.storegate.io.swagger.client.model.*;
[ "ch.cyberduck.core" ]
ch.cyberduck.core;
2,165,193
private Repeat getRepeatable(FrameworkMethod method) { Repeat repeatable = method.getAnnotation(Repeat.class); if (repeatable == null) { repeatable = super.getTestClass().getJavaClass().getAnnotation(Repeat.class); } return repeatable; }
Repeat function(FrameworkMethod method) { Repeat repeatable = method.getAnnotation(Repeat.class); if (repeatable == null) { repeatable = super.getTestClass().getJavaClass().getAnnotation(Repeat.class); } return repeatable; }
/** * Gets the {@link Repeat} annotation if set. * * Method level definition overrides class level definition. */
Gets the <code>Repeat</code> annotation if set. Method level definition overrides class level definition
getRepeatable
{ "repo_name": "lmjacksoniii/hazelcast", "path": "hazelcast/src/test/java/com/hazelcast/test/AbstractHazelcastClassRunner.java", "license": "apache-2.0", "size": 14744 }
[ "com.hazelcast.test.annotation.Repeat", "org.junit.runners.model.FrameworkMethod" ]
import com.hazelcast.test.annotation.Repeat; import org.junit.runners.model.FrameworkMethod;
import com.hazelcast.test.annotation.*; import org.junit.runners.model.*;
[ "com.hazelcast.test", "org.junit.runners" ]
com.hazelcast.test; org.junit.runners;
1,509,870
//----------------------------------------------------------------------- public Locale getLocale() { return locale; }
Locale function() { return locale; }
/** * Gets the locale to be used during formatting. * <p> * This is used to lookup any part of the formatter needing specific * localization, such as the text or localized pattern. * * @return the locale of this formatter, not null */
Gets the locale to be used during formatting. This is used to lookup any part of the formatter needing specific localization, such as the text or localized pattern
getLocale
{ "repo_name": "YouDiSN/OpenJDK-Research", "path": "jdk9/jdk/src/java.base/share/classes/java/time/format/DateTimeFormatter.java", "license": "gpl-2.0", "size": 103127 }
[ "java.util.Locale" ]
import java.util.Locale;
import java.util.*;
[ "java.util" ]
java.util;
1,010,661
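A short java.time sketch showing where the locale returned by getLocale() comes from; the pattern and locale are arbitrary examples:
// imports: java.time.format.DateTimeFormatter, java.util.Locale
DateTimeFormatter fmt = DateTimeFormatter.ofPattern("d MMMM uuuu").withLocale(Locale.GERMAN);
Locale used = fmt.getLocale(); // Locale.GERMAN - localized text such as month names will use it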
private void registerCheck(int tokenId, AbstractCheck check) throws CheckstyleException { registerCheck(TokenUtils.getTokenName(tokenId), check); }
void function(int tokenId, AbstractCheck check) throws CheckstyleException { registerCheck(TokenUtils.getTokenName(tokenId), check); }
/** * Register a check for a specified token id. * @param tokenId the id of the token * @param check the check to register * @throws CheckstyleException if Check is misconfigured */
Register a check for a specified token id
registerCheck
{ "repo_name": "liscju/checkstyle", "path": "src/main/java/com/puppycrawl/tools/checkstyle/TreeWalker.java", "license": "lgpl-2.1", "size": 28732 }
[ "com.puppycrawl.tools.checkstyle.api.AbstractCheck", "com.puppycrawl.tools.checkstyle.api.CheckstyleException", "com.puppycrawl.tools.checkstyle.utils.TokenUtils" ]
import com.puppycrawl.tools.checkstyle.api.AbstractCheck; import com.puppycrawl.tools.checkstyle.api.CheckstyleException; import com.puppycrawl.tools.checkstyle.utils.TokenUtils;
import com.puppycrawl.tools.checkstyle.api.*; import com.puppycrawl.tools.checkstyle.utils.*;
[ "com.puppycrawl.tools" ]
com.puppycrawl.tools;
245,820
private static List<String> getFilesSafeForUninstall(AddOn addOn, Set<AddOn> installedAddOns) { if (addOn.getFiles() == null || addOn.getFiles().isEmpty()) { return Collections.emptyList(); } List<String> files = new ArrayList<>(addOn.getFiles()); installedAddOns.forEach( installedAddOn -> { if (installedAddOn == addOn) { return; } List<String> addOnFiles = installedAddOn.getFiles(); if (addOnFiles == null || addOnFiles.isEmpty()) { return; } files.removeAll(addOnFiles); }); return files; }
static List<String> function(AddOn addOn, Set<AddOn> installedAddOns) { if (addOn.getFiles() == null addOn.getFiles().isEmpty()) { return Collections.emptyList(); } List<String> files = new ArrayList<>(addOn.getFiles()); installedAddOns.forEach( installedAddOn -> { if (installedAddOn == addOn) { return; } List<String> addOnFiles = installedAddOn.getFiles(); if (addOnFiles == null addOnFiles.isEmpty()) { return; } files.removeAll(addOnFiles); }); return files; }
/** * Gets the files of the given add-on that can be safely uninstalled, that is, are not in * use/declared by other add-ons. * * @param addOn the add-on whose files should be uninstalled. * @param installedAddOns the add-ons currently installed. * @return the files that can be safely uninstalled. */
Gets the files of the given add-on that can be safely uninstalled, that is, are not in use/declared by other add-ons
getFilesSafeForUninstall
{ "repo_name": "meitar/zaproxy", "path": "zap/src/main/java/org/zaproxy/zap/control/AddOnInstaller.java", "license": "apache-2.0", "size": 33023 }
[ "java.util.ArrayList", "java.util.Collections", "java.util.List", "java.util.Set" ]
import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Set;
import java.util.*;
[ "java.util" ]
java.util;
1,036,229
@Test public void testOnLinkAddedPreSyncFlow() throws Exception { createPowerSpy(); ConversionTable conversionTable = PowerMockito.spy(new ConversionTable()); conversionTable.addEntryConnectionType("LowerNetworkId", "lower"); conversionTable.addEntryConnectionType("UpperNetworkId", "upper"); conversionTable.addEntryConnectionType("LayerizedNetworkId", "layerized"); PowerMockito.doReturn(conversionTable).when(target, "conversionTable"); target.setUpperLinkisync(false); // upperLinkSync Map<String, NetworkInterface> netIfs = new HashMap<>(); NetworkInterface lowerNetIf = new NetworkInterface(dispatcher, "LowerNetworkId"); NetworkInterface upperNetIf = PowerMockito.spy(new NetworkInterface( dispatcher, "UpperNetworkId")); NetworkInterface layerizedNetIf = PowerMockito.spy(new NetworkInterface( dispatcher, "LayerizedNetworkId")); netIfs.put("LowerNetworkId", lowerNetIf); netIfs.put("UpperNetworkId", upperNetIf); netIfs.put("LayerizedNetworkId", layerizedNetIf); PowerMockito.doReturn(netIfs).when(target, "networkInterfaces"); Link link = new Link("LinkId", "SrcNode", "SrcPort", "DstNode", "DstPort"); Link link1_1 = new Link("LinkId1_1", "SrcNode", "SrcPort", "DstNode", "DstPort"); Link link2 = new Link("LinkId2", "SrcNode2", "SrcPort2", "DstNode2", "DstPort2"); Map<String, Link> links = new HashMap<>(); links.put(link1_1.getId(), link1_1); links.put(link2.getId(), link2); PowerMockito.doReturn(links).when(layerizedNetIf).getLinks(); FlowSet layerizedFlows = new FlowSet(); List<BasicFlowMatch> matches = new ArrayList<>(); BasicFlowMatch match = new BasicFlowMatch("InNode", "InPort"); matches.add(match); List<String> path = new ArrayList<>(Arrays.asList("LinkId1_1", "LinkId2")); Map<String, List<FlowAction>> edgeActions = new HashMap<>(); Map<String, String> flowAttributes = new HashMap<>(); BasicFlow flow = new BasicFlow("0", "FlowId", "Owner", true, "0", "none", matches, path, edgeActions, flowAttributes); layerizedFlows.getFlows().put(flow.getFlowId(), flow); PowerMockito.doReturn(layerizedFlows).when(layerizedNetIf).getFlowSet(); LinkLayerizerBoundaryTable boundaryTable = Mockito .mock(LinkLayerizerBoundaryTable.class); LinkLayerizerOnFlow onFlow = Mockito.spy(new LinkLayerizerOnFlow( conversionTable, netIfs, boundaryTable)); Whitebox.setInternalState(target, "linkLayerizerOnFlow", onFlow); PowerMockito.doReturn(new HashMap<>()).when(upperNetIf).getLinks(); boolean resultUpper = target.onLinkAddedPre("UpperNetworkId", link); assertThat(resultUpper, is(false)); }
void function() throws Exception { createPowerSpy(); ConversionTable conversionTable = PowerMockito.spy(new ConversionTable()); conversionTable.addEntryConnectionType(STR, "lower"); conversionTable.addEntryConnectionType(STR, "upper"); conversionTable.addEntryConnectionType(STR, STR); PowerMockito.doReturn(conversionTable).when(target, STR); target.setUpperLinkisync(false); Map<String, NetworkInterface> netIfs = new HashMap<>(); NetworkInterface lowerNetIf = new NetworkInterface(dispatcher, STR); NetworkInterface upperNetIf = PowerMockito.spy(new NetworkInterface( dispatcher, STR)); NetworkInterface layerizedNetIf = PowerMockito.spy(new NetworkInterface( dispatcher, STR)); netIfs.put(STR, lowerNetIf); netIfs.put(STR, upperNetIf); netIfs.put(STR, layerizedNetIf); PowerMockito.doReturn(netIfs).when(target, STR); Link link = new Link(STR, STR, STR, STR, STR); Link link1_1 = new Link(STR, STR, STR, STR, STR); Link link2 = new Link(STR, STR, STR, STR, STR); Map<String, Link> links = new HashMap<>(); links.put(link1_1.getId(), link1_1); links.put(link2.getId(), link2); PowerMockito.doReturn(links).when(layerizedNetIf).getLinks(); FlowSet layerizedFlows = new FlowSet(); List<BasicFlowMatch> matches = new ArrayList<>(); BasicFlowMatch match = new BasicFlowMatch(STR, STR); matches.add(match); List<String> path = new ArrayList<>(Arrays.asList(STR, STR)); Map<String, List<FlowAction>> edgeActions = new HashMap<>(); Map<String, String> flowAttributes = new HashMap<>(); BasicFlow flow = new BasicFlow("0", STR, "Owner", true, "0", "none", matches, path, edgeActions, flowAttributes); layerizedFlows.getFlows().put(flow.getFlowId(), flow); PowerMockito.doReturn(layerizedFlows).when(layerizedNetIf).getFlowSet(); LinkLayerizerBoundaryTable boundaryTable = Mockito .mock(LinkLayerizerBoundaryTable.class); LinkLayerizerOnFlow onFlow = Mockito.spy(new LinkLayerizerOnFlow( conversionTable, netIfs, boundaryTable)); Whitebox.setInternalState(target, STR, onFlow); PowerMockito.doReturn(new HashMap<>()).when(upperNetIf).getLinks(); boolean resultUpper = target.onLinkAddedPre(STR, link); assertThat(resultUpper, is(false)); }
/** * Test method for {@link org.o3project.odenos.component.linklayerizer.LinkLayerizer#onLinkAddedPre(java.lang.String, org.o3project.odenos.core.component.network.topology.Link)}. * @throws Exception */
Test method for <code>org.o3project.odenos.component.linklayerizer.LinkLayerizer#onLinkAddedPre(java.lang.String, org.o3project.odenos.core.component.network.topology.Link)</code>
testOnLinkAddedPreSyncFlow
{ "repo_name": "y-higuchi/odenos", "path": "src/test/java/org/o3project/odenos/component/linklayerizer/LinkLayerizerTest.java", "license": "apache-2.0", "size": 128002 }
[ "java.util.ArrayList", "java.util.Arrays", "java.util.HashMap", "java.util.List", "java.util.Map", "org.hamcrest.CoreMatchers", "org.junit.Assert", "org.mockito.Mockito", "org.o3project.odenos.core.component.ConversionTable", "org.o3project.odenos.core.component.NetworkInterface", "org.o3project.odenos.core.component.network.flow.FlowSet", "org.o3project.odenos.core.component.network.flow.basic.BasicFlow", "org.o3project.odenos.core.component.network.flow.basic.BasicFlowMatch", "org.o3project.odenos.core.component.network.flow.basic.FlowAction", "org.o3project.odenos.core.component.network.topology.Link", "org.powermock.api.mockito.PowerMockito", "org.powermock.reflect.Whitebox" ]
import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import org.hamcrest.CoreMatchers; import org.junit.Assert; import org.mockito.Mockito; import org.o3project.odenos.core.component.ConversionTable; import org.o3project.odenos.core.component.NetworkInterface; import org.o3project.odenos.core.component.network.flow.FlowSet; import org.o3project.odenos.core.component.network.flow.basic.BasicFlow; import org.o3project.odenos.core.component.network.flow.basic.BasicFlowMatch; import org.o3project.odenos.core.component.network.flow.basic.FlowAction; import org.o3project.odenos.core.component.network.topology.Link; import org.powermock.api.mockito.PowerMockito; import org.powermock.reflect.Whitebox;
import java.util.*; import org.hamcrest.*; import org.junit.*; import org.mockito.*; import org.o3project.odenos.core.component.*; import org.o3project.odenos.core.component.network.flow.*; import org.o3project.odenos.core.component.network.flow.basic.*; import org.o3project.odenos.core.component.network.topology.*; import org.powermock.api.mockito.*; import org.powermock.reflect.*;
[ "java.util", "org.hamcrest", "org.junit", "org.mockito", "org.o3project.odenos", "org.powermock.api", "org.powermock.reflect" ]
java.util; org.hamcrest; org.junit; org.mockito; org.o3project.odenos; org.powermock.api; org.powermock.reflect;
2,141,798
public void testUnicodeCleanAndRegex() throws Exception { // Insert statement: final String updateStr = "PREFIX ns: <http://example.org/ns#>\n" + "INSERT DATA { GRAPH ns:graph { ns:auml ns:label \"\u00C4\", \"\u00E4\" } }\n"; final BigdataSailUpdate update = (BigdataSailUpdate) con.prepareUpdate(QueryLanguage.SPARQL, updateStr); update.execute(); // Test query: final String queryStr = "PREFIX ns: <http://example.org/ns#>\n" + "SELECT * { GRAPH ns:graph { ?s ?p ?o FILTER(regex(?o, \"\u00E4\", \"i\")) } }"; assertEquals(2L, countSolutions(queryStr)); }
void function() throws Exception { final String updateStr = STRINSERT DATA { GRAPH ns:graph { ns:auml ns:label \STR, \STR } }\n"; final BigdataSailUpdate update = (BigdataSailUpdate) con.prepareUpdate(QueryLanguage.SPARQL, updateStr); update.execute(); final String queryStr = STRSELECT * { GRAPH ns:graph { ?s ?p ?o FILTER(regex(?o, \STR, \"i\STR; assertEquals(2L, countSolutions(queryStr)); }
/** * This test is based on a forum post. This post provided an example of an * issue with Unicode case-folding in the REGEX operator and a means to * encode the Unicode characters to avoid doubt about which characters were * transmitted and received. * * @throws Exception * * @see <a href= * "https://sourceforge.net/projects/bigdata/forums/forum/676946/topic/7073971" * >Forum post on the REGEX Unicode case-folding issue</a> * * @see <a href="http://sourceforge.net/apps/trac/bigdata/ticket/655"> * SPARQL REGEX operator does not perform case-folding correctly for * Unicode data</a> */
This test is based on a forum post. This post provided an example of an issue with Unicode case-folding in the REGEX operator and a means to encode the Unicode characters to avoid doubt about which characters were transmitted and received
testUnicodeCleanAndRegex
{ "repo_name": "blazegraph/database", "path": "bigdata-sails-test/src/test/java/com/bigdata/rdf/sail/tck/BigdataSPARQLUpdateTest.java", "license": "gpl-2.0", "size": 30784 }
[ "com.bigdata.rdf.sail.BigdataSailUpdate", "org.openrdf.query.QueryLanguage" ]
import com.bigdata.rdf.sail.BigdataSailUpdate; import org.openrdf.query.QueryLanguage;
import com.bigdata.rdf.sail.*; import org.openrdf.query.*;
[ "com.bigdata.rdf", "org.openrdf.query" ]
com.bigdata.rdf; org.openrdf.query;
2,632,150
@Override public void init(Node auv_node) { super.init(auv_node); bullet = new Sphere(32, 32, 0.1f, true, false); bullet.setTextureMode(TextureMode.Projected); bulletCollisionShape = new SphereCollisionShape(0.1f); mat2 = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md"); TextureKey key2 = new TextureKey("Textures/Terrain/Rock/Rock.PNG"); key2.setGenerateMips(true); Texture tex2 = assetManager.loadTexture(key2); mat2.setTexture("ColorMap", tex2); Sphere sphere7 = new Sphere(8, 8, 0.025f); CanonStart = new Geometry("CanonLeftStart", sphere7); Material mark_mat7 = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md"); mark_mat7.setColor("Color", ColorRGBA.Orange); CanonStart.setMaterial(mark_mat7); CanonStart.updateGeometricState(); Rotation_Node.attachChild(CanonStart); Sphere sphere9 = new Sphere(8, 8, 0.025f); CanonEnd = new Geometry("CanonLeftEnd", sphere9); Material mark_mat9 = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md"); mark_mat9.setColor("Color", ColorRGBA.Orange); CanonEnd.setMaterial(mark_mat9); CanonEnd.setLocalTranslation(Vector3f.UNIT_X); CanonEnd.updateGeometricState(); Rotation_Node.attachChild(CanonEnd); Vector3f ray_start = Vector3f.ZERO; Vector3f ray_direction = Vector3f.UNIT_X; Geometry mark4 = new Geometry("Canon_Arrow", new Arrow(ray_direction.mult(1f))); Material mark_mat4 = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md"); mark_mat4.setColor("Color", ColorRGBA.Orange); mark4.setMaterial(mark_mat4); mark4.updateGeometricState(); Rotation_Node.attachChild(mark4); PhysicalExchanger_Node.setLocalTranslation(getPosition()); Quaternion quat = new Quaternion(); quat.fromAngles(getRotation().getX(), getRotation().getY(), getRotation().getZ()); PhysicalExchanger_Node.setLocalRotation(quat); PhysicalExchanger_Node.attachChild(Rotation_Node); auv_node.attachChild(PhysicalExchanger_Node); }
void function(Node auv_node) { super.init(auv_node); bullet = new Sphere(32, 32, 0.1f, true, false); bullet.setTextureMode(TextureMode.Projected); bulletCollisionShape = new SphereCollisionShape(0.1f); mat2 = new Material(assetManager, STR); TextureKey key2 = new TextureKey(STR); key2.setGenerateMips(true); Texture tex2 = assetManager.loadTexture(key2); mat2.setTexture(STR, tex2); Sphere sphere7 = new Sphere(8, 8, 0.025f); CanonStart = new Geometry(STR, sphere7); Material mark_mat7 = new Material(assetManager, STR); mark_mat7.setColor("Color", ColorRGBA.Orange); CanonStart.setMaterial(mark_mat7); CanonStart.updateGeometricState(); Rotation_Node.attachChild(CanonStart); Sphere sphere9 = new Sphere(8, 8, 0.025f); CanonEnd = new Geometry(STR, sphere9); Material mark_mat9 = new Material(assetManager, STR); mark_mat9.setColor("Color", ColorRGBA.Orange); CanonEnd.setMaterial(mark_mat9); CanonEnd.setLocalTranslation(Vector3f.UNIT_X); CanonEnd.updateGeometricState(); Rotation_Node.attachChild(CanonEnd); Vector3f ray_start = Vector3f.ZERO; Vector3f ray_direction = Vector3f.UNIT_X; Geometry mark4 = new Geometry(STR, new Arrow(ray_direction.mult(1f))); Material mark_mat4 = new Material(assetManager, STR); mark_mat4.setColor("Color", ColorRGBA.Orange); mark4.setMaterial(mark_mat4); mark4.updateGeometricState(); Rotation_Node.attachChild(mark4); PhysicalExchanger_Node.setLocalTranslation(getPosition()); Quaternion quat = new Quaternion(); quat.fromAngles(getRotation().getX(), getRotation().getY(), getRotation().getZ()); PhysicalExchanger_Node.setLocalRotation(quat); PhysicalExchanger_Node.attachChild(Rotation_Node); auv_node.attachChild(PhysicalExchanger_Node); }
/** * DON'T CALL THIS METHOD! In this method all the initialising for the * motor will be done and it will be attached to the physicsNode. */
DON'T CALL THIS METHOD! In this method all the initialising for the motor will be done and it will be attached to the physicsNode
init
{ "repo_name": "iti-luebeck/MARS", "path": "MARS_NB/MARS/MARSCore/src/mars/actuators/SpecialManipulators/Canon.java", "license": "bsd-3-clause", "size": 12130 }
[ "com.jme3.asset.TextureKey", "com.jme3.bullet.collision.shapes.SphereCollisionShape", "com.jme3.material.Material", "com.jme3.math.ColorRGBA", "com.jme3.math.Quaternion", "com.jme3.math.Vector3f", "com.jme3.scene.Geometry", "com.jme3.scene.Node", "com.jme3.scene.debug.Arrow", "com.jme3.scene.shape.Sphere", "com.jme3.texture.Texture" ]
import com.jme3.asset.TextureKey; import com.jme3.bullet.collision.shapes.SphereCollisionShape; import com.jme3.material.Material; import com.jme3.math.ColorRGBA; import com.jme3.math.Quaternion; import com.jme3.math.Vector3f; import com.jme3.scene.Geometry; import com.jme3.scene.Node; import com.jme3.scene.debug.Arrow; import com.jme3.scene.shape.Sphere; import com.jme3.texture.Texture;
import com.jme3.asset.*; import com.jme3.bullet.collision.shapes.*; import com.jme3.material.*; import com.jme3.math.*; import com.jme3.scene.*; import com.jme3.scene.debug.*; import com.jme3.scene.shape.*; import com.jme3.texture.*;
[ "com.jme3.asset", "com.jme3.bullet", "com.jme3.material", "com.jme3.math", "com.jme3.scene", "com.jme3.texture" ]
com.jme3.asset; com.jme3.bullet; com.jme3.material; com.jme3.math; com.jme3.scene; com.jme3.texture;
2,532,480
private ServerName randomAssignment(Cluster cluster, HRegionInfo regionInfo, List<ServerName> servers) { int numServers = servers.size(); // servers is not null, numServers > 1 ServerName sn = null; final int maxIterations = numServers * 4; int iterations = 0; do { int i = RANDOM.nextInt(numServers); sn = servers.get(i); } while (cluster.wouldLowerAvailability(regionInfo, sn) && iterations++ < maxIterations); cluster.doAssignRegion(regionInfo, sn); return sn; }
ServerName function(Cluster cluster, HRegionInfo regionInfo, List<ServerName> servers) { int numServers = servers.size(); ServerName sn = null; final int maxIterations = numServers * 4; int iterations = 0; do { int i = RANDOM.nextInt(numServers); sn = servers.get(i); } while (cluster.wouldLowerAvailability(regionInfo, sn) && iterations++ < maxIterations); cluster.doAssignRegion(regionInfo, sn); return sn; }
/** * Used to assign a single region to a random server. */
Used to assign a single region to a random server
randomAssignment
{ "repo_name": "gustavoanatoly/hbase", "path": "hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java", "license": "apache-2.0", "size": 65936 }
[ "java.util.List", "org.apache.hadoop.hbase.HRegionInfo", "org.apache.hadoop.hbase.ServerName" ]
import java.util.List; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.ServerName;
import java.util.*; import org.apache.hadoop.hbase.*;
[ "java.util", "org.apache.hadoop" ]
java.util; org.apache.hadoop;
2,381,524
@Override public void onComponentTag(final Component component, final ComponentTag tag) { if (tag.isOpenClose()) { tag.setType(TagType.OPEN); } }
void function(final Component component, final ComponentTag tag) { if (tag.isOpenClose()) { tag.setType(TagType.OPEN); } }
/** * Make sure we open up open-close tags to open-body-close */
Make sure we open up open-close tags to open-body-close
onComponentTag
{ "repo_name": "topicusonderwijs/wicket", "path": "wicket-core/src/main/java/org/apache/wicket/markup/html/panel/AbstractMarkupSourcingStrategy.java", "license": "apache-2.0", "size": 5097 }
[ "org.apache.wicket.Component", "org.apache.wicket.markup.ComponentTag", "org.apache.wicket.markup.parser.XmlTag" ]
import org.apache.wicket.Component; import org.apache.wicket.markup.ComponentTag; import org.apache.wicket.markup.parser.XmlTag;
import org.apache.wicket.*; import org.apache.wicket.markup.*; import org.apache.wicket.markup.parser.*;
[ "org.apache.wicket" ]
org.apache.wicket;
1,943,437
public void checkBlock(ExtendedBlock b, long minLength, ReplicaState state) throws ReplicaNotFoundException, UnexpectedReplicaStateException { final BInfo binfo = getBInfo(b); if (binfo == null) { throw new ReplicaNotFoundException(b); } if ((state == ReplicaState.FINALIZED && !binfo.isFinalized()) || (state != ReplicaState.FINALIZED && binfo.isFinalized())) { throw new UnexpectedReplicaStateException(b,state); } }
void function(ExtendedBlock b, long minLength, ReplicaState state) throws ReplicaNotFoundException, UnexpectedReplicaStateException { final BInfo binfo = getBInfo(b); if (binfo == null) { throw new ReplicaNotFoundException(b); } if ((state == ReplicaState.FINALIZED && !binfo.isFinalized()) (state != ReplicaState.FINALIZED && binfo.isFinalized())) { throw new UnexpectedReplicaStateException(b,state); } }
/** * Check if a block is valid. * * @param b The block to check. * @param minLength The minimum length that the block must have. May be 0. * @param state If this is null, it is ignored. If it is non-null, we * will check that the replica has this state. * * @throws ReplicaNotFoundException If the replica is not found * * @throws UnexpectedReplicaStateException If the replica is not in the * expected state. */
Check if a block is valid
checkBlock
{ "repo_name": "hash-X/hadoop", "path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java", "license": "apache-2.0", "size": 39855 }
[ "org.apache.hadoop.hdfs.protocol.ExtendedBlock", "org.apache.hadoop.hdfs.server.common.HdfsServerConstants" ]
import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
import org.apache.hadoop.hdfs.protocol.*; import org.apache.hadoop.hdfs.server.common.*;
[ "org.apache.hadoop" ]
org.apache.hadoop;
2,416,562
Rectangle2D getTransformedPrimitiveBounds(AffineTransform txf);
Rectangle2D getTransformedPrimitiveBounds(AffineTransform txf);
/** * Returns the bounds of this node's primitivePaint after applying * the input transform (if any), concatenated with this node's * transform (if any). * * @param txf the affine transform with which this node's transform should * be concatenated. Should not be null. */
Returns the bounds of this node's primitivePaint after applying the input transform (if any), concatenated with this node's transform (if any)
getTransformedPrimitiveBounds
{ "repo_name": "sflyphotobooks/crp-batik", "path": "sources/org/apache/batik/gvt/GraphicsNode.java", "license": "apache-2.0", "size": 13618 }
[ "java.awt.geom.AffineTransform", "java.awt.geom.Rectangle2D" ]
import java.awt.geom.AffineTransform; import java.awt.geom.Rectangle2D;
import java.awt.geom.*;
[ "java.awt" ]
java.awt;
25,234
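A hedged sketch of calling the interface method above; the GraphicsNode instance (node) and the scale factor are assumptions for illustration:
// imports: java.awt.geom.AffineTransform, java.awt.geom.Rectangle2D
AffineTransform extra = AffineTransform.getScaleInstance(2.0, 2.0);
Rectangle2D bounds = node.getTransformedPrimitiveBounds(extra); // extra is concatenated with the node's own transform
if (bounds != null) { // guard against nodes that have no primitive paint
    System.out.println(bounds);
}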
protected final void addInheritedField(Field field) { if (inheritedFields == null) { return; } this.inheritedFields.add(field); }
final void function(Field field) { if (inheritedFields == null) { return; } this.inheritedFields.add(field); }
/** * Marks a field as being inherited from the default Reflected and thus not being saved * <p>if this Reflected is not a child Reflected nothing happens * * @param field the inherited field */
Marks a field as being inherited from the default Reflected and thus not being saved if this Reflected is not a child Reflected nothing happens
addInheritedField
{ "repo_name": "CubeEngine/ReflecT", "path": "core/src/main/java/org/cubeengine/reflect/Reflected.java", "license": "mit", "size": 15387 }
[ "java.lang.reflect.Field" ]
import java.lang.reflect.Field;
import java.lang.reflect.*;
[ "java.lang" ]
java.lang;
1,658,720
public static void runReportToPdfFile( String sourceFileName, String destFileName, Map parameters, JRDataSource jrDataSource ) throws JRException { JasperPrint jasperPrint = JasperFillManager.fillReport(sourceFileName, parameters, jrDataSource); JasperExportManager.exportReportToPdfFile(jasperPrint, destFileName); }
static void function( String sourceFileName, String destFileName, Map parameters, JRDataSource jrDataSource ) throws JRException { JasperPrint jasperPrint = JasperFillManager.fillReport(sourceFileName, parameters, jrDataSource); JasperExportManager.exportReportToPdfFile(jasperPrint, destFileName); }
/** * Fills a report and saves it directly into a PDF file. * The intermediate JasperPrint object is not saved on disk. */
Fills a report and saves it directly into a PDF file. The intermediate JasperPrint object is not saved on disk
runReportToPdfFile
{ "repo_name": "delafer/j7project", "path": "jasper352/csb-jasperreport-dep/src/net/sf/jasperreports/engine/JasperRunManager.java", "license": "gpl-2.0", "size": 16511 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
386,127
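A minimal caller sketch for the fill-and-export helper above; the file names and the report parameter are placeholders, and JREmptyDataSource merely stands in for a real data source:
// imports: java.util.HashMap, java.util.Map, net.sf.jasperreports.engine.JREmptyDataSource, net.sf.jasperreports.engine.JasperRunManager
Map<String, Object> params = new HashMap<>();
params.put("ReportTitle", "Monthly report"); // hypothetical report parameter
JasperRunManager.runReportToPdfFile("report.jasper", "report.pdf", params, new JREmptyDataSource()); // throws JRException on failure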
void onCreate(Bundle savedInstanceState);
void onCreate(Bundle savedInstanceState);
/** * binds to Fragment#onCreate(Bundle) * * @param savedInstanceState * the fragments last known state */
binds to Fragment#onCreate(Bundle)
onCreate
{ "repo_name": "tuxbox/sniggle-android-utils", "path": "utils/src/main/java/me/sniggle/android/utils/presenter/FragmentPresenter.java", "license": "bsd-3-clause", "size": 1696 }
[ "android.os.Bundle" ]
import android.os.Bundle;
import android.os.*;
[ "android.os" ]
android.os;
280,589
public MutableVector2f getPosition(MutableVector2f vector) { vector.setXY(mX, mY); return vector; }
MutableVector2f function(MutableVector2f vector) { vector.setXY(mX, mY); return vector; }
/** * <p>Get the position of the rectangle</p> * * @param vector the vector that will contain(s) the position * * @return the <code>vector</code> */
Get the position of the rectangle
getPosition
{ "repo_name": "Wolftein/Quark-Engine", "path": "Quark/src/main/java/ar/com/quark/mathematic/geometry/Rectangle.java", "license": "apache-2.0", "size": 8946 }
[ "ar.com.quark.mathematic.MutableVector2f" ]
import ar.com.quark.mathematic.MutableVector2f;
import ar.com.quark.mathematic.*;
[ "ar.com.quark" ]
ar.com.quark;
105,160
public Uncertainties getMeasures() { final XMLAttributes attrs = node.getAttributes(); final Integer id = (attrs.hasAttribute(ID)) ? Integer.parseInt(attrs.getValue(ID)) : null; final String modelName = attrs.getValue(NAME); final String comment = attrs.getValue(COMMENT); final Double r2 = (attrs.hasAttribute(R2)) ? Double.parseDouble(attrs.getValue(R2)) : null; final Double rms = (attrs.hasAttribute(RMS)) ? Double.parseDouble(attrs.getValue(RMS)) : null; final Double sse = (attrs.hasAttribute(SSE)) ? Double.parseDouble(attrs.getValue(SSE)) : null; final Double aic = (attrs.hasAttribute(AIC)) ? Double.parseDouble(attrs.getValue(AIC)) : null; final Double bic = (attrs.hasAttribute(BIC)) ? Double.parseDouble(attrs.getValue(BIC)) : null; final Integer dof = (attrs.hasAttribute(DOF)) ? Integer.parseInt(attrs.getValue(DOF)) : null; return new UncertaintiesImpl(id, modelName, comment, r2, rms, sse, aic, bic, dof); }
Uncertainties function() { final XMLAttributes attrs = node.getAttributes(); final Integer id = (attrs.hasAttribute(ID)) ? Integer.parseInt(attrs.getValue(ID)) : null; final String modelName = attrs.getValue(NAME); final String comment = attrs.getValue(COMMENT); final Double r2 = (attrs.hasAttribute(R2)) ? Double.parseDouble(attrs.getValue(R2)) : null; final Double rms = (attrs.hasAttribute(RMS)) ? Double.parseDouble(attrs.getValue(RMS)) : null; final Double sse = (attrs.hasAttribute(SSE)) ? Double.parseDouble(attrs.getValue(SSE)) : null; final Double aic = (attrs.hasAttribute(AIC)) ? Double.parseDouble(attrs.getValue(AIC)) : null; final Double bic = (attrs.hasAttribute(BIC)) ? Double.parseDouble(attrs.getValue(BIC)) : null; final Integer dof = (attrs.hasAttribute(DOF)) ? Integer.parseInt(attrs.getValue(DOF)) : null; return new UncertaintiesImpl(id, modelName, comment, r2, rms, sse, aic, bic, dof); }
/** * Gets uncertainty measures. */
Gets uncertainty measures
getMeasures
{ "repo_name": "SiLeBAT/pmfml", "path": "src/main/java/de/bund/bfr/pmfml/sbml/UncertaintyNode.java", "license": "gpl-3.0", "size": 4152 }
[ "org.sbml.jsbml.xml.XMLAttributes" ]
import org.sbml.jsbml.xml.XMLAttributes;
import org.sbml.jsbml.xml.*;
[ "org.sbml.jsbml" ]
org.sbml.jsbml;
2,173,740
public void initializePackageContents() { if (isInitialized) return; isInitialized = true; // Initialize package setName(eNAME); setNsPrefix(eNS_PREFIX); setNsURI(eNS_URI); // Obtain other dependent packages GrammarPackage theGrammarPackage = (GrammarPackage)EPackage.Registry.INSTANCE.getEPackage(GrammarPackage.eNS_URI); // Create type parameters // Set bounds for type parameters // Add supertypes to classes visibleNodeEClass.getESuperTypes().add(this.getNode()); whitespaceNodeEClass.getESuperTypes().add(this.getVisibleNode()); terminalNodeEClass.getESuperTypes().add(this.getVisibleNode()); removedTerminalNodeEClass.getESuperTypes().add(this.getTerminalNode()); insertedTerminalNodeEClass.getESuperTypes().add(this.getTerminalNode()); insertedFeatureSetTerminalNodeEClass.getESuperTypes().add(this.getInsertedTerminalNode()); insertedFeatureSetTerminalNodeEClass.getESuperTypes().add(this.getFeatureSetTerminalNode()); featureSetValueEClass.getESuperTypes().add(this.getNode()); featureSetValueEClass.getESuperTypes().add(this.getFeatureSet()); featureSetTerminalNodeEClass.getESuperTypes().add(this.getTerminalNode()); featureSetTerminalNodeEClass.getESuperTypes().add(this.getFeatureSet()); compositeNodeEClass.getESuperTypes().add(this.getNode()); featureSetCompositeNodeEClass.getESuperTypes().add(this.getCompositeNode()); featureSetCompositeNodeEClass.getESuperTypes().add(this.getFeatureSet()); pushElementEClass.getESuperTypes().add(this.getNode()); featureSetPushElementEClass.getESuperTypes().add(this.getPushElement()); featureSetPushElementEClass.getESuperTypes().add(this.getFeatureSet()); popElementEClass.getESuperTypes().add(this.getNode()); // Initialize classes, features, and operations; add parameters initEClass(nodeEClass, Node.class, "Node", IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEClass(visibleNodeEClass, VisibleNode.class, "VisibleNode", IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEAttribute(getVisibleNode_Start(), ecorePackage.getEInt(), "start", null, 1, 1, VisibleNode.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED); initEAttribute(getVisibleNode_Length(), ecorePackage.getEInt(), "length", null, 1, 1, VisibleNode.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED); initEClass(whitespaceNodeEClass, WhitespaceNode.class, "WhitespaceNode", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEReference(getWhitespaceNode_Terminal(), theGrammarPackage.getTerminal(), null, "terminal", null, 0, 1, WhitespaceNode.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED); initEClass(terminalNodeEClass, TerminalNode.class, "TerminalNode", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEReference(getTerminalNode_Terminal(), theGrammarPackage.getTerminal(), null, "terminal", null, 0, 1, TerminalNode.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED); initEAttribute(getTerminalNode_Content(), ecorePackage.getEString(), "content", null, 0, 1, TerminalNode.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED); initEClass(removedTerminalNodeEClass, RemovedTerminalNode.class, "RemovedTerminalNode", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEClass(insertedTerminalNodeEClass, 
InsertedTerminalNode.class, "InsertedTerminalNode", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEClass(insertedFeatureSetTerminalNodeEClass, InsertedFeatureSetTerminalNode.class, "InsertedFeatureSetTerminalNode", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEClass(featureSetEClass, FeatureSet.class, "FeatureSet", IS_ABSTRACT, IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEAttribute(getFeatureSet_FeatureName(), ecorePackage.getEString(), "featureName", null, 0, 1, FeatureSet.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED); initEClass(featureSetValueEClass, FeatureSetValue.class, "FeatureSetValue", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEAttribute(getFeatureSetValue_Value(), ecorePackage.getEString(), "value", null, 0, 1, FeatureSetValue.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED); initEClass(featureSetTerminalNodeEClass, FeatureSetTerminalNode.class, "FeatureSetTerminalNode", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEClass(compositeNodeEClass, CompositeNode.class, "CompositeNode", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEReference(getCompositeNode_Nonterminal(), theGrammarPackage.getRule(), null, "nonterminal", null, 0, 1, CompositeNode.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED); initEReference(getCompositeNode_Children(), this.getNode(), null, "children", null, 0, -1, CompositeNode.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED); initEClass(featureSetCompositeNodeEClass, FeatureSetCompositeNode.class, "FeatureSetCompositeNode", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEClass(pushElementEClass, PushElement.class, "PushElement", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEReference(getPushElement_Eclass(), ecorePackage.getEClass(), null, "eclass", null, 1, 1, PushElement.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED); initEClass(featureSetPushElementEClass, FeatureSetPushElement.class, "FeatureSetPushElement", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEClass(popElementEClass, PopElement.class, "PopElement", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); // Create resource createResource(eNS_URI); }
void function() { if (isInitialized) return; isInitialized = true; setName(eNAME); setNsPrefix(eNS_PREFIX); setNsURI(eNS_URI); GrammarPackage theGrammarPackage = (GrammarPackage)EPackage.Registry.INSTANCE.getEPackage(GrammarPackage.eNS_URI); visibleNodeEClass.getESuperTypes().add(this.getNode()); whitespaceNodeEClass.getESuperTypes().add(this.getVisibleNode()); terminalNodeEClass.getESuperTypes().add(this.getVisibleNode()); removedTerminalNodeEClass.getESuperTypes().add(this.getTerminalNode()); insertedTerminalNodeEClass.getESuperTypes().add(this.getTerminalNode()); insertedFeatureSetTerminalNodeEClass.getESuperTypes().add(this.getInsertedTerminalNode()); insertedFeatureSetTerminalNodeEClass.getESuperTypes().add(this.getFeatureSetTerminalNode()); featureSetValueEClass.getESuperTypes().add(this.getNode()); featureSetValueEClass.getESuperTypes().add(this.getFeatureSet()); featureSetTerminalNodeEClass.getESuperTypes().add(this.getTerminalNode()); featureSetTerminalNodeEClass.getESuperTypes().add(this.getFeatureSet()); compositeNodeEClass.getESuperTypes().add(this.getNode()); featureSetCompositeNodeEClass.getESuperTypes().add(this.getCompositeNode()); featureSetCompositeNodeEClass.getESuperTypes().add(this.getFeatureSet()); pushElementEClass.getESuperTypes().add(this.getNode()); featureSetPushElementEClass.getESuperTypes().add(this.getPushElement()); featureSetPushElementEClass.getESuperTypes().add(this.getFeatureSet()); popElementEClass.getESuperTypes().add(this.getNode()); initEClass(nodeEClass, Node.class, "Node", IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEClass(visibleNodeEClass, VisibleNode.class, STR, IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEAttribute(getVisibleNode_Start(), ecorePackage.getEInt(), "start", null, 1, 1, VisibleNode.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED); initEAttribute(getVisibleNode_Length(), ecorePackage.getEInt(), STR, null, 1, 1, VisibleNode.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED); initEClass(whitespaceNodeEClass, WhitespaceNode.class, STR, !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEReference(getWhitespaceNode_Terminal(), theGrammarPackage.getTerminal(), null, STR, null, 0, 1, WhitespaceNode.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED); initEClass(terminalNodeEClass, TerminalNode.class, STR, !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEReference(getTerminalNode_Terminal(), theGrammarPackage.getTerminal(), null, STR, null, 0, 1, TerminalNode.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED); initEAttribute(getTerminalNode_Content(), ecorePackage.getEString(), STR, null, 0, 1, TerminalNode.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED); initEClass(removedTerminalNodeEClass, RemovedTerminalNode.class, STR, !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEClass(insertedTerminalNodeEClass, InsertedTerminalNode.class, STR, !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEClass(insertedFeatureSetTerminalNodeEClass, InsertedFeatureSetTerminalNode.class, STR, !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEClass(featureSetEClass, FeatureSet.class, STR, IS_ABSTRACT, 
IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEAttribute(getFeatureSet_FeatureName(), ecorePackage.getEString(), STR, null, 0, 1, FeatureSet.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED); initEClass(featureSetValueEClass, FeatureSetValue.class, STR, !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEAttribute(getFeatureSetValue_Value(), ecorePackage.getEString(), "value", null, 0, 1, FeatureSetValue.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED); initEClass(featureSetTerminalNodeEClass, FeatureSetTerminalNode.class, STR, !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEClass(compositeNodeEClass, CompositeNode.class, STR, !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEReference(getCompositeNode_Nonterminal(), theGrammarPackage.getRule(), null, STR, null, 0, 1, CompositeNode.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED); initEReference(getCompositeNode_Children(), this.getNode(), null, STR, null, 0, -1, CompositeNode.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED); initEClass(featureSetCompositeNodeEClass, FeatureSetCompositeNode.class, STR, !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEClass(pushElementEClass, PushElement.class, STR, !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEReference(getPushElement_Eclass(), ecorePackage.getEClass(), null, STR, null, 1, 1, PushElement.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED); initEClass(featureSetPushElementEClass, FeatureSetPushElement.class, STR, !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); initEClass(popElementEClass, PopElement.class, STR, !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS); createResource(eNS_URI); }
/** * Complete the initialization of the package and its meta-model. This * method is guarded to have no effect on any invocation but its first. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */
Complete the initialization of the package and its meta-model. This method is guarded to have no effect on any invocation but its first.
initializePackageContents
{ "repo_name": "balazsgrill/temon", "path": "hu.temon/src/hu/temon/ast/impl/AstPackageImpl.java", "license": "epl-1.0", "size": 19742 }
[ "hu.temon.ast.CompositeNode", "hu.temon.ast.FeatureSet", "hu.temon.ast.FeatureSetCompositeNode", "hu.temon.ast.FeatureSetPushElement", "hu.temon.ast.FeatureSetTerminalNode", "hu.temon.ast.FeatureSetValue", "hu.temon.ast.InsertedFeatureSetTerminalNode", "hu.temon.ast.InsertedTerminalNode", "hu.temon.ast.Node", "hu.temon.ast.PopElement", "hu.temon.ast.PushElement", "hu.temon.ast.RemovedTerminalNode", "hu.temon.ast.TerminalNode", "hu.temon.ast.VisibleNode", "hu.temon.ast.WhitespaceNode", "hu.temon.grammar.GrammarPackage", "org.eclipse.emf.ecore.EPackage" ]
import hu.temon.ast.CompositeNode; import hu.temon.ast.FeatureSet; import hu.temon.ast.FeatureSetCompositeNode; import hu.temon.ast.FeatureSetPushElement; import hu.temon.ast.FeatureSetTerminalNode; import hu.temon.ast.FeatureSetValue; import hu.temon.ast.InsertedFeatureSetTerminalNode; import hu.temon.ast.InsertedTerminalNode; import hu.temon.ast.Node; import hu.temon.ast.PopElement; import hu.temon.ast.PushElement; import hu.temon.ast.RemovedTerminalNode; import hu.temon.ast.TerminalNode; import hu.temon.ast.VisibleNode; import hu.temon.ast.WhitespaceNode; import hu.temon.grammar.GrammarPackage; import org.eclipse.emf.ecore.EPackage;
import hu.temon.ast.*; import hu.temon.grammar.*; import org.eclipse.emf.ecore.*;
[ "hu.temon.ast", "hu.temon.grammar", "org.eclipse.emf" ]
hu.temon.ast; hu.temon.grammar; org.eclipse.emf;
2,461,718
public void run() { logger.info("Starting run method...."); try { startListening(); } catch (SQLException e1) { logger .fatal(props .getProperty("sms.monitoring.responses.ErrorFatalListeningDB")); logger.fatal(e1.toString()); return; } while (true) { try { listenForResponses(); sleep(3000); } catch (SQLException e1) { logger .error(props .getProperty("sms.monitoring.responses.ErrorProcessing")); logger.error(e1.toString()); } catch (InterruptedException e) { logger.error(props .getProperty("sms.monitoring.responses.ErrorSleeping")); logger.error(e.toString()); } } }
void function() { logger.info(STR); try { startListening(); } catch (SQLException e1) { logger .fatal(props .getProperty(STR)); logger.fatal(e1.toString()); return; } while (true) { try { listenForResponses(); sleep(3000); } catch (SQLException e1) { logger .error(props .getProperty(STR)); logger.error(e1.toString()); } catch (InterruptedException e) { logger.error(props .getProperty(STR)); logger.error(e.toString()); } } }
/** * This Thread run forever listening for incoming responses in database in * order to process them */
This Thread run forever listening for incoming responses in database in order to process them
run
{ "repo_name": "barbaramartina/Java-SMSLib-PatientMonitoring", "path": "SMSMonitoring/src/sms/monitoring/responses/ResponsesMonitor.java", "license": "gpl-3.0", "size": 5167 }
[ "java.sql.SQLException" ]
import java.sql.SQLException;
import java.sql.*;
[ "java.sql" ]
java.sql;
3,456
protected void addResult(String ovar, Data data) { _out.put(ovar, data); }
void function(String ovar, Data data) { _out.put(ovar, data); }
/** * Add the output variable name and generated output data to the ResultVariable * object. Called during the execution of {@link PreparedScript}'s * {@link PreparedScript#executeScript executeScript} method. * * @param ovar output variable name * @param data generated output data */
Add the output variable name and generated output data to the ResultVariable object. Called during the execution of <code>PreparedScript</code>'s <code>PreparedScript#executeScript executeScript</code> method
addResult
{ "repo_name": "deroneriksson/incubator-systemml", "path": "src/main/java/org/apache/sysml/api/jmlc/ResultVariables.java", "license": "apache-2.0", "size": 6003 }
[ "org.apache.sysml.runtime.instructions.cp.Data" ]
import org.apache.sysml.runtime.instructions.cp.Data;
import org.apache.sysml.runtime.instructions.cp.*;
[ "org.apache.sysml" ]
org.apache.sysml;
2,515,666
@Idempotent @ReadOnly(isCoordinated = true) SnapshotDiffReportListing getSnapshotDiffReportListing(String snapshotRoot, String fromSnapshot, String toSnapshot, byte[] startPath, int index) throws IOException;
@ReadOnly(isCoordinated = true) SnapshotDiffReportListing getSnapshotDiffReportListing(String snapshotRoot, String fromSnapshot, String toSnapshot, byte[] startPath, int index) throws IOException;
/** * Get the difference between two snapshots of a directory iteratively. * * @param snapshotRoot * full path of the directory where snapshots are taken * @param fromSnapshot * snapshot name of the from point. Null indicates the current * tree * @param toSnapshot * snapshot name of the to point. Null indicates the current * tree. * @param startPath * path relative to the snapshottable root directory from where the * snapshotdiff computation needs to start across multiple rpc calls * @param index * index in the created or deleted list of the directory at which * the snapshotdiff computation stopped during the last rpc call * as the no of entries exceeded the snapshotdiffentry limit. -1 * indicates, the snapshotdiff compuatation needs to start right * from the startPath provided. * @return The difference report represented as a {@link SnapshotDiffReport}. * @throws IOException on error */
Get the difference between two snapshots of a directory iteratively
getSnapshotDiffReportListing
{ "repo_name": "apurtell/hadoop", "path": "hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java", "license": "apache-2.0", "size": 71843 }
[ "java.io.IOException", "org.apache.hadoop.hdfs.server.namenode.ha.ReadOnly" ]
import java.io.IOException; import org.apache.hadoop.hdfs.server.namenode.ha.ReadOnly;
import java.io.*; import org.apache.hadoop.hdfs.server.namenode.ha.*;
[ "java.io", "org.apache.hadoop" ]
java.io; org.apache.hadoop;
2,595,450
protected void executeMojo( String groupId, String artifactId, String version, String goal, String configuration, Dependency... dependencies) throws MojoExecutionException, MojoFailureException { final Plugin plugin = new Plugin(); plugin.setArtifactId(artifactId); plugin.setGroupId(groupId); plugin.setVersion(version); plugin.setDependencies(Arrays.asList(dependencies)); getLog().debug(Locale.getString(AbstractSarlMojo.class, "LAUNCHING", plugin.getId())); //$NON-NLS-1$ final PluginDescriptor pluginDescriptor = this.mavenHelper.loadPlugin(plugin); if (pluginDescriptor == null) { throw new MojoExecutionException(Locale.getString(AbstractSarlMojo.class, "PLUGIN_NOT_FOUND", plugin.getId())); //$NON-NLS-1$ } final MojoDescriptor mojoDescriptor = pluginDescriptor.getMojo(goal); if (mojoDescriptor == null) { throw new MojoExecutionException(Locale.getString(AbstractSarlMojo.class, "GOAL_NOT_FOUND", goal)); //$NON-NLS-1$ } final Xpp3Dom mojoXml; try { mojoXml = toXpp3Dom(mojoDescriptor.getMojoConfiguration()); } catch (PlexusConfigurationException e1) { throw new MojoExecutionException(e1.getLocalizedMessage(), e1); } Xpp3Dom configurationXml = null; if (configuration != null && !configuration.isEmpty()) { try (StringReader sr = new StringReader(configuration)) { try { configurationXml = Xpp3DomBuilder.build(sr); } catch (XmlPullParserException | IOException e) { getLog().debug(e); } } } if (configurationXml != null) { configurationXml = Xpp3DomUtils.mergeXpp3Dom( configurationXml, mojoXml); } else { configurationXml = mojoXml; } getLog().debug(Locale.getString(AbstractSarlMojo.class, "CONFIGURATION_FOR", //$NON-NLS-1$ plugin.getId(), configurationXml.toString())); final MojoExecution execution = new MojoExecution(mojoDescriptor, configurationXml); this.mavenHelper.executeMojo(execution); }
void function( String groupId, String artifactId, String version, String goal, String configuration, Dependency... dependencies) throws MojoExecutionException, MojoFailureException { final Plugin plugin = new Plugin(); plugin.setArtifactId(artifactId); plugin.setGroupId(groupId); plugin.setVersion(version); plugin.setDependencies(Arrays.asList(dependencies)); getLog().debug(Locale.getString(AbstractSarlMojo.class, STR, plugin.getId())); final PluginDescriptor pluginDescriptor = this.mavenHelper.loadPlugin(plugin); if (pluginDescriptor == null) { throw new MojoExecutionException(Locale.getString(AbstractSarlMojo.class, STR, plugin.getId())); } final MojoDescriptor mojoDescriptor = pluginDescriptor.getMojo(goal); if (mojoDescriptor == null) { throw new MojoExecutionException(Locale.getString(AbstractSarlMojo.class, STR, goal)); } final Xpp3Dom mojoXml; try { mojoXml = toXpp3Dom(mojoDescriptor.getMojoConfiguration()); } catch (PlexusConfigurationException e1) { throw new MojoExecutionException(e1.getLocalizedMessage(), e1); } Xpp3Dom configurationXml = null; if (configuration != null && !configuration.isEmpty()) { try (StringReader sr = new StringReader(configuration)) { try { configurationXml = Xpp3DomBuilder.build(sr); } catch (XmlPullParserException | IOException e) { getLog().debug(e); } } } if (configurationXml != null) { configurationXml = Xpp3DomUtils.mergeXpp3Dom( configurationXml, mojoXml); } else { configurationXml = mojoXml; } getLog().debug(Locale.getString(AbstractSarlMojo.class, STR, plugin.getId(), configurationXml.toString())); final MojoExecution execution = new MojoExecution(mojoDescriptor, configurationXml); this.mavenHelper.executeMojo(execution); }
/** Execute another MOJO. * * @param groupId - identifier of the MOJO plugin group. * @param artifactId - identifier of the MOJO plugin artifact. * @param version - version of the MOJO plugin version. * @param goal - the goal to run. * @param configuration - the XML code for the configuration. * @param dependencies - the dependencies of the plugin. * @throws MojoExecutionException when cannot run the MOJO. * @throws MojoFailureException when the build failed. */
Execute another MOJO
executeMojo
{ "repo_name": "jgfoster/sarl", "path": "main/internalmaven/sarl-maven-plugin/src/main/java/io/sarl/maven/compiler/AbstractSarlMojo.java", "license": "apache-2.0", "size": 11627 }
[ "java.io.IOException", "java.io.StringReader", "java.util.Arrays", "org.apache.maven.model.Dependency", "org.apache.maven.model.Plugin", "org.apache.maven.plugin.MojoExecution", "org.apache.maven.plugin.MojoExecutionException", "org.apache.maven.plugin.MojoFailureException", "org.apache.maven.plugin.descriptor.MojoDescriptor", "org.apache.maven.plugin.descriptor.PluginDescriptor", "org.arakhne.afc.vmutil.locale.Locale", "org.codehaus.plexus.configuration.PlexusConfigurationException", "org.codehaus.plexus.util.xml.Xpp3Dom", "org.codehaus.plexus.util.xml.Xpp3DomBuilder", "org.codehaus.plexus.util.xml.Xpp3DomUtils", "org.codehaus.plexus.util.xml.pull.XmlPullParserException" ]
import java.io.IOException; import java.io.StringReader; import java.util.Arrays; import org.apache.maven.model.Dependency; import org.apache.maven.model.Plugin; import org.apache.maven.plugin.MojoExecution; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.plugin.descriptor.MojoDescriptor; import org.apache.maven.plugin.descriptor.PluginDescriptor; import org.arakhne.afc.vmutil.locale.Locale; import org.codehaus.plexus.configuration.PlexusConfigurationException; import org.codehaus.plexus.util.xml.Xpp3Dom; import org.codehaus.plexus.util.xml.Xpp3DomBuilder; import org.codehaus.plexus.util.xml.Xpp3DomUtils; import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
import java.io.*; import java.util.*; import org.apache.maven.model.*; import org.apache.maven.plugin.*; import org.apache.maven.plugin.descriptor.*; import org.arakhne.afc.vmutil.locale.*; import org.codehaus.plexus.configuration.*; import org.codehaus.plexus.util.xml.*; import org.codehaus.plexus.util.xml.pull.*;
[ "java.io", "java.util", "org.apache.maven", "org.arakhne.afc", "org.codehaus.plexus" ]
java.io; java.util; org.apache.maven; org.arakhne.afc; org.codehaus.plexus;
895,982
protected void onPtrSaveInstanceState(Bundle saveState) { }
void function(Bundle saveState) { }
/** * Called by {@link #onSaveInstanceState()} so that derivative classes can * save their instance state. * * @param saveState - Bundle to be updated with saved state. */
Called by <code>#onSaveInstanceState()</code> so that derivative classes can save their instance state
onPtrSaveInstanceState
{ "repo_name": "muziyouyou/MyCustomPullToRefresh", "path": "PullToRefresh/src/com/handmark/pulltorefresh/library/PullToRefreshBase.java", "license": "apache-2.0", "size": 46560 }
[ "android.os.Bundle" ]
import android.os.Bundle;
import android.os.*;
[ "android.os" ]
android.os;
536,069
Double getExpressionValue( Expression expression, Map<? extends DimensionalItemObject, Double> valueMap, Map<String, Double> constantMap, Map<String, Integer> orgUnitCountMap, Integer days, Set<DataElementOperand> incompleteValues );
Double getExpressionValue( Expression expression, Map<? extends DimensionalItemObject, Double> valueMap, Map<String, Double> constantMap, Map<String, Integer> orgUnitCountMap, Integer days, Set<DataElementOperand> incompleteValues );
/** * Generates the calculated value for the given expression base on the values * supplied in the value map, constant map and days. * * @param expression the expression which holds the formula for the calculation. * @param valueMap the mapping between data element operands and values to * use in the calculation. * @param constantMap the mapping between the constant uid and value to use * in the calculation. * @param orgUnitCountMap the mapping between organisation unit group uid and * count of organisation units to use in the calculation. * @param days the number of days to use in the calculation. * @param set of data element operands that have values but they are incomplete * (for example due to aggregation from organisationUnit children where * not all children had a value.) * @return the calculated value as a double. */
Generates the calculated value for the given expression base on the values supplied in the value map, constant map and days
getExpressionValue
{ "repo_name": "uonafya/jphes-core", "path": "dhis-2/dhis-api/src/main/java/org/hisp/dhis/expression/ExpressionService.java", "license": "bsd-3-clause", "size": 21926 }
[ "java.util.Map", "java.util.Set", "org.hisp.dhis.common.DimensionalItemObject", "org.hisp.dhis.dataelement.DataElementOperand" ]
import java.util.Map; import java.util.Set; import org.hisp.dhis.common.DimensionalItemObject; import org.hisp.dhis.dataelement.DataElementOperand;
import java.util.*; import org.hisp.dhis.common.*; import org.hisp.dhis.dataelement.*;
[ "java.util", "org.hisp.dhis" ]
java.util; org.hisp.dhis;
2,244,223
public boolean isChannelBlue(int index) { return model.isColorComponent(Renderer.BLUE_BAND, index); }
boolean function(int index) { return model.isColorComponent(Renderer.BLUE_BAND, index); }
/** * Implemented as specified by the {@link ImViewer} interface. * @see ImViewer#isChannelBlue(int) */
Implemented as specified by the <code>ImViewer</code> interface
isChannelBlue
{ "repo_name": "chris-allan/openmicroscopy", "path": "components/insight/SRC/org/openmicroscopy/shoola/agents/imviewer/view/ImViewerComponent.java", "license": "gpl-2.0", "size": 95777 }
[ "org.openmicroscopy.shoola.agents.metadata.rnd.Renderer" ]
import org.openmicroscopy.shoola.agents.metadata.rnd.Renderer;
import org.openmicroscopy.shoola.agents.metadata.rnd.*;
[ "org.openmicroscopy.shoola" ]
org.openmicroscopy.shoola;
2,259,153
public static Constructor<?> getConstructor(Class<?> clazz, Class<?>... parameterTypes) throws NoSuchMethodException { Class<?>[] primitiveTypes = DataType.getPrimitive(parameterTypes); for (Constructor<?> constructor : clazz.getConstructors()) { if (!DataType.compare(DataType.getPrimitive(constructor.getParameterTypes()), primitiveTypes)) { continue; } return constructor; } throw new NoSuchMethodException("There is no such constructor in this class with the specified parameter types"); }
static Constructor<?> function(Class<?> clazz, Class<?>... parameterTypes) throws NoSuchMethodException { Class<?>[] primitiveTypes = DataType.getPrimitive(parameterTypes); for (Constructor<?> constructor : clazz.getConstructors()) { if (!DataType.compare(DataType.getPrimitive(constructor.getParameterTypes()), primitiveTypes)) { continue; } return constructor; } throw new NoSuchMethodException(STR); }
/** * Returns the constructor of a given class with the given parameter types * * @param clazz Target class * @param parameterTypes Parameter types of the desired constructor * @return The constructor of the target class with the specified parameter types * @throws NoSuchMethodException If the desired constructor with the specified parameter types cannot be found * @see DataType * @see DataType#getPrimitive(Class[]) * @see DataType#compare(Class[], Class[]) */
Returns the constructor of a given class with the given parameter types
getConstructor
{ "repo_name": "TheApocalypseMC/FunCore", "path": "src/com/theapocalypsemc/funcore/fx/ReflectionUtils.java", "license": "gpl-2.0", "size": 30770 }
[ "java.lang.reflect.Constructor" ]
import java.lang.reflect.Constructor;
import java.lang.reflect.*;
[ "java.lang" ]
java.lang;
290,404
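The getConstructor record above resolves constructors while treating primitives and their boxed wrappers as interchangeable through the project's DataType helper, which is not shown here. As an illustration only, a minimal stand-alone sketch of the same wrapper-to-primitive matching with plain JDK reflection (the PRIMITIVES table and the find() helper are hypothetical names, not the project's API):

import java.lang.reflect.Constructor;
import java.util.Map;

public class ConstructorLookupSketch {
    // Hypothetical wrapper-to-primitive table standing in for the project's DataType helper.
    private static final Map<Class<?>, Class<?>> PRIMITIVES = Map.of(
            Integer.class, int.class, Long.class, long.class, Double.class, double.class,
            Boolean.class, boolean.class, Character.class, char.class);

    static Class<?> unwrap(Class<?> c) {
        return PRIMITIVES.getOrDefault(c, c);
    }

    static Constructor<?> find(Class<?> clazz, Class<?>... parameterTypes) throws NoSuchMethodException {
        outer:
        for (Constructor<?> ctor : clazz.getConstructors()) {
            Class<?>[] actual = ctor.getParameterTypes();
            if (actual.length != parameterTypes.length) continue;
            for (int i = 0; i < actual.length; i++) {
                if (!unwrap(actual[i]).equals(unwrap(parameterTypes[i]))) continue outer;
            }
            return ctor;
        }
        throw new NoSuchMethodException("No constructor with the given parameter types");
    }

    public static void main(String[] args) throws Exception {
        // Integer.class matches the int parameter of StringBuilder(int capacity).
        Constructor<?> ctor = find(StringBuilder.class, Integer.class);
        System.out.println(ctor);
    }
}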
@Test public void testParmeterRoute() { RegistryDirectory registryDirectory = getRegistryDirectory(); List<URL> serviceUrls = new ArrayList<URL>(); serviceUrls.add(SERVICEURL.addParameter("methods", "getXXX1.napoli")); serviceUrls.add(SERVICEURL2.addParameter("methods", "getXXX1.MORGAN,getXXX2")); serviceUrls.add(SERVICEURL3.addParameter("methods", "getXXX1.morgan,getXXX2,getXXX3")); registryDirectory.notify(serviceUrls); invocation = new RpcInvocation( Constants.$INVOKE, new Class[]{String.class, String[].class, Object[].class}, new Object[]{"getXXX1", new String[]{"Enum"}, new Object[]{Param.MORGAN}}); List invokers = registryDirectory.list(invocation); Assert.assertEquals(1, invokers.size()); }
void function() { RegistryDirectory registryDirectory = getRegistryDirectory(); List<URL> serviceUrls = new ArrayList<URL>(); serviceUrls.add(SERVICEURL.addParameter(STR, STR)); serviceUrls.add(SERVICEURL2.addParameter(STR, STR)); serviceUrls.add(SERVICEURL3.addParameter(STR, STR)); registryDirectory.notify(serviceUrls); invocation = new RpcInvocation( Constants.$INVOKE, new Class[]{String.class, String[].class, Object[].class}, new Object[]{STR, new String[]{"Enum"}, new Object[]{Param.MORGAN}}); List invokers = registryDirectory.list(invocation); Assert.assertEquals(1, invokers.size()); }
/** * When the first arg of a method is String or Enum, Registry server can do parameter-value-based routing. */
When the first arg of a method is String or Enum, Registry server can do parameter-value-based routing
testParmeterRoute
{ "repo_name": "mingbotang/dubbo", "path": "dubbo-registry/dubbo-registry-default/src/test/java/com/alibaba/dubbo/registry/dubbo/RegistryDirectoryTest.java", "license": "apache-2.0", "size": 45397 }
[ "com.alibaba.dubbo.common.Constants", "com.alibaba.dubbo.registry.integration.RegistryDirectory", "com.alibaba.dubbo.rpc.RpcInvocation", "java.util.ArrayList", "java.util.List", "junit.framework.Assert" ]
import com.alibaba.dubbo.common.Constants; import com.alibaba.dubbo.registry.integration.RegistryDirectory; import com.alibaba.dubbo.rpc.RpcInvocation; import java.util.ArrayList; import java.util.List; import junit.framework.Assert;
import com.alibaba.dubbo.common.*; import com.alibaba.dubbo.registry.integration.*; import com.alibaba.dubbo.rpc.*; import java.util.*; import junit.framework.*;
[ "com.alibaba.dubbo", "java.util", "junit.framework" ]
com.alibaba.dubbo; java.util; junit.framework;
680,253
@GET @Path("/*") @TypeHint(PlayerGroup[].class) public Response getAll(@QueryParam("apiKey") @ValidApiKey String apiKey) { List<PlayerGroup> groups = groupDao.getAllGroups(apiKey); return ResponseSurrogate.of(groups); }
@Path("/*") @TypeHint(PlayerGroup[].class) Response function(@QueryParam(STR) @ValidApiKey String apiKey) { List<PlayerGroup> groups = groupDao.getAllGroups(apiKey); return ResponseSurrogate.of(groups); }
/** * Returns all group of players associated with the passed API key. If the API key is not * valid an analogous message is returned. It is also checked, if the player id is a * positive number otherwise a message for an invalid number is returned. * * @param apiKey * The valid query parameter API key affiliated to one specific organisation, * to which this group of players belongs to. * @return Response of PlayerGroup in JSON. */
Returns all group of players associated with the passed API key. If the API key is not valid an analogous message is returned. It is also checked, if the player id is a positive number otherwise a message for an invalid number is returned
getAll
{ "repo_name": "InteractiveSystemsGroup/GamificationEngine-Kinben", "path": "src/main/java/info/interactivesystems/gamificationengine/api/PlayerGroupApi.java", "license": "lgpl-3.0", "size": 22473 }
[ "com.webcohesion.enunciate.metadata.rs.TypeHint", "info.interactivesystems.gamificationengine.api.validation.ValidApiKey", "info.interactivesystems.gamificationengine.entities.PlayerGroup", "java.util.List", "javax.ws.rs.Path", "javax.ws.rs.QueryParam", "javax.ws.rs.core.Response" ]
import com.webcohesion.enunciate.metadata.rs.TypeHint; import info.interactivesystems.gamificationengine.api.validation.ValidApiKey; import info.interactivesystems.gamificationengine.entities.PlayerGroup; import java.util.List; import javax.ws.rs.Path; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Response;
import com.webcohesion.enunciate.metadata.rs.*; import info.interactivesystems.gamificationengine.api.validation.*; import info.interactivesystems.gamificationengine.entities.*; import java.util.*; import javax.ws.rs.*; import javax.ws.rs.core.*;
[ "com.webcohesion.enunciate", "info.interactivesystems.gamificationengine", "java.util", "javax.ws" ]
com.webcohesion.enunciate; info.interactivesystems.gamificationengine; java.util; javax.ws;
1,372,200
public java.util.List<fr.lip6.move.pnml.pthlpng.terms.hlapi.VariableHLAPI> getSubterm_terms_VariableHLAPI() { java.util.List<fr.lip6.move.pnml.pthlpng.terms.hlapi.VariableHLAPI> retour = new ArrayList<fr.lip6.move.pnml.pthlpng.terms.hlapi.VariableHLAPI>(); for (Term elemnt : getSubterm()) { if (elemnt.getClass().equals(fr.lip6.move.pnml.pthlpng.terms.impl.VariableImpl.class)) { retour.add(new fr.lip6.move.pnml.pthlpng.terms.hlapi.VariableHLAPI( (fr.lip6.move.pnml.pthlpng.terms.Variable) elemnt)); } } return retour; }
java.util.List<fr.lip6.move.pnml.pthlpng.terms.hlapi.VariableHLAPI> function() { java.util.List<fr.lip6.move.pnml.pthlpng.terms.hlapi.VariableHLAPI> retour = new ArrayList<fr.lip6.move.pnml.pthlpng.terms.hlapi.VariableHLAPI>(); for (Term elemnt : getSubterm()) { if (elemnt.getClass().equals(fr.lip6.move.pnml.pthlpng.terms.impl.VariableImpl.class)) { retour.add(new fr.lip6.move.pnml.pthlpng.terms.hlapi.VariableHLAPI( (fr.lip6.move.pnml.pthlpng.terms.Variable) elemnt)); } } return retour; }
/** * This accessor returns a list of encapsulated subelements, only of VariableHLAPI * kind. WARNING : this method can create a lot of new objects in memory. */
This accessor returns a list of encapsulated subelements, only of VariableHLAPI kind. WARNING : this method can create a lot of new objects in memory
getSubterm_terms_VariableHLAPI
{ "repo_name": "lhillah/pnmlframework", "path": "pnmlFw-PT-HLPNG/src/fr/lip6/move/pnml/pthlpng/booleans/hlapi/ImplyHLAPI.java", "license": "epl-1.0", "size": 69671 }
[ "fr.lip6.move.pnml.pthlpng.terms.Term", "java.util.ArrayList", "java.util.List" ]
import fr.lip6.move.pnml.pthlpng.terms.Term; import java.util.ArrayList; import java.util.List;
import fr.lip6.move.pnml.pthlpng.terms.*; import java.util.*;
[ "fr.lip6.move", "java.util" ]
fr.lip6.move; java.util;
1,901,141
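The generated accessor above filters the subterm list by exact runtime class and wraps each match in a fresh HLAPI object, which is why it warns about memory. A hedged, generic sketch of that filter-and-wrap pattern using the streams API with placeholder types instead of the PNML framework classes:

import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;

public class FilterWrapSketch {
    // Keep only elements of exactly the given class and map each into a wrapper object.
    static <T, R> List<R> collectOfType(List<?> elements, Class<T> type, Function<T, R> wrap) {
        return elements.stream()
                .filter(e -> e != null && e.getClass().equals(type)) // exact class match, like the generated accessor
                .map(type::cast)
                .map(wrap)
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        List<Object> mixed = List.of(1, "a", 2, "b");
        List<String> wrapped = collectOfType(mixed, Integer.class, i -> "wrapped:" + i);
        System.out.println(wrapped); // [wrapped:1, wrapped:2]
    }
}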
@Override public List<Symptom> detect() { ArrayList<Symptom> result = new ArrayList<>(); Map<String, ComponentMetrics> backpressureMetrics = bpSensor.get(); for (ComponentMetrics compMetrics : backpressureMetrics.values()) { ComponentMetricsHelper compStats = new ComponentMetricsHelper(compMetrics); compStats.computeBpStats(); if (compStats.getTotalBackpressure() > noiseFilterMillis) { LOG.info(String.format("Detected back pressure for %s, total back pressure is %f", compMetrics.getName(), compStats.getTotalBackpressure())); result.add(new Symptom(SYMPTOM_BACK_PRESSURE.text(), compMetrics)); } } return result; }
List<Symptom> function() { ArrayList<Symptom> result = new ArrayList<>(); Map<String, ComponentMetrics> backpressureMetrics = bpSensor.get(); for (ComponentMetrics compMetrics : backpressureMetrics.values()) { ComponentMetricsHelper compStats = new ComponentMetricsHelper(compMetrics); compStats.computeBpStats(); if (compStats.getTotalBackpressure() > noiseFilterMillis) { LOG.info(String.format(STR, compMetrics.getName(), compStats.getTotalBackpressure())); result.add(new Symptom(SYMPTOM_BACK_PRESSURE.text(), compMetrics)); } } return result; }
/** * Detects all components initiating backpressure above the configured limit. Normally there * will be only one component * * @return A collection of all components causing backpressure. */
Detects all components initiating backpressure above the configured limit. Normally there will be only one component
detect
{ "repo_name": "lucperkins/heron", "path": "heron/healthmgr/src/java/com/twitter/heron/healthmgr/detectors/BackPressureDetector.java", "license": "apache-2.0", "size": 2694 }
[ "com.microsoft.dhalion.detector.Symptom", "com.microsoft.dhalion.metrics.ComponentMetrics", "com.twitter.heron.healthmgr.common.ComponentMetricsHelper", "java.util.ArrayList", "java.util.List", "java.util.Map" ]
import com.microsoft.dhalion.detector.Symptom; import com.microsoft.dhalion.metrics.ComponentMetrics; import com.twitter.heron.healthmgr.common.ComponentMetricsHelper; import java.util.ArrayList; import java.util.List; import java.util.Map;
import com.microsoft.dhalion.detector.*; import com.microsoft.dhalion.metrics.*; import com.twitter.heron.healthmgr.common.*; import java.util.*;
[ "com.microsoft.dhalion", "com.twitter.heron", "java.util" ]
com.microsoft.dhalion; com.twitter.heron; java.util;
6,463
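The back-pressure detector above keeps only components whose accumulated back-pressure exceeds the configured noise filter. A self-contained sketch of that thresholding step alone, assuming per-component totals are already collected in a plain map (this is not the Heron/Dhalion API):

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class ThresholdFilterSketch {
    public static void main(String[] args) {
        long noiseFilterMillis = 20;
        // Hypothetical back-pressure totals in milliseconds per component.
        Map<String, Long> totals = Map.of("spout", 5L, "splitter", 120L, "counter", 40L);

        List<String> suspects = totals.entrySet().stream()
                .filter(e -> e.getValue() > noiseFilterMillis) // same noise-filter idea as the detector
                .map(Map.Entry::getKey)
                .sorted()
                .collect(Collectors.toList());

        System.out.println(suspects); // [counter, splitter]
    }
}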
public void setNextCallback(AvailabilityCallback callback) { synchronized(nextCallbackLock){ Preconditions.checkArgument(nextCallback == null, "Can't add a callback when one is already pending."); nextCallback = callback; if (availableGroups.get() > 0) { executeNextCallback(); } } }
void function(AvailabilityCallback callback) { synchronized(nextCallbackLock){ Preconditions.checkArgument(nextCallback == null, STR); nextCallback = callback; if (availableGroups.get() > 0) { executeNextCallback(); } } }
/** * Set the callback the next time this shared resource manager becomes * available. Note that this will be called immediately (in thread) if the * manager is currently unblocked. * * @param callback * The callback to inform. */
Set the callback the next time this shared resource manager becomes available. Note that this will be called immediately (in thread) if the manager is currently unblocked
setNextCallback
{ "repo_name": "dremio/dremio-oss", "path": "sabot/kernel/src/main/java/com/dremio/sabot/threads/sharedres/SharedResourceManager.java", "license": "apache-2.0", "size": 4258 }
[ "com.dremio.sabot.threads.AvailabilityCallback", "com.google.common.base.Preconditions" ]
import com.dremio.sabot.threads.AvailabilityCallback; import com.google.common.base.Preconditions;
import com.dremio.sabot.threads.*; import com.google.common.base.*;
[ "com.dremio.sabot", "com.google.common" ]
com.dremio.sabot; com.google.common;
2,738,486
@Override public void meet(StatementPattern node) throws RuntimeException { this.lastBGPNode = node; }
void function(StatementPattern node) throws RuntimeException { this.lastBGPNode = node; }
/** * Handles statement patterns which are always a valid BGP node. */
Handles statement patterns which are always a valid BGP node
meet
{ "repo_name": "goerlitz/rdffederator", "path": "src/de/uni_koblenz/west/splendid/model/BasicGraphPatternExtractor.java", "license": "lgpl-3.0", "size": 6250 }
[ "org.openrdf.query.algebra.StatementPattern" ]
import org.openrdf.query.algebra.StatementPattern;
import org.openrdf.query.algebra.*;
[ "org.openrdf.query" ]
org.openrdf.query;
1,796,636
@Override protected PermissionCollection getPermissions(CodeSource codeSource) { PermissionCollection perms = super.getPermissions(codeSource); ArrayList<Permission> permissions = _permissions; int size = permissions != null ? permissions.size() : 0; for (int i = 0; i < size; i++) { Permission permission = permissions.get(i); perms.add(permission); } return perms; }
PermissionCollection function(CodeSource codeSource) { PermissionCollection perms = super.getPermissions(codeSource); ArrayList<Permission> permissions = _permissions; int size = permissions != null ? permissions.size() : 0; for (int i = 0; i < size; i++) { Permission permission = permissions.get(i); perms.add(permission); } return perms; }
/** * Returns the permission collection for the given code source. */
Returns the permission collection for the given code source
getPermissions
{ "repo_name": "bertrama/resin", "path": "modules/kernel/src/com/caucho/loader/DynamicClassLoader.java", "license": "gpl-2.0", "size": 58059 }
[ "java.security.CodeSource", "java.security.Permission", "java.security.PermissionCollection", "java.util.ArrayList" ]
import java.security.CodeSource; import java.security.Permission; import java.security.PermissionCollection; import java.util.ArrayList;
import java.security.*; import java.util.*;
[ "java.security", "java.util" ]
java.security; java.util;
1,515,165
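The class loader's getPermissions above starts from the permissions the parent grants for the code source and appends its own configured list. A stand-alone sketch of composing a PermissionCollection with the JDK security API (the permissions chosen here are illustrative, not Resin's defaults):

import java.io.FilePermission;
import java.security.Permission;
import java.security.PermissionCollection;
import java.security.Permissions;
import java.util.Collections;
import java.util.List;

public class PermissionSketch {
    static PermissionCollection withExtras(PermissionCollection base, List<Permission> extras) {
        for (Permission p : extras) {
            base.add(p); // mirrors the append loop in getPermissions above
        }
        return base;
    }

    public static void main(String[] args) {
        PermissionCollection perms = new Permissions();
        withExtras(perms, List.of(
                new FilePermission("/tmp/-", "read"),
                new RuntimePermission("getClassLoader")));
        Collections.list(perms.elements()).forEach(System.out::println);
    }
}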
public java.util.List<org.ontoware.rdf2go.model.node.Node> getAllSeeAlso_asNodeList() { return Base.getAll_asNodeList(this.model, this.getResource(), SEEALSO); }
java.util.List<org.ontoware.rdf2go.model.node.Node> function() { return Base.getAll_asNodeList(this.model, this.getResource(), SEEALSO); }
/** * Get all values of property SeeAlso as a List of RDF2Go nodes * * @return a List of RDF2Go Nodes * * [Generated from RDFReactor template rule #get8dynamic-list] */
Get all values of property SeeAlso as a List of RDF2Go nodes
getAllSeeAlso_asNodeList
{ "repo_name": "semweb4j/semweb4j", "path": "org.semweb4j.rdfreactor.generator/src/main/java/org/ontoware/rdfreactor/schema/bootstrap/Resource.java", "license": "bsd-2-clause", "size": 83665 }
[ "org.ontoware.rdfreactor.runtime.Base" ]
import org.ontoware.rdfreactor.runtime.Base;
import org.ontoware.rdfreactor.runtime.*;
[ "org.ontoware.rdfreactor" ]
org.ontoware.rdfreactor;
1,541,707
public static XsltTemplate makeFromSystemId(String systemId) throws TransformerConfigurationException { XsltTemplate template = new XsltTemplate(); template.compileFromSystemId(systemId); return template; }
static XsltTemplate function(String systemId) throws TransformerConfigurationException { XsltTemplate template = new XsltTemplate(); template.compileFromSystemId(systemId); return template; }
/** * Makes a compiled XSLT template based upon a system path. * <br/>systemId examples: * <br/>c:/somefolder/somefile.xslt * <br/>file:///c:/somefolder/somefile.xslt * @param systemId the system path to XSLT file * @return the XSLT template * @throws TransformerConfigurationException if a configuration exception occurs */
Makes a compiled XSLT template based upon a system path. systemId examples: c:/somefolder/somefile.xslt file:///c:/somefolder/somefile.xslt
makeFromSystemId
{ "repo_name": "davidocean/geoportal-server", "path": "components/gc/src/gc/base/xml/XsltTemplate.java", "license": "apache-2.0", "size": 9728 }
[ "javax.xml.transform.TransformerConfigurationException" ]
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.*;
[ "javax.xml" ]
javax.xml;
2,058,644
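A compiled XSLT template of this kind is usually backed by a javax.xml.transform.Templates object. A hedged sketch of compiling one from a system id with plain JAXP; the stylesheet path is a placeholder and the geoportal XsltTemplate class itself is not reproduced here:

import javax.xml.transform.Templates;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import java.io.StringReader;
import java.io.StringWriter;

public class XsltSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder system id; any readable stylesheet location works here.
        String systemId = "file:///c:/somefolder/somefile.xslt";

        TransformerFactory factory = TransformerFactory.newInstance();
        Templates compiled = factory.newTemplates(new StreamSource(systemId)); // compile once, reuse many times

        Transformer transformer = compiled.newTransformer();
        StringWriter out = new StringWriter();
        transformer.transform(new StreamSource(new StringReader("<doc/>")), new StreamResult(out));
        System.out.println(out);
    }
}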
public Set<OWLClass> getOntologyRoots() { return getOntologyRoots(null); }
Set<OWLClass> function() { return getOntologyRoots(null); }
/** * Return the <code>OWLClass</code>es root of any ontology * (<code>OWLClass</code>es with no outgoing edges as returned by * {OWLGraphWrapperEdges#getOutgoingEdgesWithGCI(OWLObject)}), and not deprecated * ({@link OWLGraphWrapperExtended#isObsolete(OWLObject)} returns {@code false})). * * @return A <code>Set</code> of <code>OWLClass</code>es that are * the roots of any ontology. */
Return the <code>OWLClass</code>es root of any ontology (<code>OWLClass</code>es with no outgoing edges as returned by {OWLGraphWrapperEdges#getOutgoingEdgesWithGCI(OWLObject)}), and not deprecated (<code>OWLGraphWrapperExtended#isObsolete(OWLObject)</code> returns false))
getOntologyRoots
{ "repo_name": "owlcollab/owltools", "path": "OWLTools-Core/src/main/java/owltools/graph/OWLGraphWrapperEdgesExtended.java", "license": "bsd-3-clause", "size": 87864 }
[ "java.util.Set", "org.semanticweb.owlapi.model.OWLClass", "org.semanticweb.owlapi.model.OWLObject", "org.semanticweb.owlapi.model.OWLPropertyExpression" ]
import java.util.Set; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLObject; import org.semanticweb.owlapi.model.OWLPropertyExpression;
import java.util.*; import org.semanticweb.owlapi.model.*;
[ "java.util", "org.semanticweb.owlapi" ]
java.util; org.semanticweb.owlapi;
1,961,431
public static List<String> encode(AuthScopeExpression authScopeExpression) { try { return ((AbstractAuthScopeExpression) authScopeExpression).encode(); } catch (ClassCastException e) { throw new IllegalArgumentException( "Expressions of type " + authScopeExpression.getClass() + " are not supported."); } }
static List<String> function(AuthScopeExpression authScopeExpression) { try { return ((AbstractAuthScopeExpression) authScopeExpression).encode(); } catch (ClassCastException e) { throw new IllegalArgumentException( STR + authScopeExpression.getClass() + STR); } }
/** * Encodes an {@code AuthScopeExpression} back into its String List form. */
Encodes an AuthScopeExpression back into its String List form
encode
{ "repo_name": "cloudendpoints/endpoints-java", "path": "endpoints-framework/src/main/java/com/google/api/server/spi/config/scope/AuthScopeExpressions.java", "license": "apache-2.0", "size": 3823 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,126,222
@Override public void operationComplete(int rc, Long ledgerId) { if (BKException.Code.OK != rc) { createComplete(rc, null); return; } try { lh = new LedgerHandle(bk, ledgerId, metadata, digestType, passwd); } catch (GeneralSecurityException e) { LOG.error("Security exception while creating ledger: " + ledgerId, e); createComplete(BKException.Code.DigestNotInitializedException, null); return; } catch (NumberFormatException e) { LOG.error("Incorrectly entered parameter throttle: " + bk.getConf().getThrottleValue(), e); createComplete(BKException.Code.IncorrectParameterException, null); return; } // return the ledger handle back createComplete(BKException.Code.OK, lh); }
void function(int rc, Long ledgerId) { if (BKException.Code.OK != rc) { createComplete(rc, null); return; } try { lh = new LedgerHandle(bk, ledgerId, metadata, digestType, passwd); } catch (GeneralSecurityException e) { LOG.error(STR + ledgerId, e); createComplete(BKException.Code.DigestNotInitializedException, null); return; } catch (NumberFormatException e) { LOG.error(STR + bk.getConf().getThrottleValue(), e); createComplete(BKException.Code.IncorrectParameterException, null); return; } createComplete(BKException.Code.OK, lh); }
/** * Callback when created ledger. */
Callback when created ledger
operationComplete
{ "repo_name": "fengshao0907/bookkeeper", "path": "bookkeeper-server/src/main/java/org/apache/bookkeeper/client/LedgerCreateOp.java", "license": "apache-2.0", "size": 5472 }
[ "java.security.GeneralSecurityException" ]
import java.security.GeneralSecurityException;
import java.security.*;
[ "java.security" ]
java.security;
97,544
@Nullable public ItemStack removeStackFromSlot(int index) { return ItemStackHelper.getAndRemove(this.furnaceItemStacks, index); }
ItemStack function(int index) { return ItemStackHelper.getAndRemove(this.furnaceItemStacks, index); }
/** * Removes a stack from the given slot and returns it. */
Removes a stack from the given slot and returns it
removeStackFromSlot
{ "repo_name": "Discult/Bettercraft-Mod", "path": "main/java/jordan/bettercraft/init/tileentitys/blocks/TEQuartzFurnace.java", "license": "unlicense", "size": 17795 }
[ "net.minecraft.inventory.ItemStackHelper", "net.minecraft.item.ItemStack" ]
import net.minecraft.inventory.ItemStackHelper; import net.minecraft.item.ItemStack;
import net.minecraft.inventory.*; import net.minecraft.item.*;
[ "net.minecraft.inventory", "net.minecraft.item" ]
net.minecraft.inventory; net.minecraft.item;
121,634
Text getOutputValue() { StringBuilder valueStr = new StringBuilder(); valueStr.append(getValue()); return new Text(valueStr.toString()); }
Text getOutputValue() { StringBuilder valueStr = new StringBuilder(); valueStr.append(getValue()); return new Text(valueStr.toString()); }
/** * Gets the output value in text format * * @return Text */
Gets the output value in text format
getOutputValue
{ "repo_name": "tseen/Federated-HDFS", "path": "tseenliu/FedHDFS-hadoop-src/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationOutput.java", "license": "apache-2.0", "size": 8188 }
[ "org.apache.hadoop.io.Text" ]
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.*;
[ "org.apache.hadoop" ]
org.apache.hadoop;
715,073
@ServiceMethod(returns = ReturnType.SINGLE) public Mono<MicrosoftGraphSubscribedSkuInner> getSubscribedSkuAsync( String subscribedSkuId, List<SubscribedSkusSubscribedSkuSelect> select, List<String> expand) { return getSubscribedSkuWithResponseAsync(subscribedSkuId, select, expand) .flatMap( (Response<MicrosoftGraphSubscribedSkuInner> res) -> { if (res.getValue() != null) { return Mono.just(res.getValue()); } else { return Mono.empty(); } }); }
@ServiceMethod(returns = ReturnType.SINGLE) Mono<MicrosoftGraphSubscribedSkuInner> function( String subscribedSkuId, List<SubscribedSkusSubscribedSkuSelect> select, List<String> expand) { return getSubscribedSkuWithResponseAsync(subscribedSkuId, select, expand) .flatMap( (Response<MicrosoftGraphSubscribedSkuInner> res) -> { if (res.getValue() != null) { return Mono.just(res.getValue()); } else { return Mono.empty(); } }); }
/** * Get entity from subscribedSkus by key. * * @param subscribedSkuId key: id of subscribedSku. * @param select Select properties to be returned. * @param expand Expand related entities. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws OdataErrorMainException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return entity from subscribedSkus by key. */
Get entity from subscribedSkus by key
getSubscribedSkuAsync
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/resourcemanager/azure-resourcemanager-authorization/src/main/java/com/azure/resourcemanager/authorization/implementation/SubscribedSkusSubscribedSkusClientImpl.java", "license": "mit", "size": 45332 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod", "com.azure.core.http.rest.Response", "com.azure.resourcemanager.authorization.fluent.models.MicrosoftGraphSubscribedSkuInner", "com.azure.resourcemanager.authorization.fluent.models.SubscribedSkusSubscribedSkuSelect", "java.util.List" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.Response; import com.azure.resourcemanager.authorization.fluent.models.MicrosoftGraphSubscribedSkuInner; import com.azure.resourcemanager.authorization.fluent.models.SubscribedSkusSubscribedSkuSelect; import java.util.List;
import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.resourcemanager.authorization.fluent.models.*; import java.util.*;
[ "com.azure.core", "com.azure.resourcemanager", "java.util" ]
com.azure.core; com.azure.resourcemanager; java.util;
1,831,417
public void trainTree() { int m = 0; TreeSet<Integer> exports; Factor<String> fct; ArrayList< Factor<String> > trained = new ArrayList< Factor<String> >(); while (fctrs.size() > 0) { //keep training until all the factors have been cleansed fct = computeGain(); //identify factor with most gain exports = fct.getPures(); //export pure data, and define factor pure trained.add(fct); //save trained factor //if we have removed all the training examples, training is complete m += exports.size(); if (m > trainLen) break; for(int i = 0; i < fctrs.size(); i++) { //remove already trained examples fctrs.get(i).removeExamples(exports); if (fctrs.get(i).getN() + fctrs.get(i).getP() == 0) { //remove useless factors fctrs.remove(fctrs.get(i)); i--; } } } fctrs = trained; }
void function() { int m = 0; TreeSet<Integer> exports; Factor<String> fct; ArrayList< Factor<String> > trained = new ArrayList< Factor<String> >(); while (fctrs.size() > 0) { fct = computeGain(); exports = fct.getPures(); trained.add(fct); m += exports.size(); if (m > trainLen) break; for(int i = 0; i < fctrs.size(); i++) { fctrs.get(i).removeExamples(exports); if (fctrs.get(i).getN() + fctrs.get(i).getP() == 0) { fctrs.remove(fctrs.get(i)); i--; } } } fctrs = trained; }
/** * removes excess data to construct a learning tree */
removes excess data to construct a learning tree
trainTree
{ "repo_name": "parejadan/AI-DTL", "path": "src/dataStructures/DecisionTree.java", "license": "mit", "size": 5677 }
[ "java.util.ArrayList", "java.util.TreeSet" ]
import java.util.ArrayList; import java.util.TreeSet;
import java.util.*;
[ "java.util" ]
java.util;
1,472,061
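The trainTree loop above repeatedly picks the factor with the highest gain, exports the examples that factor classifies purely, and drops factors left with no coverage. The Factor class and its gain computation are project-specific, so as a rough stand-alone illustration of the underlying idea only, here is an entropy / information-gain calculation for a single boolean split:

public class GainSketch {
    // Shannon entropy of a positive/negative example split.
    static double entropy(int p, int n) {
        if (p == 0 || n == 0) return 0.0;
        double total = p + n;
        double pp = p / total, pn = n / total;
        return -(pp * log2(pp) + pn * log2(pn));
    }

    static double log2(double x) {
        return Math.log(x) / Math.log(2);
    }

    // Gain of splitting (p, n) examples into two branches.
    static double gain(int p, int n, int pLeft, int nLeft, int pRight, int nRight) {
        double total = p + n;
        double remainder = (pLeft + nLeft) / total * entropy(pLeft, nLeft)
                + (pRight + nRight) / total * entropy(pRight, nRight);
        return entropy(p, n) - remainder;
    }

    public static void main(String[] args) {
        // 6 positive and 6 negative examples; this split separates them perfectly.
        System.out.println(gain(6, 6, 6, 0, 0, 6)); // 1.0
    }
}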
@Test public void testRestartConsumableBlockingTasks() { final TestingSchedulingTopology testingSchedulingTopology = new TestingSchedulingTopology(); final List<TestingSchedulingExecutionVertex> producers = testingSchedulingTopology.addExecutionVertices().finish(); final List<TestingSchedulingExecutionVertex> consumers = testingSchedulingTopology.addExecutionVertices().finish(); testingSchedulingTopology.connectAllToAll(producers, consumers).finish(); LazyFromSourcesSchedulingStrategy schedulingStrategy = startScheduling(testingSchedulingTopology); Set<ExecutionVertexID> verticesToRestart = consumers.stream().map(TestingSchedulingExecutionVertex::getId) .collect(Collectors.toSet()); for (TestingSchedulingExecutionVertex producer : producers) { schedulingStrategy.onExecutionStateChange(producer.getId(), ExecutionState.FINISHED); } schedulingStrategy.restartTasks(verticesToRestart); assertLatestScheduledVerticesAreEqualTo(consumers); }
void function() { final TestingSchedulingTopology testingSchedulingTopology = new TestingSchedulingTopology(); final List<TestingSchedulingExecutionVertex> producers = testingSchedulingTopology.addExecutionVertices().finish(); final List<TestingSchedulingExecutionVertex> consumers = testingSchedulingTopology.addExecutionVertices().finish(); testingSchedulingTopology.connectAllToAll(producers, consumers).finish(); LazyFromSourcesSchedulingStrategy schedulingStrategy = startScheduling(testingSchedulingTopology); Set<ExecutionVertexID> verticesToRestart = consumers.stream().map(TestingSchedulingExecutionVertex::getId) .collect(Collectors.toSet()); for (TestingSchedulingExecutionVertex producer : producers) { schedulingStrategy.onExecutionStateChange(producer.getId(), ExecutionState.FINISHED); } schedulingStrategy.restartTasks(verticesToRestart); assertLatestScheduledVerticesAreEqualTo(consumers); }
/** * Tests that when restart tasks will schedule input consumable vertices in given ones. */
Tests that when restart tasks will schedule input consumable vertices in given ones
testRestartConsumableBlockingTasks
{ "repo_name": "gyfora/flink", "path": "flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/strategy/LazyFromSourcesSchedulingStrategyTest.java", "license": "apache-2.0", "size": 16736 }
[ "java.util.List", "java.util.Set", "java.util.stream.Collectors", "org.apache.flink.runtime.execution.ExecutionState" ]
import java.util.List; import java.util.Set; import java.util.stream.Collectors; import org.apache.flink.runtime.execution.ExecutionState;
import java.util.*; import java.util.stream.*; import org.apache.flink.runtime.execution.*;
[ "java.util", "org.apache.flink" ]
java.util; org.apache.flink;
1,668,881
public void execute(AppMigRejectEvent event) { simulation.getLogger().debug(String.format("[Cluster #%d] AppRelocationPolicyLevel2 - New Migration reject - App #%d.", manager.getCapability(ClusterManager.class).getCluster().getId(), event.getApplication().getId())); // Mark sender's status as invalid (to avoid choosing sender again in the next step). Collection<RackData> racks = manager.getCapability(RackPoolManager.class).getRacks(); for (RackData rack : racks) { if (rack.getId() == event.getSender()) { rack.invalidateStatus(simulation.getSimulationTime()); break; } } // Get entry from record and search again for a migration target. MigRequestEntry entry = manager.getCapability(MigRequestRecord.class).getEntry(event.getApplication(), event.getOrigin()); this.searchForAppMigrationTarget(entry); }
void function(AppMigRejectEvent event) { simulation.getLogger().debug(String.format(STR, manager.getCapability(ClusterManager.class).getCluster().getId(), event.getApplication().getId())); Collection<RackData> racks = manager.getCapability(RackPoolManager.class).getRacks(); for (RackData rack : racks) { if (rack.getId() == event.getSender()) { rack.invalidateStatus(simulation.getSimulationTime()); break; } } MigRequestEntry entry = manager.getCapability(MigRequestRecord.class).getEntry(event.getApplication(), event.getOrigin()); this.searchForAppMigrationTarget(entry); }
/** * This event can only come from Racks in this Cluster in response to migration requests sent by the ClusterManager. */
This event can only come from Racks in this Cluster in response to migration requests sent by the ClusterManager
execute
{ "repo_name": "digs-uwo/dcsim-projects", "path": "src/edu/uwo/csd/dcsim/projects/hierarchical/policies/RelocationPolicyLevel2.java", "license": "gpl-3.0", "size": 15619 }
[ "edu.uwo.csd.dcsim.projects.hierarchical.MigRequestEntry", "edu.uwo.csd.dcsim.projects.hierarchical.RackData", "edu.uwo.csd.dcsim.projects.hierarchical.capabilities.ClusterManager", "edu.uwo.csd.dcsim.projects.hierarchical.capabilities.MigRequestRecord", "edu.uwo.csd.dcsim.projects.hierarchical.capabilities.RackPoolManager", "edu.uwo.csd.dcsim.projects.hierarchical.events.AppMigRejectEvent", "java.util.Collection" ]
import edu.uwo.csd.dcsim.projects.hierarchical.MigRequestEntry; import edu.uwo.csd.dcsim.projects.hierarchical.RackData; import edu.uwo.csd.dcsim.projects.hierarchical.capabilities.ClusterManager; import edu.uwo.csd.dcsim.projects.hierarchical.capabilities.MigRequestRecord; import edu.uwo.csd.dcsim.projects.hierarchical.capabilities.RackPoolManager; import edu.uwo.csd.dcsim.projects.hierarchical.events.AppMigRejectEvent; import java.util.Collection;
import edu.uwo.csd.dcsim.projects.hierarchical.*; import edu.uwo.csd.dcsim.projects.hierarchical.capabilities.*; import edu.uwo.csd.dcsim.projects.hierarchical.events.*; import java.util.*;
[ "edu.uwo.csd", "java.util" ]
edu.uwo.csd; java.util;
2,596,158
@JsonAnyGetter public Map<String, Object> additionalProperties() { return this.additionalProperties; }
Map<String, Object> function() { return this.additionalProperties; }
/** * Get the additionalProperties property: notebookLinks. * * @return the additionalProperties value. */
Get the additionalProperties property: notebookLinks
additionalProperties
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/resourcemanager/azure-resourcemanager-authorization/src/main/java/com/azure/resourcemanager/authorization/fluent/models/MicrosoftGraphNotebookLinks.java", "license": "mit", "size": 3600 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
1,223,068
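The @JsonAnyGetter accessor above tells Jackson to flatten the additionalProperties map into the enclosing JSON object when the model is serialized. A small hedged example of that mechanism with a plain POJO rather than the Azure SDK model class:

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.HashMap;
import java.util.Map;

public class AnyGetterSketch {
    public static class Links {
        private final Map<String, Object> additionalProperties = new HashMap<>();

        @JsonAnyGetter
        public Map<String, Object> additionalProperties() {
            return additionalProperties; // entries are written as top-level JSON fields
        }

        @JsonAnySetter
        public void set(String name, Object value) {
            additionalProperties.put(name, value); // unknown JSON fields land back in the map
        }
    }

    public static void main(String[] args) throws Exception {
        Links links = new Links();
        links.set("oneNoteClientUrl", "https://example.invalid/client");
        System.out.println(new ObjectMapper().writeValueAsString(links));
        // {"oneNoteClientUrl":"https://example.invalid/client"}
    }
}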
@Override public void changePassword(final Identity identity, final String pwd, final LDAPError errors) { final String uid = identity.getName(); final String ldapUserPasswordAttribute = LDAPLoginModule.getLdapUserPasswordAttribute(); try { final DirContext ctx = bindSystem(); final String dn = searchUserDN(uid, ctx); final ModificationItem[] modificationItems = new ModificationItem[1]; Attribute userPasswordAttribute; if (LDAPLoginModule.isActiveDirectory()) { // active directory need the password enquoted and unicoded (but little-endian) final String quotedPassword = "\"" + pwd + "\""; final char unicodePwd[] = quotedPassword.toCharArray(); final byte pwdArray[] = new byte[unicodePwd.length * 2]; for (int i = 0; i < unicodePwd.length; i++) { pwdArray[i * 2 + 1] = (byte) (unicodePwd[i] >>> 8); pwdArray[i * 2 + 0] = (byte) (unicodePwd[i] & 0xff); } userPasswordAttribute = new BasicAttribute(ldapUserPasswordAttribute, pwdArray); } else { userPasswordAttribute = new BasicAttribute(ldapUserPasswordAttribute, pwd); } modificationItems[0] = new ModificationItem(DirContext.REPLACE_ATTRIBUTE, userPasswordAttribute); ctx.modifyAttributes(dn, modificationItems); ctx.close(); } catch (final NamingException e) { logError("NamingException when trying to change password with username::" + uid, e); errors.insert("Cannot change the password"); } }
void function(final Identity identity, final String pwd, final LDAPError errors) { final String uid = identity.getName(); final String ldapUserPasswordAttribute = LDAPLoginModule.getLdapUserPasswordAttribute(); try { final DirContext ctx = bindSystem(); final String dn = searchUserDN(uid, ctx); final ModificationItem[] modificationItems = new ModificationItem[1]; Attribute userPasswordAttribute; if (LDAPLoginModule.isActiveDirectory()) { final String quotedPassword = "\"" + pwd + "\""; final char unicodePwd[] = quotedPassword.toCharArray(); final byte pwdArray[] = new byte[unicodePwd.length * 2]; for (int i = 0; i < unicodePwd.length; i++) { pwdArray[i * 2 + 1] = (byte) (unicodePwd[i] >>> 8); pwdArray[i * 2 + 0] = (byte) (unicodePwd[i] & 0xff); } userPasswordAttribute = new BasicAttribute(ldapUserPasswordAttribute, pwdArray); } else { userPasswordAttribute = new BasicAttribute(ldapUserPasswordAttribute, pwd); } modificationItems[0] = new ModificationItem(DirContext.REPLACE_ATTRIBUTE, userPasswordAttribute); ctx.modifyAttributes(dn, modificationItems); ctx.close(); } catch (final NamingException e) { logError(STR + uid, e); errors.insert(STR); } }
/** * Change the password on the LDAP server. * * @see org.olat.ldap.LDAPLoginManager#changePassword(org.olat.core.id.Identity, java.lang.String, org.olat.ldap.LDAPError) */
Change the password on the LDAP server
changePassword
{ "repo_name": "RLDevOps/Demo", "path": "src/main/java/org/olat/ldap/LDAPLoginManagerImpl.java", "license": "apache-2.0", "size": 38045 }
[ "javax.naming.directory.Attribute", "javax.naming.directory.DirContext", "javax.naming.directory.ModificationItem", "org.olat.core.id.Identity" ]
import javax.naming.directory.Attribute; import javax.naming.directory.DirContext; import javax.naming.directory.ModificationItem; import org.olat.core.id.Identity;
import javax.naming.directory.*; import org.olat.core.id.*;
[ "javax.naming", "org.olat.core" ]
javax.naming; org.olat.core;
2,442,846
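The Active Directory branch of changePassword above quotes the password and packs it into little-endian UTF-16 bytes by hand. The same encoding can be produced with the standard charset API; a minimal sketch covering only the byte-encoding step, with no directory access:

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class UnicodePwdSketch {
    // Active Directory expects the quoted password as UTF-16LE bytes in the unicodePwd attribute.
    static byte[] encodeUnicodePwd(String password) {
        String quoted = "\"" + password + "\"";
        return quoted.getBytes(StandardCharsets.UTF_16LE);
    }

    // The hand-rolled loop from the method above, kept for comparison.
    static byte[] manualLittleEndian(String password) {
        char[] unicodePwd = ("\"" + password + "\"").toCharArray();
        byte[] pwdArray = new byte[unicodePwd.length * 2];
        for (int i = 0; i < unicodePwd.length; i++) {
            pwdArray[i * 2 + 1] = (byte) (unicodePwd[i] >>> 8);
            pwdArray[i * 2] = (byte) (unicodePwd[i] & 0xff);
        }
        return pwdArray;
    }

    public static void main(String[] args) {
        System.out.println(Arrays.equals(manualLittleEndian("secret"), encodeUnicodePwd("secret"))); // true
    }
}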
@Pointcut("@annotation(com.github.tomschi.commons.annotation.logging.LogExecution)") public void annotatedMethod() {}
@Pointcut(STR) public void annotatedMethod() {}
/** * This method is marked as {@link Pointcut}. It checks, if the class * of the called method is annotated with {@link LogExecution}. */
This method is marked as <code>Pointcut</code>. It checks, if the class of the called method is annotated with <code>LogExecution</code>
annotatedBean
{ "repo_name": "Tomschi/commons", "path": "commons-aspect/src/main/java/com/github/tomschi/commons/aspect/logging/MethodExecutionLogger.java", "license": "apache-2.0", "size": 6527 }
[ "org.aspectj.lang.annotation.Pointcut" ]
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.annotation.*;
[ "org.aspectj.lang" ]
org.aspectj.lang;
2,795,780
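A pointcut such as annotatedMethod above only takes effect when an advice references it. A hedged sketch of wiring it to an @Around advice in an AspectJ-annotated aspect; the aspect name, advice name, and timing logic are illustrative, not taken from the project:

import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;

@Aspect
public class ExecutionTimingAspect {
    @Pointcut("@annotation(com.github.tomschi.commons.annotation.logging.LogExecution)")
    public void annotatedMethod() {}

    // Runs around every join point matched by annotatedMethod() and prints its duration.
    @Around("annotatedMethod()")
    public Object logExecution(ProceedingJoinPoint pjp) throws Throwable {
        long start = System.nanoTime();
        try {
            return pjp.proceed();
        } finally {
            long micros = (System.nanoTime() - start) / 1_000;
            System.out.println(pjp.getSignature() + " took " + micros + " us");
        }
    }
}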
LabelEntity getLabel(String labelId);
LabelEntity getLabel(String labelId);
/** * Gets the specified label. * * @param labelId The label id * @return The label transfer object */
Gets the specified label
getLabel
{ "repo_name": "mans2singh/nifi", "path": "nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/NiFiServiceFacade.java", "license": "apache-2.0", "size": 84641 }
[ "org.apache.nifi.web.api.entity.LabelEntity" ]
import org.apache.nifi.web.api.entity.LabelEntity;
import org.apache.nifi.web.api.entity.*;
[ "org.apache.nifi" ]
org.apache.nifi;
1,377,155
public static Throwable unwrapException(Throwable t) { if (t instanceof EJBException) { EJBException ejbEx = (EJBException) t; if (ejbEx.getCause() instanceof Exception) { if (ejbEx != ejbEx.getCausedByException() && ejbEx.getCausedByException() != null) { t = unwrapException(ejbEx.getCausedByException()); } else if (ejbEx != ejbEx.getCause()) { t = unwrapException(ejbEx.getCause()); } } } if (t instanceof RemoteException) { RemoteException remoteEx = (RemoteException) t; if (remoteEx != remoteEx.detail && remoteEx.detail instanceof Exception) { t = unwrapException(remoteEx.detail); } } return t; }
static Throwable function(Throwable t) { if (t instanceof EJBException) { EJBException ejbEx = (EJBException) t; if (ejbEx.getCause() instanceof Exception) { if (ejbEx != ejbEx.getCausedByException() && ejbEx.getCausedByException() != null) { t = unwrapException(ejbEx.getCausedByException()); } else if (ejbEx != ejbEx.getCause()) { t = unwrapException(ejbEx.getCause()); } } } if (t instanceof RemoteException) { RemoteException remoteEx = (RemoteException) t; if (remoteEx != remoteEx.detail && remoteEx.detail instanceof Exception) { t = unwrapException(remoteEx.detail); } } return t; }
/** * Try to find the root cause for the Throwable * * @param t * the Throwable to be analyzed * @return the potential root cause of the Throwable */
Try to find the root cause for the Throwable
unwrapException
{ "repo_name": "opetrovski/development", "path": "oscm-portal/javasrc/org/oscm/ui/common/ExceptionHandler.java", "license": "apache-2.0", "size": 13190 }
[ "java.rmi.RemoteException", "javax.ejb.EJBException" ]
import java.rmi.RemoteException; import javax.ejb.EJBException;
import java.rmi.*; import javax.ejb.*;
[ "java.rmi", "javax.ejb" ]
java.rmi; javax.ejb;
1,963,850
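unwrapException above peels EJBException and RemoteException wrappers to reach the underlying cause. As a stand-alone illustration of the same idea using only JDK types (the EJB and RMI wrapper handling is omitted), a generic root-cause walk might look like this:

public class RootCauseSketch {
    // Follows the cause chain until no further (distinct) cause is available.
    static Throwable rootCause(Throwable t) {
        Throwable current = t;
        while (current.getCause() != null && current.getCause() != current) {
            current = current.getCause();
        }
        return current;
    }

    public static void main(String[] args) {
        Exception leaf = new IllegalStateException("root problem");
        Exception wrapped = new RuntimeException("outer", new RuntimeException("middle", leaf));
        System.out.println(rootCause(wrapped)); // java.lang.IllegalStateException: root problem
    }
}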
@Test @Feature({"Location"}) public void testStartUpgradeStop() { assertTrue("Should be on UI thread", ThreadUtils.runningOnUiThread()); setLocationProvider(); createLocationProviderAdapter(); startLocationProviderAdapter(false); startLocationProviderAdapter(true); stopLocationProviderAdapter(); }
@Feature({STR}) void function() { assertTrue(STR, ThreadUtils.runningOnUiThread()); setLocationProvider(); createLocationProviderAdapter(); startLocationProviderAdapter(false); startLocationProviderAdapter(true); stopLocationProviderAdapter(); }
/** * Verify a start/upgrade/stop call sequence with the given LocationProvider. */
Verify a start/upgrade/stop call sequence with the given LocationProvider
testStartUpgradeStop
{ "repo_name": "endlessm/chromium-browser", "path": "services/device/geolocation/android/junit/src/org/chromium/device/geolocation/LocationProviderTest.java", "license": "bsd-3-clause", "size": 6948 }
[ "org.chromium.base.ThreadUtils", "org.chromium.base.test.util.Feature", "org.junit.Assert" ]
import org.chromium.base.ThreadUtils; import org.chromium.base.test.util.Feature; import org.junit.Assert;
import org.chromium.base.*; import org.chromium.base.test.util.*; import org.junit.*;
[ "org.chromium.base", "org.junit" ]
org.chromium.base; org.junit;
1,425,944
public final ConfigurationModuleBuilder merge(ConfigurationModule d) { if (d == null) { throw new NullPointerException("If merge() was passed a static final field that is initialized to non-null, " + "then this is almost certainly caused by a circular class dependency."); } try { d.assertStaticClean(); } catch (ClassHierarchyException e) { throw new ClassHierarchyException(ReflectionUtilities.getFullName(getClass()) + ": detected attempt to merge with ConfigurationModule that has had set() called on it", e); } ConfigurationModuleBuilder c = deepCopy(); try { c.b.addConfiguration(d.builder.b.build()); } catch (BindException e) { throw new ClassHierarchyException(e); } c.reqDecl.addAll(d.builder.reqDecl); c.optDecl.addAll(d.builder.optDecl); c.reqUsed.addAll(d.builder.reqUsed); c.optUsed.addAll(d.builder.optUsed); c.setOpts.addAll(d.builder.setOpts); c.map.putAll(d.builder.map); c.freeImpls.putAll(d.builder.freeImpls); c.freeParams.putAll(d.builder.freeParams); c.lateBindClazz.putAll(d.builder.lateBindClazz); return c; }
final ConfigurationModuleBuilder function(ConfigurationModule d) { if (d == null) { throw new NullPointerException(STR + STR); } try { d.assertStaticClean(); } catch (ClassHierarchyException e) { throw new ClassHierarchyException(ReflectionUtilities.getFullName(getClass()) + STR, e); } ConfigurationModuleBuilder c = deepCopy(); try { c.b.addConfiguration(d.builder.b.build()); } catch (BindException e) { throw new ClassHierarchyException(e); } c.reqDecl.addAll(d.builder.reqDecl); c.optDecl.addAll(d.builder.optDecl); c.reqUsed.addAll(d.builder.reqUsed); c.optUsed.addAll(d.builder.optUsed); c.setOpts.addAll(d.builder.setOpts); c.map.putAll(d.builder.map); c.freeImpls.putAll(d.builder.freeImpls); c.freeParams.putAll(d.builder.freeParams); c.lateBindClazz.putAll(d.builder.lateBindClazz); return c; }
/** * TODO: It would be nice if this incorporated d by reference so that static analysis / documentation tools * could document the dependency between c and d. */
TODO: It would be nice if this incorporated d by reference so that static analysis / documentation tools could document the dependency between c and d
merge
{ "repo_name": "taegeonum/incubator-reef", "path": "lang/java/reef-tang/tang/src/main/java/org/apache/reef/tang/formats/ConfigurationModuleBuilder.java", "license": "apache-2.0", "size": 14167 }
[ "org.apache.reef.tang.exceptions.BindException", "org.apache.reef.tang.exceptions.ClassHierarchyException", "org.apache.reef.tang.util.ReflectionUtilities" ]
import org.apache.reef.tang.exceptions.BindException; import org.apache.reef.tang.exceptions.ClassHierarchyException; import org.apache.reef.tang.util.ReflectionUtilities;
import org.apache.reef.tang.exceptions.*; import org.apache.reef.tang.util.*;
[ "org.apache.reef" ]
org.apache.reef;
1,224,197
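A hedged sketch of the ConfigurationModule pattern that merge() supports in Tang. The module, parameter and class names below are invented for illustration, and the exact fluent API should be double-checked against org.apache.reef.tang.formats; treat this as an outline, not REEF source.

import org.apache.reef.tang.annotations.Name;
import org.apache.reef.tang.annotations.NamedParameter;
import org.apache.reef.tang.formats.ConfigurationModule;
import org.apache.reef.tang.formats.ConfigurationModuleBuilder;
import org.apache.reef.tang.formats.RequiredParameter;

public final class MergeSketch {

    @NamedParameter(doc = "Job name (hypothetical parameter)")
    public static final class JobName implements Name<String> {
    }

    @NamedParameter(doc = "Worker count (hypothetical parameter)", default_value = "1")
    public static final class NumWorkers implements Name<Integer> {
    }

    public static final class BaseModule extends ConfigurationModuleBuilder {
        public static final RequiredParameter<Integer> WORKERS = new RequiredParameter<>();
        public static final ConfigurationModule CONF = new BaseModule()
            .bindNamedParameter(NumWorkers.class, WORKERS)
            .build();
    }

    public static final class CombinedModule extends ConfigurationModuleBuilder {
        public static final RequiredParameter<String> NAME = new RequiredParameter<>();
        // merge() deep-copies this builder and folds in BaseModule.CONF's bindings and
        // parameter declarations, so CombinedModule.CONF exposes WORKERS and NAME together.
        public static final ConfigurationModule CONF = new CombinedModule()
            .merge(BaseModule.CONF)
            .bindNamedParameter(JobName.class, NAME)
            .build();
    }
}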
public RawTextWrapper getRawText() { return rawText; }
RawTextWrapper function() { return rawText; }
/** * Gets the original text from the relAnnis text.tab file represented as a String. * * <p> * This is a convenient and very fast method for extracting the whole text of a document, since * this method simply reads database tuples and does not map anything to Salt. * </p> * * @return * <ul> * <li>null - if the {@link VisualizerPlugin#isUsingRawText()} method returns false for this * visualizer.</li> * * <li>empty list - if there are only segmentations and the cachedToken layer is * empty</li> * */
Gets the original text from the relAnnis text.tab file represented as a String. This is a convenient and very fast method for extracting the whole text of a document, since this method simply reads database tuples and does not map anything to Salt.
getRawText
{ "repo_name": "korpling/ANNIS", "path": "src/main/java/org/corpus_tools/annis/gui/visualizers/VisualizerInput.java", "license": "apache-2.0", "size": 7546 }
[ "org.corpus_tools.annis.gui.objects.RawTextWrapper" ]
import org.corpus_tools.annis.gui.objects.RawTextWrapper;
import org.corpus_tools.annis.gui.objects.*;
[ "org.corpus_tools.annis" ]
org.corpus_tools.annis;
2,509,053
public void selected(SelectEvent event) { if (event.getEventAction().equals(SelectEvent.LEFT_CLICK)) { // This is a left click if (event.hasObjects() && event.getTopPickedObject().hasPosition()) { // There is a picked object with a position if (event.getTopObject().getClass().equals(pickedObjClass)) { // This object class we handle and we have an orbit view Position targetPos = event.getTopPickedObject().getPosition(); View view = this.wwd.getView(); // Use a PanToIterator to iterate view to target position if(view != null) { // The elevation component of 'targetPos' here is not the surface elevation, // so we ignore it when specifying the view center position. view.goTo(new Position(targetPos, 0), targetPos.getElevation() + this.elevationOffset); } } } } }
void function(SelectEvent event) { if (event.getEventAction().equals(SelectEvent.LEFT_CLICK)) { if (event.hasObjects() && event.getTopPickedObject().hasPosition()) { if (event.getTopObject().getClass().equals(pickedObjClass)) { Position targetPos = event.getTopPickedObject().getPosition(); View view = this.wwd.getView(); if(view != null) { view.goTo(new Position(targetPos, 0), targetPos.getElevation() + this.elevationOffset); } } } } }
/** * Select Listener implementation. * * @param event the SelectEvent */
Select Listener implementation
selected
{ "repo_name": "aleo72/ww-ceem-radar", "path": "src/main/java/ua/edu/odeku/ceem/mapRadar/utils/ClickAndGoSelectListener.java", "license": "apache-2.0", "size": 3256 }
[ "gov.nasa.worldwind.View", "gov.nasa.worldwind.event.SelectEvent", "gov.nasa.worldwind.geom.Position" ]
import gov.nasa.worldwind.View; import gov.nasa.worldwind.event.SelectEvent; import gov.nasa.worldwind.geom.Position;
import gov.nasa.worldwind.*; import gov.nasa.worldwind.event.*; import gov.nasa.worldwind.geom.*;
[ "gov.nasa.worldwind" ]
gov.nasa.worldwind;
14,460
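A short registration sketch for the listener above. The constructor arguments are an assumption inferred from the fields the record uses (wwd, pickedObjClass, elevationOffset); the actual signature is not shown in this record.

import gov.nasa.worldwind.awt.WorldWindowGLCanvas;
import gov.nasa.worldwind.render.PointPlacemark;
import ua.edu.odeku.ceem.mapRadar.utils.ClickAndGoSelectListener;

public class ClickAndGoDemo {
    public static void main(String[] args) {
        WorldWindowGLCanvas wwd = new WorldWindowGLCanvas();
        // Assumed constructor: (WorldWindow, picked object class, elevation offset in metres).
        // Left-clicking a picked PointPlacemark would then animate the view to its position.
        wwd.addSelectListener(new ClickAndGoSelectListener(wwd, PointPlacemark.class, 1000d));
    }
}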
@Test public void testDynamicCols() throws Exception { Assert.assertEquals(0, driver.run("drop table if exists test_thrift").getResponseCode()); Assert.assertEquals(0, driver.run( "create external table test_thrift " + "partitioned by (year string) " + "row format serde 'org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer' " + "with serdeproperties ( " + " 'serialization.class'='org.apache.hadoop.hive.serde2.thrift.test.IntString', " + " 'serialization.format'='org.apache.thrift.protocol.TBinaryProtocol') " + "stored as" + " inputformat 'org.apache.hadoop.mapred.SequenceFileInputFormat'" + " outputformat 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'") .getResponseCode()); Assert.assertEquals(0, driver.run("alter table test_thrift add partition (year = '2012') location '" + intStringSeq.getParent() + "'").getResponseCode()); PigServer pigServer = createPigServer(false); pigServer.registerQuery("A = load 'test_thrift' using org.apache.hive.hcatalog.pig.HCatLoader();"); Schema expectedSchema = new Schema(); expectedSchema.add(new Schema.FieldSchema("myint", DataType.INTEGER)); expectedSchema.add(new Schema.FieldSchema("mystring", DataType.CHARARRAY)); expectedSchema.add(new Schema.FieldSchema("underscore_int", DataType.INTEGER)); expectedSchema.add(new Schema.FieldSchema("year", DataType.CHARARRAY)); Assert.assertEquals(expectedSchema, pigServer.dumpSchema("A")); Iterator<Tuple> iterator = pigServer.openIterator("A"); Tuple t = iterator.next(); Assert.assertEquals(1, t.get(0)); Assert.assertEquals("one", t.get(1)); Assert.assertEquals(1, t.get(2)); Assert.assertEquals("2012", t.get(3)); Assert.assertFalse(iterator.hasNext()); }
void function() throws Exception { Assert.assertEquals(0, driver.run(STR).getResponseCode()); Assert.assertEquals(0, driver.run( STR + STR + STR + STR + STR + STR + STR + STR + STR) .getResponseCode()); Assert.assertEquals(0, driver.run(STR + intStringSeq.getParent() + "'").getResponseCode()); PigServer pigServer = createPigServer(false); pigServer.registerQuery(STR); Schema expectedSchema = new Schema(); expectedSchema.add(new Schema.FieldSchema("myint", DataType.INTEGER)); expectedSchema.add(new Schema.FieldSchema(STR, DataType.CHARARRAY)); expectedSchema.add(new Schema.FieldSchema(STR, DataType.INTEGER)); expectedSchema.add(new Schema.FieldSchema("year", DataType.CHARARRAY)); Assert.assertEquals(expectedSchema, pigServer.dumpSchema("A")); Iterator<Tuple> iterator = pigServer.openIterator("A"); Tuple t = iterator.next(); Assert.assertEquals(1, t.get(0)); Assert.assertEquals("one", t.get(1)); Assert.assertEquals(1, t.get(2)); Assert.assertEquals("2012", t.get(3)); Assert.assertFalse(iterator.hasNext()); }
/** * Create a table with no explicit schema and ensure it's correctly * discovered from the Thrift struct. */
Create a table with no explicit schema and ensure it's correctly discovered from the Thrift struct
testDynamicCols
{ "repo_name": "alanfgates/hive", "path": "itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatHiveThriftCompatibility.java", "license": "apache-2.0", "size": 4542 }
[ "java.util.Iterator", "org.apache.pig.PigServer", "org.apache.pig.data.DataType", "org.apache.pig.data.Tuple", "org.apache.pig.impl.logicalLayer.schema.Schema", "org.junit.Assert" ]
import java.util.Iterator; import org.apache.pig.PigServer; import org.apache.pig.data.DataType; import org.apache.pig.data.Tuple; import org.apache.pig.impl.logicalLayer.schema.Schema; import org.junit.Assert;
import java.util.*; import org.apache.pig.*; import org.apache.pig.data.*; import org.apache.pig.impl.*; import org.junit.*;
[ "java.util", "org.apache.pig", "org.junit" ]
java.util; org.apache.pig; org.junit;
2,823,035
public Observable<ServiceResponse<DiskInner>> updateWithServiceResponseAsync(String resourceGroupName, String diskName, DiskUpdate disk) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (diskName == null) { throw new IllegalArgumentException("Parameter diskName is required and cannot be null."); } if (disk == null) { throw new IllegalArgumentException("Parameter disk is required and cannot be null."); } Validator.validate(disk); final String apiVersion = "2019-03-01"; Observable<Response<ResponseBody>> observable = service.update(this.client.subscriptionId(), resourceGroupName, diskName, apiVersion, disk, this.client.acceptLanguage(), this.client.userAgent()); return client.getAzureClient().getPutOrPatchResultAsync(observable, new TypeToken<DiskInner>() { }.getType()); }
Observable<ServiceResponse<DiskInner>> function(String resourceGroupName, String diskName, DiskUpdate disk) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException(STR); } if (resourceGroupName == null) { throw new IllegalArgumentException(STR); } if (diskName == null) { throw new IllegalArgumentException(STR); } if (disk == null) { throw new IllegalArgumentException(STR); } Validator.validate(disk); final String apiVersion = STR; Observable<Response<ResponseBody>> observable = service.update(this.client.subscriptionId(), resourceGroupName, diskName, apiVersion, disk, this.client.acceptLanguage(), this.client.userAgent()); return client.getAzureClient().getPutOrPatchResultAsync(observable, new TypeToken<DiskInner>() { }.getType()); }
/** * Updates (patches) a disk. * * @param resourceGroupName The name of the resource group. * @param diskName The name of the managed disk that is being created. The name can't be changed after the disk is created. Supported characters for the name are a-z, A-Z, 0-9 and _. The maximum name length is 80 characters. * @param disk Disk object supplied in the body of the Patch disk operation. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable for the request */
Updates (patches) a disk
updateWithServiceResponseAsync
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/compute/mgmt-v2019_03_01/src/main/java/com/microsoft/azure/management/compute/v2019_03_01/implementation/DisksInner.java", "license": "mit", "size": 88874 }
[ "com.google.common.reflect.TypeToken", "com.microsoft.azure.management.compute.v2019_03_01.DiskUpdate", "com.microsoft.rest.ServiceResponse", "com.microsoft.rest.Validator" ]
import com.google.common.reflect.TypeToken; import com.microsoft.azure.management.compute.v2019_03_01.DiskUpdate; import com.microsoft.rest.ServiceResponse; import com.microsoft.rest.Validator;
import com.google.common.reflect.*; import com.microsoft.azure.management.compute.v2019_03_01.*; import com.microsoft.rest.*;
[ "com.google.common", "com.microsoft.azure", "com.microsoft.rest" ]
com.google.common; com.microsoft.azure; com.microsoft.rest;
2,102,116
@Override protected void onPostExecute(String result) { e.postExecution(result); signal.countDown(); try { JSONObject j = new JSONObject(result); if(j.getString("STATUS").equals("INVALID_SESSION")) { MosesService.getInstance().loggedOut(); MosesService.getInstance().login(); } } catch (JSONException e1) { MosesService ms = MosesService.getInstance(); if(ms != null){ if(ms.isOnline()){ // Server's answer was not malformed due to an absent Internet connection Log.e(LOG_TAG, "onPostExecute() " + e1); Toaster.showBadServerResponseToast(); } } else{ Log.w(LOG_TAG, "onPostExecute() MosesService was not running."); } } }
void function(String result) { e.postExecution(result); signal.countDown(); try { JSONObject j = new JSONObject(result); if(j.getString(STR).equals(STR)) { MosesService.getInstance().loggedOut(); MosesService.getInstance().login(); } } catch (JSONException e1) { MosesService ms = MosesService.getInstance(); if(ms != null){ if(ms.isOnline()){ Log.e(LOG_TAG, STR + e1); Toaster.showBadServerResponseToast(); } } else{ Log.w(LOG_TAG, STR); } } }
/** * Called after doInBackground and given its result as parameter. Calls * the postExecution method of the given Request. * * @param result The result from doInBackground */
Called after doInBackground and given its result as parameter. Calls the postExecution method of the given Request
onPostExecute
{ "repo_name": "ischweizer/MoSeS--Client-", "path": "moses/src/de/da_sense/moses/client/com/NetworkJSON.java", "license": "apache-2.0", "size": 6308 }
[ "android.util.Log", "de.da_sense.moses.client.service.MosesService", "de.da_sense.moses.client.util.Toaster", "org.json.JSONException", "org.json.JSONObject" ]
import android.util.Log; import de.da_sense.moses.client.service.MosesService; import de.da_sense.moses.client.util.Toaster; import org.json.JSONException; import org.json.JSONObject;
import android.util.*; import de.da_sense.moses.client.service.*; import de.da_sense.moses.client.util.*; import org.json.*;
[ "android.util", "de.da_sense.moses", "org.json" ]
android.util; de.da_sense.moses; org.json;
2,578,033
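The STATUS check in the record above is a common org.json pattern; this standalone sketch shows the same idea with optString() so a missing field does not throw. The field name and the return-value handling are illustrative assumptions, not MoSeS API.

import org.json.JSONException;
import org.json.JSONObject;

public final class SessionCheckDemo {

    // Returns true when the server response reports an invalid session.
    static boolean needsRelogin(String serverResponse) {
        try {
            JSONObject json = new JSONObject(serverResponse);
            // optString() returns "" when STATUS is absent instead of throwing
            return "INVALID_SESSION".equals(json.optString("STATUS"));
        } catch (JSONException malformed) {
            // Malformed answer (e.g. HTML error page, no connection): not a session problem
            return false;
        }
    }

    public static void main(String[] args) {
        System.out.println(needsRelogin("{\"STATUS\":\"INVALID_SESSION\"}")); // true
        System.out.println(needsRelogin("not json at all"));                  // false
    }
}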
public static void normalizeRequests( List<ResourceRequest> asks, ResourceCalculator resourceCalculator, Resource clusterResource, Resource minimumResource, Resource maximumResource, Resource incrementResource) { //correct! // LOG.info("calculator.class: " + resourceCalculator.getClass().toString()); for (ResourceRequest ask : asks) { //correct! // LOG.info("resource before normalized: " + ask.getCapability()); normalizeRequest( ask, resourceCalculator, clusterResource, minimumResource, maximumResource, incrementResource); } }
static void function( List<ResourceRequest> asks, ResourceCalculator resourceCalculator, Resource clusterResource, Resource minimumResource, Resource maximumResource, Resource incrementResource) { for (ResourceRequest ask : asks) { normalizeRequest( ask, resourceCalculator, clusterResource, minimumResource, maximumResource, incrementResource); } }
/** * Utility method to normalize a list of resource requests, by ensuring that * the memory for each request is a multiple of minMemory and is not zero. */
Utility method to normalize a list of resource requests, by ensuring that the memory for each request is a multiple of minMemory and is not zero
normalizeRequests
{ "repo_name": "ict-carch/hadoop-plus", "path": "hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerUtils.java", "license": "apache-2.0", "size": 8957 }
[ "java.util.List", "org.apache.hadoop.yarn.api.records.Resource", "org.apache.hadoop.yarn.api.records.ResourceRequest", "org.apache.hadoop.yarn.util.resource.ResourceCalculator" ]
import java.util.List; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.util.resource.ResourceCalculator;
import java.util.*; import org.apache.hadoop.yarn.api.records.*; import org.apache.hadoop.yarn.util.resource.*;
[ "java.util", "org.apache.hadoop" ]
java.util; org.apache.hadoop;
2,116,635
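As a rough, standalone illustration of rounding a request up to a multiple of the increment and clamping it into [minimum, maximum]; this is a simplified sketch, not YARN's ResourceCalculator implementation.

public final class NormalizeSketch {

    // Round the requested memory up to a multiple of the increment, then clamp into [minimum, maximum].
    static long normalizeMemory(long requested, long minimum, long maximum, long increment) {
        long rounded = ((requested + increment - 1) / increment) * increment; // ceiling to a multiple
        return Math.min(maximum, Math.max(minimum, rounded));
    }

    public static void main(String[] args) {
        System.out.println(normalizeMemory(0, 1024, 8192, 1024));    // 1024 -> never zero
        System.out.println(normalizeMemory(1500, 1024, 8192, 1024)); // 2048
        System.out.println(normalizeMemory(9000, 1024, 8192, 1024)); // 8192 -> capped at maximum
    }
}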
private Collection<HadoopTaskInfo> mapperTasks(Iterable<HadoopInputSplit> mappers, HadoopJobMetadata meta) { UUID locNodeId = ctx.localNodeId(); HadoopJobId jobId = meta.jobId(); JobLocalState state = activeJobs.get(jobId); Collection<HadoopTaskInfo> tasks = null; if (mappers != null) { if (state == null) state = initState(jobId); for (HadoopInputSplit split : mappers) { if (state.addMapper(split)) { if (log.isDebugEnabled()) log.debug("Submitting MAP task for execution [locNodeId=" + locNodeId + ", split=" + split + ']'); HadoopTaskInfo taskInfo = new HadoopTaskInfo(MAP, jobId, meta.taskNumber(split), 0, split); if (tasks == null) tasks = new ArrayList<>(); tasks.add(taskInfo); } } } return tasks; }
Collection<HadoopTaskInfo> function(Iterable<HadoopInputSplit> mappers, HadoopJobMetadata meta) { UUID locNodeId = ctx.localNodeId(); HadoopJobId jobId = meta.jobId(); JobLocalState state = activeJobs.get(jobId); Collection<HadoopTaskInfo> tasks = null; if (mappers != null) { if (state == null) state = initState(jobId); for (HadoopInputSplit split : mappers) { if (state.addMapper(split)) { if (log.isDebugEnabled()) log.debug(STR + locNodeId + STR + split + ']'); HadoopTaskInfo taskInfo = new HadoopTaskInfo(MAP, jobId, meta.taskNumber(split), 0, split); if (tasks == null) tasks = new ArrayList<>(); tasks.add(taskInfo); } } } return tasks; }
/** * Creates mapper tasks based on job information. * * @param mappers Mapper blocks. * @param meta Job metadata. * @return Collection of created task infos or {@code null} if no mapper tasks are scheduled for the local node. */
Creates mapper tasks based on job information
mapperTasks
{ "repo_name": "f7753/ignite", "path": "modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/jobtracker/HadoopJobTracker.java", "license": "apache-2.0", "size": 57890 }
[ "java.util.ArrayList", "java.util.Collection", "org.apache.ignite.internal.processors.hadoop.HadoopInputSplit", "org.apache.ignite.internal.processors.hadoop.HadoopJobId", "org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo" ]
import java.util.ArrayList; import java.util.Collection; import org.apache.ignite.internal.processors.hadoop.HadoopInputSplit; import org.apache.ignite.internal.processors.hadoop.HadoopJobId; import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo;
import java.util.*; import org.apache.ignite.internal.processors.hadoop.*;
[ "java.util", "org.apache.ignite" ]
java.util; org.apache.ignite;
612,061
private void mountStorage(StorageData storageData, CmrRepositoryDefinition cmrRepositoryDefinition, boolean fullyDownload, boolean compressBefore, SubMonitor subMonitor) throws StorageException, IOException, SerializationException { LocalStorageData localStorageData = new LocalStorageData(storageData); Path directory = getStoragePath(localStorageData); if (!Files.exists(directory)) { try { Files.createDirectories(directory); } catch (IOException e) { throw new StorageException("Could not create local storage directory.", e); } } if (fullyDownload) { try { subMonitor.setTaskName("Downloading storage files for storage '" + storageData.getName() + "'.."); dataRetriever.downloadAndSaveStorageFiles(cmrRepositoryDefinition, storageData, directory, compressBefore, true, subMonitor, StorageFileType.values()); downloadedStorages.add(localStorageData); localStorageData.setFullyDownloaded(true); } catch (Exception e) { deleteLocalStorageData(localStorageData, false); throw e; } } else { try { subMonitor.setTaskName("Downloading agent and indexing files for storage '" + storageData.getName() + "'.."); dataRetriever.downloadAndSaveStorageFiles(cmrRepositoryDefinition, storageData, directory, compressBefore, true, subMonitor, StorageFileType.AGENT_FILE, StorageFileType.INDEX_FILE); } catch (Exception e) { deleteLocalStorageData(localStorageData, false); throw e; } } writeLocalStorageDataToDisk(localStorageData); final String systemUserName = getSystemUsername(); try { if (null != systemUserName) { StringStorageLabel mountedByLabel = new StringStorageLabel(systemUserName, new ExploredByLabelType()); cmrRepositoryDefinition.getStorageService().addLabelToStorage(storageData, mountedByLabel, true); }
void function(StorageData storageData, CmrRepositoryDefinition cmrRepositoryDefinition, boolean fullyDownload, boolean compressBefore, SubMonitor subMonitor) throws StorageException, IOException, SerializationException { LocalStorageData localStorageData = new LocalStorageData(storageData); Path directory = getStoragePath(localStorageData); if (!Files.exists(directory)) { try { Files.createDirectories(directory); } catch (IOException e) { throw new StorageException(STR, e); } } if (fullyDownload) { try { subMonitor.setTaskName(STR + storageData.getName() + "'.."); dataRetriever.downloadAndSaveStorageFiles(cmrRepositoryDefinition, storageData, directory, compressBefore, true, subMonitor, StorageFileType.values()); downloadedStorages.add(localStorageData); localStorageData.setFullyDownloaded(true); } catch (Exception e) { deleteLocalStorageData(localStorageData, false); throw e; } } else { try { subMonitor.setTaskName(STR + storageData.getName() + "'.."); dataRetriever.downloadAndSaveStorageFiles(cmrRepositoryDefinition, storageData, directory, compressBefore, true, subMonitor, StorageFileType.AGENT_FILE, StorageFileType.INDEX_FILE); } catch (Exception e) { deleteLocalStorageData(localStorageData, false); throw e; } } writeLocalStorageDataToDisk(localStorageData); final String systemUserName = getSystemUsername(); try { if (null != systemUserName) { StringStorageLabel mountedByLabel = new StringStorageLabel(systemUserName, new ExploredByLabelType()); cmrRepositoryDefinition.getStorageService().addLabelToStorage(storageData, mountedByLabel, true); }
/** * Mounts a new storage locally. Provides option to specify if the complete download should be * performed. * * @param storageData * Storage to mount. * @param cmrRepositoryDefinition * {@link CmrRepositoryDefinition}. * @param fullyDownload * Should storage be immediately fully downloaded. Intended for future use. * @param compressBefore * If the fullyDownload is <code>true</code>, this parameter can define if data files * should be compressed on the fly before being sent. * @param subMonitor * {@link SubMonitor} to report to. * @throws StorageException * If storage directory can not be created or storage file can not be saved. * @throws IOException * If {@link IOException} occurs. * @throws SerializationException * If {@link SerializationException} occurs. * */
Mounts a new storage locally. Provides option to specify if the complete download should be performed
mountStorage
{ "repo_name": "stefansiegl/inspectIT", "path": "inspectIT/src/info/novatec/inspectit/rcp/storage/InspectITStorageManager.java", "license": "agpl-3.0", "size": 39644 }
[ "info.novatec.inspectit.rcp.repository.CmrRepositoryDefinition", "info.novatec.inspectit.storage.LocalStorageData", "info.novatec.inspectit.storage.StorageData", "info.novatec.inspectit.storage.StorageException", "info.novatec.inspectit.storage.StorageFileType", "info.novatec.inspectit.storage.label.StringStorageLabel", "info.novatec.inspectit.storage.label.type.impl.ExploredByLabelType", "info.novatec.inspectit.storage.serializer.SerializationException", "java.io.IOException", "java.nio.file.Files", "java.nio.file.Path", "org.eclipse.core.runtime.SubMonitor" ]
import info.novatec.inspectit.rcp.repository.CmrRepositoryDefinition; import info.novatec.inspectit.storage.LocalStorageData; import info.novatec.inspectit.storage.StorageData; import info.novatec.inspectit.storage.StorageException; import info.novatec.inspectit.storage.StorageFileType; import info.novatec.inspectit.storage.label.StringStorageLabel; import info.novatec.inspectit.storage.label.type.impl.ExploredByLabelType; import info.novatec.inspectit.storage.serializer.SerializationException; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import org.eclipse.core.runtime.SubMonitor;
import info.novatec.inspectit.rcp.repository.*; import info.novatec.inspectit.storage.*; import info.novatec.inspectit.storage.label.*; import info.novatec.inspectit.storage.label.type.impl.*; import info.novatec.inspectit.storage.serializer.*; import java.io.*; import java.nio.file.*; import org.eclipse.core.runtime.*;
[ "info.novatec.inspectit", "java.io", "java.nio", "org.eclipse.core" ]
info.novatec.inspectit; java.io; java.nio; org.eclipse.core;
2,066,389
@java.lang.Deprecated public java.util.List<io.kubernetes.client.openapi.models.V1KeyToPath> getItems() { return items != null ? build(items) : null; }
@java.lang.Deprecated java.util.List<io.kubernetes.client.openapi.models.V1KeyToPath> function() { return items != null ? build(items) : null; }
/** * This method has been deprecated, please use method buildItems instead. * * @return The buildable object. */
This method has been deprecated, please use method buildItems instead
getItems
{ "repo_name": "kubernetes-client/java", "path": "fluent/src/main/java/io/kubernetes/client/openapi/models/V1ConfigMapVolumeSourceFluentImpl.java", "license": "apache-2.0", "size": 12853 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,863,112
public List<GROUP> getGroups(List<RESOURCE> resources) { List<RESOURCE> sortedResources = new ArrayList<RESOURCE>(resources); Collections.sort(sortedResources, new SortingComparator()); Map<Long, Integer> daysMap = computeDaysForResources(sortedResources); Map<GroupAge, List<RESOURCE>> resourcesByAge = partitionPublishResourcesByAge(sortedResources, daysMap); List<List<RESOURCE>> youngGroups = partitionYoungResources(resourcesByAge.get(GroupAge.young)); List<List<RESOURCE>> mediumGroups = partitionMediumResources(resourcesByAge.get(GroupAge.medium), daysMap); List<RESOURCE> oldGroup = resourcesByAge.get(GroupAge.old); List<GROUP> resultGroups = new ArrayList<GROUP>(); for (List<RESOURCE> groupRes : youngGroups) { String name = getPublishGroupName(groupRes, GroupAge.young); resultGroups.add(createGroup(name, groupRes)); } for (List<RESOURCE> groupRes : mediumGroups) { String name = getPublishGroupName(groupRes, GroupAge.medium); resultGroups.add(createGroup(name, groupRes)); } if (!oldGroup.isEmpty()) { String oldName = getPublishGroupName(oldGroup, GroupAge.old); resultGroups.add(createGroup(oldName, oldGroup)); } return resultGroups; }
List<GROUP> function(List<RESOURCE> resources) { List<RESOURCE> sortedResources = new ArrayList<RESOURCE>(resources); Collections.sort(sortedResources, new SortingComparator()); Map<Long, Integer> daysMap = computeDaysForResources(sortedResources); Map<GroupAge, List<RESOURCE>> resourcesByAge = partitionPublishResourcesByAge(sortedResources, daysMap); List<List<RESOURCE>> youngGroups = partitionYoungResources(resourcesByAge.get(GroupAge.young)); List<List<RESOURCE>> mediumGroups = partitionMediumResources(resourcesByAge.get(GroupAge.medium), daysMap); List<RESOURCE> oldGroup = resourcesByAge.get(GroupAge.old); List<GROUP> resultGroups = new ArrayList<GROUP>(); for (List<RESOURCE> groupRes : youngGroups) { String name = getPublishGroupName(groupRes, GroupAge.young); resultGroups.add(createGroup(name, groupRes)); } for (List<RESOURCE> groupRes : mediumGroups) { String name = getPublishGroupName(groupRes, GroupAge.medium); resultGroups.add(createGroup(name, groupRes)); } if (!oldGroup.isEmpty()) { String oldName = getPublishGroupName(oldGroup, GroupAge.old); resultGroups.add(createGroup(oldName, oldGroup)); } return resultGroups; }
/** * Splits a list of resources into groups.<p> * * @param resources the list of resources * * @return the list of groups */
Splits a list of resources into groups
getGroups
{ "repo_name": "mediaworx/opencms-core", "path": "src/org/opencms/ade/publish/A_CmsPublishGroupHelper.java", "license": "lgpl-2.1", "size": 14359 }
[ "java.util.ArrayList", "java.util.Collections", "java.util.List", "java.util.Map" ]
import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
973,315
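A generic sketch of the sort-then-partition-by-age idea behind getGroups(); the day thresholds and the Resource type below are invented for the demo, while the real logic lives in A_CmsPublishGroupHelper.

import java.util.ArrayList;
import java.util.Comparator;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;

public final class AgeGroupingSketch {

    enum GroupAge { young, medium, old }

    record Resource(String name, int ageInDays) { }

    static Map<GroupAge, List<Resource>> partitionByAge(List<Resource> resources) {
        List<Resource> sorted = new ArrayList<>(resources);
        sorted.sort(Comparator.comparingInt(Resource::ageInDays)); // newest resources first
        Map<GroupAge, List<Resource>> byAge = new EnumMap<>(GroupAge.class);
        for (GroupAge age : GroupAge.values()) {
            byAge.put(age, new ArrayList<>());
        }
        for (Resource r : sorted) {
            // Thresholds picked arbitrarily for the demo: < 7 days young, < 28 days medium, else old.
            GroupAge age = r.ageInDays() < 7 ? GroupAge.young
                    : r.ageInDays() < 28 ? GroupAge.medium
                    : GroupAge.old;
            byAge.get(age).add(r);
        }
        return byAge;
    }

    public static void main(String[] args) {
        List<Resource> resources = List.of(
                new Resource("index.html", 2),
                new Resource("news.html", 15),
                new Resource("archive.html", 90));
        System.out.println(partitionByAge(resources));
    }
}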
public static String fromByteBuffer(ByteBuffer byteBuffer) { return Base64.encodeAsString(copyBytesFrom(byteBuffer)); }
static String function(ByteBuffer byteBuffer) { return Base64.encodeAsString(copyBytesFrom(byteBuffer)); }
/** * Base64 encodes the data in the specified byte buffer (from the current * position to the buffer's limit) and returns it as a base64 encoded * string. * * @param byteBuffer * The data to base64 encode and return as a string; must not be * null. * * @return The base64 encoded contents of the specified byte buffer. */
Base64 encodes the data in the specified byte buffer (from the current position to the buffer's limit) and returns it as a base64 encoded string
fromByteBuffer
{ "repo_name": "bstopp/acs-aem-commons", "path": "bundle/src/test/java/com/amazonaws/util/StringUtils.java", "license": "apache-2.0", "size": 9812 }
[ "java.nio.ByteBuffer" ]
import java.nio.ByteBuffer;
import java.nio.*;
[ "java.nio" ]
java.nio;
2,422,779
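A minimal usage sketch for the record above, assuming the AWS-SDK-style StringUtils shown there is importable; "hello" encodes to "aGVsbG8=".

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import com.amazonaws.util.StringUtils;

public class Base64Demo {
    public static void main(String[] args) {
        ByteBuffer buffer = ByteBuffer.wrap("hello".getBytes(StandardCharsets.UTF_8));
        // Encodes the bytes from the buffer's current position up to its limit.
        String encoded = StringUtils.fromByteBuffer(buffer);
        System.out.println(encoded); // aGVsbG8=
    }
}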
@Override public boolean isStartStopGA(GroupAddress groupAddress) { synchronized (bindingConfigs) { for (BindingConfig config : bindingConfigs.values()) { KNXBindingConfig knxConfig = (KNXBindingConfig) config; for (KNXBindingConfigItem configItem : knxConfig) { Boolean startStopBehavior = configItem.startStopMap.get(groupAddress); if (startStopBehavior != null) { return startStopBehavior; } } } } return false; }
boolean function(GroupAddress groupAddress) { synchronized (bindingConfigs) { for (BindingConfig config : bindingConfigs.values()) { KNXBindingConfig knxConfig = (KNXBindingConfig) config; for (KNXBindingConfigItem configItem : knxConfig) { Boolean startStopBehavior = configItem.startStopMap.get(groupAddress); if (startStopBehavior != null) { return startStopBehavior; } } } } return false; }
/** * Determines if the given group address is marked for start-stop dimming. * * @param groupAddress the group address to check start-stop dimming for * @return true, if the given group address is marked for start-stop dimming, false otherwise. */
Determines if the given group address is marked for start-stop dimming
isStartStopGA
{ "repo_name": "falkena/openhab", "path": "bundles/binding/org.openhab.binding.knx/src/main/java/org/openhab/binding/knx/internal/config/KNXGenericBindingProvider.java", "license": "epl-1.0", "size": 24509 }
[ "org.openhab.core.binding.BindingConfig" ]
import org.openhab.core.binding.BindingConfig;
import org.openhab.core.binding.*;
[ "org.openhab.core" ]
org.openhab.core;
1,402,032