method
stringlengths 13
441k
| clean_method
stringlengths 7
313k
| doc
stringlengths 17
17.3k
| comment
stringlengths 3
1.42k
| method_name
stringlengths 1
273
| extra
dict | imports
sequence | imports_info
stringlengths 19
34.8k
| cluster_imports_info
stringlengths 15
3.66k
| libraries
sequence | libraries_info
stringlengths 6
661
| id
int64 0
2.92M
|
---|---|---|---|---|---|---|---|---|---|---|---|
JPanel panel;
JTextPane textPane = null;
panel = currentCodePanel();
if(panel != null){
if (panel instanceof JPanelCode){
textPane = ((JPanelCode)panel).getCodePane();
}
}
return textPane;
}
| JPanel panel; JTextPane textPane = null; panel = currentCodePanel(); if(panel != null){ if (panel instanceof JPanelCode){ textPane = ((JPanelCode)panel).getCodePane(); } } return textPane; } | /**
* Returns the current Code PaneText
* @return
*/ | Returns the current Code PaneText | currentCodePane | {
"repo_name": "stefan-rass/sunset-ffapl",
"path": "src/sunset/gui/tabbedpane/JTabbedPaneCode.java",
"license": "gpl-3.0",
"size": 2782
} | [
"javax.swing.JPanel",
"javax.swing.JTextPane"
] | import javax.swing.JPanel; import javax.swing.JTextPane; | import javax.swing.*; | [
"javax.swing"
] | javax.swing; | 2,820,017 |
@NonNull
public static DefaultConfiguration create(@NonNull LintClient client, @NonNull File lintFile) {
return new DefaultConfiguration(client, null , null , lintFile);
} | static DefaultConfiguration function(@NonNull LintClient client, @NonNull File lintFile) { return new DefaultConfiguration(client, null , null , lintFile); } | /**
* Creates a new {@link DefaultConfiguration} for the given lint config
* file, not affiliated with a project. This is used for global
* configurations.
*
* @param client the client to report errors to etc
* @param lintFile the lint file containing the configuration
* @return a new configuration
*/ | Creates a new <code>DefaultConfiguration</code> for the given lint config file, not affiliated with a project. This is used for global configurations | create | {
"repo_name": "tranleduy2000/javaide",
"path": "aosp/lint-api/src/main/java/com/android/tools/lint/client/api/DefaultConfiguration.java",
"license": "gpl-3.0",
"size": 22846
} | [
"com.android.annotations.NonNull",
"java.io.File"
] | import com.android.annotations.NonNull; import java.io.File; | import com.android.annotations.*; import java.io.*; | [
"com.android.annotations",
"java.io"
] | com.android.annotations; java.io; | 446,293 |
private static List<Building> getManufacturingBuildingsNeedingSalvageWork(List<Building> buildingList,
int skill) {
List<Building> result = new ArrayList<>();
Iterator<Building> i = buildingList.iterator();
while (i.hasNext()) {
Building building = i.next();
Manufacture manufacturingFunction = building.getManufacture();
if (manufacturingFunction.requiresSalvagingWork(skill)) {
result.add(building);
}
}
return result;
}
| static List<Building> function(List<Building> buildingList, int skill) { List<Building> result = new ArrayList<>(); Iterator<Building> i = buildingList.iterator(); while (i.hasNext()) { Building building = i.next(); Manufacture manufacturingFunction = building.getManufacture(); if (manufacturingFunction.requiresSalvagingWork(skill)) { result.add(building); } } return result; } | /**
* Gets a list of manufacturing buildings needing work from a list of buildings
* with the manufacture function.
* @param buildingList list of buildings with the manufacture function.
* @param skill the materials science skill level of the person.
* @return list of manufacture buildings needing work.
*/ | Gets a list of manufacturing buildings needing work from a list of buildings with the manufacture function | getManufacturingBuildingsNeedingSalvageWork | {
"repo_name": "mars-sim/mars-sim",
"path": "mars-sim-core/src/main/java/org/mars_sim/msp/core/person/ai/task/SalvageGood.java",
"license": "gpl-3.0",
"size": 14183
} | [
"java.util.ArrayList",
"java.util.Iterator",
"java.util.List",
"org.mars_sim.msp.core.structure.building.Building",
"org.mars_sim.msp.core.structure.building.function.Manufacture"
] | import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.mars_sim.msp.core.structure.building.Building; import org.mars_sim.msp.core.structure.building.function.Manufacture; | import java.util.*; import org.mars_sim.msp.core.structure.building.*; import org.mars_sim.msp.core.structure.building.function.*; | [
"java.util",
"org.mars_sim.msp"
] | java.util; org.mars_sim.msp; | 2,325,255 |
Connection conn = dmd.getConnection();
if (conn.getAutoCommit()) {
throw new IllegalArgumentException();
}
Statement s = dmd.getConnection().createStatement();
// Functions - not supported by JDBC meta data until JDBC 4
// Need to use the CHAR() function on A.ALIASTYPE
// so that the compare will work in any schema.
PreparedStatement psf = conn.prepareStatement("SELECT ALIAS FROM SYS.SYSALIASES A, SYS.SYSSCHEMAS S"
+ " WHERE A.SCHEMAID = S.SCHEMAID " + " AND CHAR(A.ALIASTYPE) = ? " + " AND S.SCHEMANAME = ?");
psf.setString(1, "F");
psf.setString(2, schema);
ResultSet rs = psf.executeQuery();
dropUsingDMD(s, rs, schema, "ALIAS", "FUNCTION");
rs.close();
// Procedures
rs = dmd.getProcedures((String) null, schema, (String) null);
dropUsingDMD(s, rs, schema, "PROCEDURE_NAME", "PROCEDURE");
rs.close();
// Views
rs = dmd.getTables((String) null, schema, (String) null, GET_TABLES_VIEW);
dropUsingDMD(s, rs, schema, "TABLE_NAME", "VIEW");
rs.close();
// Tables
rs = dmd.getTables((String) null, schema, (String) null, GET_TABLES_TABLE);
dropUsingDMD(s, rs, schema, "TABLE_NAME", "TABLE");
rs.close();
// At this point there may be tables left due to
// foreign key constraints leading to a dependency loop.
// Drop any constraints that remain and then drop the tables.
// If there are no tables then this should be a quick no-op.
ResultSet table_rs = dmd.getTables((String) null, schema, (String) null, GET_TABLES_TABLE);
while (table_rs.next()) {
String tablename = table_rs.getString("TABLE_NAME");
rs = dmd.getExportedKeys((String) null, schema, tablename);
while (rs.next()) {
short keyPosition = rs.getShort("KEY_SEQ");
if (keyPosition != 1)
continue;
String fkName = rs.getString("FK_NAME");
// No name, probably can't happen but couldn't drop it anyway.
if (fkName == null)
continue;
String fkSchema = rs.getString("FKTABLE_SCHEM");
String fkTable = rs.getString("FKTABLE_NAME");
String ddl = "ALTER TABLE " + DerbyCleanDbUtil.escape(fkSchema, fkTable) + " DROP FOREIGN KEY " + DerbyCleanDbUtil.escape(fkName);
s.executeUpdate(ddl);
}
rs.close();
}
table_rs.close();
conn.commit();
// Tables (again)
rs = dmd.getTables((String) null, schema, (String) null, GET_TABLES_TABLE);
dropUsingDMD(s, rs, schema, "TABLE_NAME", "TABLE");
rs.close();
// drop UDTs
psf.setString(1, "A");
psf.setString(2, schema);
rs = psf.executeQuery();
dropUsingDMD(s, rs, schema, "ALIAS", "TYPE");
rs.close();
// drop aggregates
psf.setString(1, "G");
psf.setString(2, schema);
rs = psf.executeQuery();
dropUsingDMD(s, rs, schema, "ALIAS", "DERBY AGGREGATE");
rs.close();
psf.close();
// Synonyms - need work around for DERBY-1790 where
// passing a table type of SYNONYM fails.
rs = dmd.getTables((String) null, schema, (String) null, GET_TABLES_SYNONYM);
dropUsingDMD(s, rs, schema, "TABLE_NAME", "SYNONYM");
rs.close();
// sequences
if (sysSequencesExists(conn)) {
psf = conn.prepareStatement("SELECT SEQUENCENAME FROM SYS.SYSSEQUENCES A, SYS.SYSSCHEMAS S" + " WHERE A.SCHEMAID = S.SCHEMAID "
+ " AND S.SCHEMANAME = ?");
psf.setString(1, schema);
rs = psf.executeQuery();
dropUsingDMD(s, rs, schema, "SEQUENCENAME", "SEQUENCE");
rs.close();
psf.close();
}
// Finally drop the schema if it is not APP
if (!schema.equals("APP")) {
s.executeUpdate("DROP SCHEMA " + DerbyCleanDbUtil.escape(schema) + " RESTRICT");
}
conn.commit();
s.close();
} | Connection conn = dmd.getConnection(); if (conn.getAutoCommit()) { throw new IllegalArgumentException(); } Statement s = dmd.getConnection().createStatement(); PreparedStatement psf = conn.prepareStatement(STR + STR + STR + STR); psf.setString(1, "F"); psf.setString(2, schema); ResultSet rs = psf.executeQuery(); dropUsingDMD(s, rs, schema, "ALIAS", STR); rs.close(); rs = dmd.getProcedures((String) null, schema, (String) null); dropUsingDMD(s, rs, schema, STR, STR); rs.close(); rs = dmd.getTables((String) null, schema, (String) null, GET_TABLES_VIEW); dropUsingDMD(s, rs, schema, STR, "VIEW"); rs.close(); rs = dmd.getTables((String) null, schema, (String) null, GET_TABLES_TABLE); dropUsingDMD(s, rs, schema, STR, "TABLE"); rs.close(); ResultSet table_rs = dmd.getTables((String) null, schema, (String) null, GET_TABLES_TABLE); while (table_rs.next()) { String tablename = table_rs.getString(STR); rs = dmd.getExportedKeys((String) null, schema, tablename); while (rs.next()) { short keyPosition = rs.getShort(STR); if (keyPosition != 1) continue; String fkName = rs.getString(STR); if (fkName == null) continue; String fkSchema = rs.getString(STR); String fkTable = rs.getString(STR); String ddl = STR + DerbyCleanDbUtil.escape(fkSchema, fkTable) + STR + DerbyCleanDbUtil.escape(fkName); s.executeUpdate(ddl); } rs.close(); } table_rs.close(); conn.commit(); rs = dmd.getTables((String) null, schema, (String) null, GET_TABLES_TABLE); dropUsingDMD(s, rs, schema, STR, "TABLE"); rs.close(); psf.setString(1, "A"); psf.setString(2, schema); rs = psf.executeQuery(); dropUsingDMD(s, rs, schema, "ALIAS", "TYPE"); rs.close(); psf.setString(1, "G"); psf.setString(2, schema); rs = psf.executeQuery(); dropUsingDMD(s, rs, schema, "ALIAS", STR); rs.close(); psf.close(); rs = dmd.getTables((String) null, schema, (String) null, GET_TABLES_SYNONYM); dropUsingDMD(s, rs, schema, STR, STR); rs.close(); if (sysSequencesExists(conn)) { psf = conn.prepareStatement(STR + STR + STR); psf.setString(1, 
schema); rs = psf.executeQuery(); dropUsingDMD(s, rs, schema, STR, STR); rs.close(); psf.close(); } if (!schema.equals("APP")) { s.executeUpdate(STR + DerbyCleanDbUtil.escape(schema) + STR); } conn.commit(); s.close(); } | /**
* APP is default schema
* Drop a database schema by dropping all objects in it and then executing DROP SCHEMA. If the schema is APP it is
* cleaned but DROP
* SCHEMA is not executed.
* way until everything can be dropped.
*
* @param dmd
* DatabaseMetaData object for database
* @param schema
* Name of the schema
* @throws java.sql.SQLException
* database error
*/ | APP is default schema Drop a database schema by dropping all objects in it and then executing DROP SCHEMA. If the schema is APP it is cleaned but DROP SCHEMA is not executed. way until everything can be dropped | dropSchema | {
"repo_name": "benfortuna/copper-engine",
"path": "projects/copper-monitoring/copper-monitoring-example/src/main/java/org/copperengine/monitoring/example/util/DerbyCleanDbUtil.java",
"license": "apache-2.0",
"size": 11815
} | [
"java.sql.Connection",
"java.sql.PreparedStatement",
"java.sql.ResultSet",
"java.sql.Statement"
] | import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.Statement; | import java.sql.*; | [
"java.sql"
] | java.sql; | 1,363,544 |
private void loadFSImage(File curFile, MD5Hash expectedMd5) throws IOException {
FSImageFormat.Loader loader = new FSImageFormat.Loader(
conf, getFSNamesystem());
loader.load(curFile);
namesystem.setBlockPoolId(this.getBlockPoolID());
// Check that the image digest we loaded matches up with what
// we expected
MD5Hash readImageMd5 = loader.getLoadedImageMd5();
if (expectedMd5 != null &&
!expectedMd5.equals(readImageMd5)) {
throw new IOException("Image file " + curFile +
" is corrupt with MD5 checksum of " + readImageMd5 +
" but expecting " + expectedMd5);
}
long txId = loader.getLoadedImageTxId();
LOG.info("Loaded image for txid " + txId + " from " + curFile);
lastAppliedTxId = txId;
storage.setMostRecentCheckpointInfo(txId, curFile.lastModified());
} | void function(File curFile, MD5Hash expectedMd5) throws IOException { FSImageFormat.Loader loader = new FSImageFormat.Loader( conf, getFSNamesystem()); loader.load(curFile); namesystem.setBlockPoolId(this.getBlockPoolID()); MD5Hash readImageMd5 = loader.getLoadedImageMd5(); if (expectedMd5 != null && !expectedMd5.equals(readImageMd5)) { throw new IOException(STR + curFile + STR + readImageMd5 + STR + expectedMd5); } long txId = loader.getLoadedImageTxId(); LOG.info(STR + txId + STR + curFile); lastAppliedTxId = txId; storage.setMostRecentCheckpointInfo(txId, curFile.lastModified()); } | /**
* Load in the filesystem image from file. It's a big list of
* filenames and blocks. Return whether we should
* "re-save" and consolidate the edit-logs
*/ | Load in the filesystem image from file. It's a big list of filenames and blocks. Return whether we should "re-save" and consolidate the edit-logs | loadFSImage | {
"repo_name": "moreus/hadoop",
"path": "hadoop-0.23.10/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImage.java",
"license": "apache-2.0",
"size": 39318
} | [
"java.io.File",
"java.io.IOException",
"org.apache.hadoop.io.MD5Hash"
] | import java.io.File; import java.io.IOException; import org.apache.hadoop.io.MD5Hash; | import java.io.*; import org.apache.hadoop.io.*; | [
"java.io",
"org.apache.hadoop"
] | java.io; org.apache.hadoop; | 1,697,138 |
StringPropertyTO repostNoteIdProperty = PropertyHelper.getPropertyTO(
target.getObjectProperties(), PropertyManagement.KEY_GROUP,
RepostNoteStoringPreProcessor.KEY_ORIGIN_NOTE_ID);
if (repostNoteIdProperty != null) {
try {
Long repostNoteId = Long.parseLong(repostNoteIdProperty.getPropertyValue());
// check for Attachment autosave property of repost, and add those attachments
StringPropertyTO repostAttachmentIds = PropertyHelper.getPropertyTO(
target.getObjectProperties(), PropertyManagement.KEY_GROUP,
RepostNoteStoringPreProcessor.KEY_ORIGIN_ATTACHMENT_IDS);
if (repostAttachmentIds != null) {
List<Long> attachmentIdsToFilter = StringHelper.getStringAsLongList(
repostAttachmentIds.getPropertyValue(),
RepostNoteStoringPreProcessor.ORIGIN_ATTACHMENT_IDS_SEPARATOR);
Collection<AttachmentData> repostAttachments = ServiceLocator.findService(
ResourceStoringManagement.class)
.getAttachmentsOfNote(repostNoteId, attachmentIdsToFilter,
getAttachmentConverter());
target.getAttachments().addAll(repostAttachments);
}
} catch (NumberFormatException e) {
LOGGER.error("ID of note to repost is not valid", e);
} catch (AuthorizationException e) {
// silently ignore the authorization exception and just don't include the
// attachments
LOGGER.debug("Current user has no access to the note to repost", e);
} catch (NotFoundException e) {
// silently ignore that the note to repost does not exist anymore and just don't
// include the attachments
LOGGER.debug("Note to repost does not exist anymore", e);
}
}
}
| StringPropertyTO repostNoteIdProperty = PropertyHelper.getPropertyTO( target.getObjectProperties(), PropertyManagement.KEY_GROUP, RepostNoteStoringPreProcessor.KEY_ORIGIN_NOTE_ID); if (repostNoteIdProperty != null) { try { Long repostNoteId = Long.parseLong(repostNoteIdProperty.getPropertyValue()); StringPropertyTO repostAttachmentIds = PropertyHelper.getPropertyTO( target.getObjectProperties(), PropertyManagement.KEY_GROUP, RepostNoteStoringPreProcessor.KEY_ORIGIN_ATTACHMENT_IDS); if (repostAttachmentIds != null) { List<Long> attachmentIdsToFilter = StringHelper.getStringAsLongList( repostAttachmentIds.getPropertyValue(), RepostNoteStoringPreProcessor.ORIGIN_ATTACHMENT_IDS_SEPARATOR); Collection<AttachmentData> repostAttachments = ServiceLocator.findService( ResourceStoringManagement.class) .getAttachmentsOfNote(repostNoteId, attachmentIdsToFilter, getAttachmentConverter()); target.getAttachments().addAll(repostAttachments); } } catch (NumberFormatException e) { LOGGER.error(STR, e); } catch (AuthorizationException e) { LOGGER.debug(STR, e); } catch (NotFoundException e) { LOGGER.debug(STR, e); } } } | /**
* If the autosave is a repost, add the attachments of the original note.
*
* @param target
* the autosave item
*/ | If the autosave is a repost, add the attachments of the original note | addRepostData | {
"repo_name": "Communote/communote-server",
"path": "communote/persistence/src/main/java/com/communote/server/core/vo/query/note/SimpleNoteListItemToAutosaveNoteDataConverter.java",
"license": "apache-2.0",
"size": 5590
} | [
"com.communote.common.string.StringHelper",
"com.communote.server.api.ServiceLocator",
"com.communote.server.api.core.attachment.AttachmentData",
"com.communote.server.api.core.common.NotFoundException",
"com.communote.server.api.core.property.PropertyHelper",
"com.communote.server.api.core.property.PropertyManagement",
"com.communote.server.api.core.property.StringPropertyTO",
"com.communote.server.api.core.security.AuthorizationException",
"com.communote.server.core.blog.notes.processors.RepostNoteStoringPreProcessor",
"com.communote.server.core.storing.ResourceStoringManagement",
"java.util.Collection",
"java.util.List"
] | import com.communote.common.string.StringHelper; import com.communote.server.api.ServiceLocator; import com.communote.server.api.core.attachment.AttachmentData; import com.communote.server.api.core.common.NotFoundException; import com.communote.server.api.core.property.PropertyHelper; import com.communote.server.api.core.property.PropertyManagement; import com.communote.server.api.core.property.StringPropertyTO; import com.communote.server.api.core.security.AuthorizationException; import com.communote.server.core.blog.notes.processors.RepostNoteStoringPreProcessor; import com.communote.server.core.storing.ResourceStoringManagement; import java.util.Collection; import java.util.List; | import com.communote.common.string.*; import com.communote.server.api.*; import com.communote.server.api.core.attachment.*; import com.communote.server.api.core.common.*; import com.communote.server.api.core.property.*; import com.communote.server.api.core.security.*; import com.communote.server.core.blog.notes.processors.*; import com.communote.server.core.storing.*; import java.util.*; | [
"com.communote.common",
"com.communote.server",
"java.util"
] | com.communote.common; com.communote.server; java.util; | 1,498,364 |
@param s the string to be unquoted.
@return an unquoted string. */
public static String unquote(String s) {
return StringEscapeUtils.unescapeJava(s);
}
| @param s the string to be unquoted. @return an unquoted string. */ static String function(String s) { return StringEscapeUtils.unescapeJava(s); } | /** Unquote the given string and replace escape sequences by the
original characters.
@param s the string to be unquoted.
@return an unquoted string. */ | Unquote the given string and replace escape sequences by the | unquote | {
"repo_name": "DaveVoorhis/Rel",
"path": "Shared/src/org/reldb/rel/utilities/StringUtils.java",
"license": "apache-2.0",
"size": 773
} | [
"org.apache.commons.text.StringEscapeUtils"
] | import org.apache.commons.text.StringEscapeUtils; | import org.apache.commons.text.*; | [
"org.apache.commons"
] | org.apache.commons; | 2,219,220 |
TreeMap<String, String> map = new TreeMap<String, String>();
map.put("A", "a");
map.put("B", "b");
map.put("C", "c");
Iterator<Entry<String, String>> iterator = map.entrySet().iterator();
Entry<String, String> entryA = iterator.next();
assertEquals("a", entryA.setValue("x"));
assertEquals("x", entryA.getValue());
assertEquals("x", map.get("A"));
Entry<String, String> entryB = iterator.next();
assertEquals("b", entryB.setValue("y"));
Entry<String, String> entryC = iterator.next();
assertEquals("c", entryC.setValue("z"));
assertEquals("y", entryB.getValue());
assertEquals("y", map.get("B"));
assertEquals("z", entryC.getValue());
assertEquals("z", map.get("C"));
} | TreeMap<String, String> map = new TreeMap<String, String>(); map.put("A", "a"); map.put("B", "b"); map.put("C", "c"); Iterator<Entry<String, String>> iterator = map.entrySet().iterator(); Entry<String, String> entryA = iterator.next(); assertEquals("a", entryA.setValue("x")); assertEquals("x", entryA.getValue()); assertEquals("x", map.get("A")); Entry<String, String> entryB = iterator.next(); assertEquals("b", entryB.setValue("y")); Entry<String, String> entryC = iterator.next(); assertEquals("c", entryC.setValue("z")); assertEquals("y", entryB.getValue()); assertEquals("y", map.get("B")); assertEquals("z", entryC.getValue()); assertEquals("z", map.get("C")); } | /**
* Test that the entrySet() method produces correctly mutable entries.
*/ | Test that the entrySet() method produces correctly mutable entries | testEntrySetSetValue | {
"repo_name": "debian-pkg-android-tools/android-platform-libcore",
"path": "luni/src/test/java/libcore/java/util/TreeMapTest.java",
"license": "gpl-2.0",
"size": 25093
} | [
"java.util.Iterator",
"java.util.Map",
"java.util.TreeMap"
] | import java.util.Iterator; import java.util.Map; import java.util.TreeMap; | import java.util.*; | [
"java.util"
] | java.util; | 761,481 |
TestSuite suite = new TestSuite ( StringUtilitiesTest.class );
return suite;
} | TestSuite suite = new TestSuite ( StringUtilitiesTest.class ); return suite; } | /**
* A <code>TestSuite</code> is a <code>Composite</code> of Tests.
* It runs a collection of test cases.
*/ | A <code>TestSuite</code> is a <code>Composite</code> of Tests. It runs a collection of test cases | suite | {
"repo_name": "janekdb/ntropa",
"path": "common/utility/src/tests/org/ntropa/utility/StringUtilitiesTest.java",
"license": "apache-2.0",
"size": 9849
} | [
"junit.framework.TestSuite"
] | import junit.framework.TestSuite; | import junit.framework.*; | [
"junit.framework"
] | junit.framework; | 264,656 |
private void setupProxy(okhttp3.OkHttpClient.Builder okHttpBuilder) {
Credentials proxyCredentials = properties.getProxyCredentials();
okHttpBuilder.proxy(new Proxy(Proxy.Type.HTTP, new InetSocketAddress(
properties.getProxyLocation().getHost(),
properties.getProxyLocation().getPort())));
if (proxyCredentials != null) {
if (proxyCredentials.isNtlm()) {
okHttpBuilder
.proxyAuthenticator(new NtlmAuthenticator(
proxyCredentials.getUserName(),
proxyCredentials.getPassPhrase(),
proxyCredentials.getDomain(),
proxyCredentials.getWorkstation()));
} else {
okHttpBuilder
.proxyAuthenticator((route, response) -> response.request().newBuilder()
.header("Connection","close")
.header(PROXY_AUTHORIZATION_HEADER, okhttp3.Credentials.basic(
proxyCredentials.getUserName(),
proxyCredentials.getPassPhrase()))
.build());
}
}
} | void function(okhttp3.OkHttpClient.Builder okHttpBuilder) { Credentials proxyCredentials = properties.getProxyCredentials(); okHttpBuilder.proxy(new Proxy(Proxy.Type.HTTP, new InetSocketAddress( properties.getProxyLocation().getHost(), properties.getProxyLocation().getPort()))); if (proxyCredentials != null) { if (proxyCredentials.isNtlm()) { okHttpBuilder .proxyAuthenticator(new NtlmAuthenticator( proxyCredentials.getUserName(), proxyCredentials.getPassPhrase(), proxyCredentials.getDomain(), proxyCredentials.getWorkstation())); } else { okHttpBuilder .proxyAuthenticator((route, response) -> response.request().newBuilder() .header(STR,"close") .header(PROXY_AUTHORIZATION_HEADER, okhttp3.Credentials.basic( proxyCredentials.getUserName(), proxyCredentials.getPassPhrase())) .build()); } } } | /**
* Sets up an NTLM proxy or a regular proxy based on credential types.
*
* @param okHttpBuilder
*/ | Sets up an NTLM proxy or a regular proxy based on credential types | setupProxy | {
"repo_name": "data-integrations/anaplan",
"path": "src/main/java/com/anaplan/client/transport/AnaplanApiProvider.java",
"license": "apache-2.0",
"size": 7236
} | [
"com.anaplan.client.auth.Credentials",
"java.net.InetSocketAddress",
"java.net.Proxy"
] | import com.anaplan.client.auth.Credentials; import java.net.InetSocketAddress; import java.net.Proxy; | import com.anaplan.client.auth.*; import java.net.*; | [
"com.anaplan.client",
"java.net"
] | com.anaplan.client; java.net; | 2,736,352 |
protected static final int selectColorFormat(final MediaCodecInfo codecInfo, final String mimeType) {
if (DEBUG) Log.i(TAG, "selectColorFormat: ");
int result = 0;
final MediaCodecInfo.CodecCapabilities caps;
try {
Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
caps = codecInfo.getCapabilitiesForType(mimeType);
} finally {
Thread.currentThread().setPriority(Thread.NORM_PRIORITY);
}
int colorFormat;
for (int i = 0; i < caps.colorFormats.length; i++) {
colorFormat = caps.colorFormats[i];
if (isRecognizedViewoFormat(colorFormat)) {
if (result == 0)
result = colorFormat;
break;
}
}
if (result == 0)
Log.e(TAG, "couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
return result;
}
protected static int[] recognizedFormats;
static {
recognizedFormats = new int[] {
// MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
// MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
// MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface,
};
} | static final int function(final MediaCodecInfo codecInfo, final String mimeType) { if (DEBUG) Log.i(TAG, STR); int result = 0; final MediaCodecInfo.CodecCapabilities caps; try { Thread.currentThread().setPriority(Thread.MAX_PRIORITY); caps = codecInfo.getCapabilitiesForType(mimeType); } finally { Thread.currentThread().setPriority(Thread.NORM_PRIORITY); } int colorFormat; for (int i = 0; i < caps.colorFormats.length; i++) { colorFormat = caps.colorFormats[i]; if (isRecognizedViewoFormat(colorFormat)) { if (result == 0) result = colorFormat; break; } } if (result == 0) Log.e(TAG, STR + codecInfo.getName() + STR + mimeType); return result; } protected static int[] recognizedFormats; static { recognizedFormats = new int[] { MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface, }; } | /**
* select color format available on specific codec and we can use.
* @return 0 if no colorFormat is matched
*/ | select color format available on specific codec and we can use | selectColorFormat | {
"repo_name": "itsnothingg/EasyScreenRecorder",
"path": "library/src/main/java/com/choiintack/easyscreenrecorder/encoder/MediaVideoEncoderBase.java",
"license": "apache-2.0",
"size": 7216
} | [
"android.media.MediaCodecInfo",
"android.util.Log"
] | import android.media.MediaCodecInfo; import android.util.Log; | import android.media.*; import android.util.*; | [
"android.media",
"android.util"
] | android.media; android.util; | 138,883 |
public boolean setTemplate(String wikiId, boolean value)
{
XWikiContext context = xcontextProvider.get();
try {
// Check if the current script has the programing rights
authorizationManager.checkAccess(Right.PROGRAM, context.getDoc().getAuthorReference(),
context.getDoc().getDocumentReference());
// Get the descriptor
WikiDescriptor descriptor = wikiDescriptorManager.getById(wikiId);
// Get the wiki owner
String owner = descriptor.getOwnerId();
// Check right access
WikiReference wikiReference = new WikiReference(descriptor.getId());
String currentUser = entityReferenceSerializer.serialize(context.getUserReference());
if (!currentUser.equals(owner)) {
authorizationManager.checkAccess(Right.ADMIN, context.getUserReference(), wikiReference);
}
// Do the job
wikiTemplateManager.setTemplate(wikiId, value);
// Return success
return true;
} catch (WikiTemplateManagerException e) {
error(String.format("Failed to set the template value [%s] for the wiki [%s].", value, wikiId), e);
return false;
} catch (AccessDeniedException e) {
error(String.format("Access denied for [%s] to change the template value of the wiki [%s]. The user has"
+ " not the right to perform this operation or the script has not the programming right.",
context.getUserReference(), wikiId), e);
return false;
} catch (WikiManagerException e) {
error(String.format("Failed to get the descriptor of the wiki [%s].", wikiId), e);
return false;
}
} | boolean function(String wikiId, boolean value) { XWikiContext context = xcontextProvider.get(); try { authorizationManager.checkAccess(Right.PROGRAM, context.getDoc().getAuthorReference(), context.getDoc().getDocumentReference()); WikiDescriptor descriptor = wikiDescriptorManager.getById(wikiId); String owner = descriptor.getOwnerId(); WikiReference wikiReference = new WikiReference(descriptor.getId()); String currentUser = entityReferenceSerializer.serialize(context.getUserReference()); if (!currentUser.equals(owner)) { authorizationManager.checkAccess(Right.ADMIN, context.getUserReference(), wikiReference); } wikiTemplateManager.setTemplate(wikiId, value); return true; } catch (WikiTemplateManagerException e) { error(String.format(STR, value, wikiId), e); return false; } catch (AccessDeniedException e) { error(String.format(STR + STR, context.getUserReference(), wikiId), e); return false; } catch (WikiManagerException e) { error(String.format(STR, wikiId), e); return false; } } | /**
* Set if the specified wiki is a template or not.
*
* @param wikiId the ID of the wiki to specify
* @param value whether or not the wiki is a template
* @return true if the action succeed
*/ | Set if the specified wiki is a template or not | setTemplate | {
"repo_name": "xwiki/xwiki-platform",
"path": "xwiki-platform-core/xwiki-platform-wiki/xwiki-platform-wiki-template/xwiki-platform-wiki-template-script/src/main/java/org/xwiki/wiki/template/script/WikiTemplateManagerScript.java",
"license": "lgpl-2.1",
"size": 10039
} | [
"com.xpn.xwiki.XWikiContext",
"org.xwiki.model.reference.WikiReference",
"org.xwiki.security.authorization.AccessDeniedException",
"org.xwiki.security.authorization.Right",
"org.xwiki.wiki.descriptor.WikiDescriptor",
"org.xwiki.wiki.manager.WikiManagerException",
"org.xwiki.wiki.template.WikiTemplateManagerException"
] | import com.xpn.xwiki.XWikiContext; import org.xwiki.model.reference.WikiReference; import org.xwiki.security.authorization.AccessDeniedException; import org.xwiki.security.authorization.Right; import org.xwiki.wiki.descriptor.WikiDescriptor; import org.xwiki.wiki.manager.WikiManagerException; import org.xwiki.wiki.template.WikiTemplateManagerException; | import com.xpn.xwiki.*; import org.xwiki.model.reference.*; import org.xwiki.security.authorization.*; import org.xwiki.wiki.descriptor.*; import org.xwiki.wiki.manager.*; import org.xwiki.wiki.template.*; | [
"com.xpn.xwiki",
"org.xwiki.model",
"org.xwiki.security",
"org.xwiki.wiki"
] | com.xpn.xwiki; org.xwiki.model; org.xwiki.security; org.xwiki.wiki; | 2,417,184 |
public Object readRemote()
throws IOException
{
String type = readType();
String url = readString();
return resolveRemote(type, url);
} | Object function() throws IOException { String type = readType(); String url = readString(); return resolveRemote(type, url); } | /**
* Reads a remote object.
*/ | Reads a remote object | readRemote | {
"repo_name": "roidelapluie/yajsw",
"path": "src/hessian/src/main/java/com/caucho/hessian4/io/Hessian2Input.java",
"license": "lgpl-2.1",
"size": 66457
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 760,468 |
void setBehindContentView(View view, ViewGroup.LayoutParams layoutParams); | void setBehindContentView(View view, ViewGroup.LayoutParams layoutParams); | /**
* Set the behind view content to an explicit view. This view is placed directly into the behind view 's view hierarchy.
* It can itself be a complex view hierarchy.
*
* @param view The desired content to display.
* @param layoutParams Layout parameters for the view.
*/ | Set the behind view content to an explicit view. This view is placed directly into the behind view 's view hierarchy. It can itself be a complex view hierarchy | setBehindContentView | {
"repo_name": "gintechsystems/SlideOutMenu-Android",
"path": "slideoutmenu/src/main/java/com/gintechsystems/slideoutmenu/SlideOutMenuBase.java",
"license": "mit",
"size": 2481
} | [
"android.view.View",
"android.view.ViewGroup"
] | import android.view.View; import android.view.ViewGroup; | import android.view.*; | [
"android.view"
] | android.view; | 2,080,373 |
public void setAllCharacterization(Map<String, NXcharacterization> characterization);
| void function(Map<String, NXcharacterization> characterization); | /**
* Set multiple child nodes of a particular type.
* <ul>
* <li></li>
* </ul>
*
* @param characterization the child nodes to add
*/ | Set multiple child nodes of a particular type. | setAllCharacterization | {
"repo_name": "xen-0/dawnsci",
"path": "org.eclipse.dawnsci.nexus/autogen/org/eclipse/dawnsci/nexus/NXsubentry.java",
"license": "epl-1.0",
"size": 30808
} | [
"java.util.Map"
] | import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 2,824,275 |
protected Set<D> computeCallFlowFunction
(FlowFunction<D> callFlowFunction, D d1, D d2) {
return callFlowFunction.computeTargets(d2);
} | Set<D> function (FlowFunction<D> callFlowFunction, D d1, D d2) { return callFlowFunction.computeTargets(d2); } | /**
* Computes the call flow function for the given call-site abstraction
* @param callFlowFunction The call flow function to compute
* @param d1 The abstraction at the current method's start node.
* @param d2 The abstraction at the call site
* @return The set of caller-side abstractions at the callee's start node
*/ | Computes the call flow function for the given call-site abstraction | computeCallFlowFunction | {
"repo_name": "johspaeth/heros",
"path": "src/heros/solver/IDESolver.java",
"license": "lgpl-2.1",
"size": 33137
} | [
"java.util.Set"
] | import java.util.Set; | import java.util.*; | [
"java.util"
] | java.util; | 1,098,256 |
public void delete(Serializable objs) {
Object objMerged = getSession().merge(objs);
getSession().delete(objMerged);
flush();
} | void function(Serializable objs) { Object objMerged = getSession().merge(objs); getSession().delete(objMerged); flush(); } | /**
* Metodo Responsavel por excluir um conjunto de entidades persistentes da
* base de dados
*
* @param objs
* Objetos a serem removidos da base de dados
*/ | Metodo Responsavel por excluir um conjunto de entidades persistentes da base de dados | delete | {
"repo_name": "opensingular/singular-core",
"path": "flow/core/src/main/java/org/opensingular/flow/persistence/entity/util/SessionWrapper.java",
"license": "apache-2.0",
"size": 10005
} | [
"java.io.Serializable"
] | import java.io.Serializable; | import java.io.*; | [
"java.io"
] | java.io; | 1,196,270 |
public void saveLegacyOIVImage(FSNamesystem source, String targetDir,
Canceler canceler) throws IOException {
FSImageCompression compression =
FSImageCompression.createCompression(conf);
long txid = getLastAppliedOrWrittenTxId();
SaveNamespaceContext ctx = new SaveNamespaceContext(source, txid,
canceler);
FSImageFormat.Saver saver = new FSImageFormat.Saver(ctx);
String imageFileName = NNStorage.getLegacyOIVImageFileName(txid);
File imageFile = new File(targetDir, imageFileName);
saver.save(imageFile, compression);
archivalManager.purgeOldLegacyOIVImages(targetDir, txid);
}
private class FSImageSaver implements Runnable {
private final SaveNamespaceContext context;
private final StorageDirectory sd;
private final NameNodeFile nnf;
public FSImageSaver(SaveNamespaceContext context, StorageDirectory sd,
NameNodeFile nnf) {
this.context = context;
this.sd = sd;
this.nnf = nnf;
} | void function(FSNamesystem source, String targetDir, Canceler canceler) throws IOException { FSImageCompression compression = FSImageCompression.createCompression(conf); long txid = getLastAppliedOrWrittenTxId(); SaveNamespaceContext ctx = new SaveNamespaceContext(source, txid, canceler); FSImageFormat.Saver saver = new FSImageFormat.Saver(ctx); String imageFileName = NNStorage.getLegacyOIVImageFileName(txid); File imageFile = new File(targetDir, imageFileName); saver.save(imageFile, compression); archivalManager.purgeOldLegacyOIVImages(targetDir, txid); } private class FSImageSaver implements Runnable { private final SaveNamespaceContext context; private final StorageDirectory sd; private final NameNodeFile nnf; public FSImageSaver(SaveNamespaceContext context, StorageDirectory sd, NameNodeFile nnf) { this.context = context; this.sd = sd; this.nnf = nnf; } | /**
* Save FSimage in the legacy format. This is not for NN consumption,
* but for tools like OIV.
*/ | Save FSimage in the legacy format. This is not for NN consumption, but for tools like OIV | saveLegacyOIVImage | {
"repo_name": "Reidddddd/mo-hadoop2.6.0",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImage.java",
"license": "apache-2.0",
"size": 60060
} | [
"java.io.File",
"java.io.IOException",
"org.apache.hadoop.hdfs.server.common.Storage",
"org.apache.hadoop.hdfs.server.namenode.NNStorage",
"org.apache.hadoop.hdfs.util.Canceler"
] | import java.io.File; import java.io.IOException; import org.apache.hadoop.hdfs.server.common.Storage; import org.apache.hadoop.hdfs.server.namenode.NNStorage; import org.apache.hadoop.hdfs.util.Canceler; | import java.io.*; import org.apache.hadoop.hdfs.server.common.*; import org.apache.hadoop.hdfs.server.namenode.*; import org.apache.hadoop.hdfs.util.*; | [
"java.io",
"org.apache.hadoop"
] | java.io; org.apache.hadoop; | 2,292,638 |
private void cloneRegion(final Path regionDir, final RegionInfo snapshotRegionInfo,
final SnapshotRegionManifest manifest) throws IOException {
final String tableName = tableDesc.getTableName().getNameAsString();
final String snapshotName = snapshotDesc.getName();
for (SnapshotRegionManifest.FamilyFiles familyFiles: manifest.getFamilyFilesList()) {
Path familyDir = new Path(regionDir, familyFiles.getFamilyName().toStringUtf8());
for (SnapshotRegionManifest.StoreFile storeFile: familyFiles.getStoreFilesList()) {
LOG.info("Adding HFileLink " + storeFile.getName() +" from cloned region "
+ "in snapshot " + snapshotName + " to table=" + tableName);
restoreStoreFile(familyDir, snapshotRegionInfo, storeFile, createBackRefs);
}
}
} | void function(final Path regionDir, final RegionInfo snapshotRegionInfo, final SnapshotRegionManifest manifest) throws IOException { final String tableName = tableDesc.getTableName().getNameAsString(); final String snapshotName = snapshotDesc.getName(); for (SnapshotRegionManifest.FamilyFiles familyFiles: manifest.getFamilyFilesList()) { Path familyDir = new Path(regionDir, familyFiles.getFamilyName().toStringUtf8()); for (SnapshotRegionManifest.StoreFile storeFile: familyFiles.getStoreFilesList()) { LOG.info(STR + storeFile.getName() +STR + STR + snapshotName + STR + tableName); restoreStoreFile(familyDir, snapshotRegionInfo, storeFile, createBackRefs); } } } | /**
* Clone region directory content from the snapshot info.
*
* Each region is encoded with the table name, so the cloned region will have
* a different region name.
*
* Instead of copying the hfiles a HFileLink is created.
*
* @param regionDir {@link Path} cloned dir
* @param snapshotRegionInfo
*/ | Clone region directory content from the snapshot info. Each region is encoded with the table name, so the cloned region will have a different region name. Instead of copying the hfiles a HFileLink is created | cloneRegion | {
"repo_name": "ChinmaySKulkarni/hbase",
"path": "hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java",
"license": "apache-2.0",
"size": 36257
} | [
"java.io.IOException",
"org.apache.hadoop.fs.Path",
"org.apache.hadoop.hbase.client.RegionInfo",
"org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos"
] | import java.io.IOException; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos; | import java.io.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.hbase.client.*; import org.apache.hadoop.hbase.shaded.protobuf.generated.*; | [
"java.io",
"org.apache.hadoop"
] | java.io; org.apache.hadoop; | 2,644,670 |
public boolean writeToNBTOptional(NBTTagCompound tagCompund)
{
String s = this.getEntityString();
if (!this.isDead && s != null && this.riddenByEntity == null)
{
tagCompund.setString("id", s);
this.writeToNBT(tagCompund);
return true;
}
else
{
return false;
}
} | boolean function(NBTTagCompound tagCompund) { String s = this.getEntityString(); if (!this.isDead && s != null && this.riddenByEntity == null) { tagCompund.setString("id", s); this.writeToNBT(tagCompund); return true; } else { return false; } } | /**
* Either write this entity to the NBT tag given and return true, or return false without doing anything. If this
* returns false the entity is not saved on disk. Ridden entities return false here as they are saved with their
* rider.
*/ | Either write this entity to the NBT tag given and return true, or return false without doing anything. If this returns false the entity is not saved on disk. Ridden entities return false here as they are saved with their rider | writeToNBTOptional | {
"repo_name": "SkidJava/BaseClient",
"path": "new_1.8.8/net/minecraft/entity/Entity.java",
"license": "gpl-2.0",
"size": 87662
} | [
"net.minecraft.nbt.NBTTagCompound"
] | import net.minecraft.nbt.NBTTagCompound; | import net.minecraft.nbt.*; | [
"net.minecraft.nbt"
] | net.minecraft.nbt; | 1,785,052 |
void setAugmentingSchemaNode(YangSchemaNode schemaNode); | void setAugmentingSchemaNode(YangSchemaNode schemaNode); | /**
* Sets the YangSchemaNode of augmenting application root node.
*
* @param schemaNode YangSchemaNode of augmenting application module
*/ | Sets the YangSchemaNode of augmenting application root node | setAugmentingSchemaNode | {
"repo_name": "donNewtonAlpha/onos",
"path": "apps/yms/app/src/main/java/org/onosproject/yms/app/ydt/AugmentAppData.java",
"license": "apache-2.0",
"size": 1236
} | [
"org.onosproject.yangutils.datamodel.YangSchemaNode"
] | import org.onosproject.yangutils.datamodel.YangSchemaNode; | import org.onosproject.yangutils.datamodel.*; | [
"org.onosproject.yangutils"
] | org.onosproject.yangutils; | 752,267 |
TokenStream stream = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(
"\u0130STANBUL \u0130ZM\u0130R ISPARTA"));
TurkishLowerCaseFilter filter = new TurkishLowerCaseFilter(stream);
assertTokenStreamContents(filter, new String[] {"istanbul", "izmir",
"\u0131sparta",});
} | TokenStream stream = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader( STR)); TurkishLowerCaseFilter filter = new TurkishLowerCaseFilter(stream); assertTokenStreamContents(filter, new String[] {STR, "izmir", STR,}); } | /**
* Test composed forms
*/ | Test composed forms | testTurkishLowerCaseFilter | {
"repo_name": "tokee/lucene",
"path": "contrib/analyzers/common/src/test/org/apache/lucene/analysis/tr/TestTurkishLowerCaseFilter.java",
"license": "apache-2.0",
"size": 2607
} | [
"java.io.StringReader",
"org.apache.lucene.analysis.TokenStream",
"org.apache.lucene.analysis.WhitespaceTokenizer"
] | import java.io.StringReader; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.WhitespaceTokenizer; | import java.io.*; import org.apache.lucene.analysis.*; | [
"java.io",
"org.apache.lucene"
] | java.io; org.apache.lucene; | 1,779,084 |
public CmsClientSitemapEntry getEntry(String entryPath) {
return m_entriesByPath.get(entryPath);
} | CmsClientSitemapEntry function(String entryPath) { return m_entriesByPath.get(entryPath); } | /**
* Returns the tree entry with the given path.<p>
*
* @param entryPath the path to look for
*
* @return the tree entry with the given path, or <code>null</code> if not found
*/ | Returns the tree entry with the given path | getEntry | {
"repo_name": "mediaworx/opencms-core",
"path": "src-gwt/org/opencms/ade/sitemap/client/control/CmsSitemapController.java",
"license": "lgpl-2.1",
"size": 74390
} | [
"org.opencms.ade.sitemap.shared.CmsClientSitemapEntry"
] | import org.opencms.ade.sitemap.shared.CmsClientSitemapEntry; | import org.opencms.ade.sitemap.shared.*; | [
"org.opencms.ade"
] | org.opencms.ade; | 2,671,987 |
@Generated
@CVariable()
@MappedReturn(ObjCStringMapper.class)
public static native String NSURLIsUbiquitousItemKey(); | @CVariable() @MappedReturn(ObjCStringMapper.class) static native String function(); | /**
* true if this item is synced to the cloud, false if it is only a local file. (Read-only, value type boolean NSNumber)
*/ | true if this item is synced to the cloud, false if it is only a local file. (Read-only, value type boolean NSNumber) | NSURLIsUbiquitousItemKey | {
"repo_name": "multi-os-engine/moe-core",
"path": "moe.apple/moe.platform.ios/src/main/java/apple/foundation/c/Foundation.java",
"license": "apache-2.0",
"size": 156135
} | [
"org.moe.natj.c.ann.CVariable",
"org.moe.natj.general.ann.MappedReturn",
"org.moe.natj.objc.map.ObjCStringMapper"
] | import org.moe.natj.c.ann.CVariable; import org.moe.natj.general.ann.MappedReturn; import org.moe.natj.objc.map.ObjCStringMapper; | import org.moe.natj.c.ann.*; import org.moe.natj.general.ann.*; import org.moe.natj.objc.map.*; | [
"org.moe.natj"
] | org.moe.natj; | 2,658,032 |
protected void diffTypes()
{
report(50, "Type definitions");
TypesDefinition diffTypeDefs = factory.createTypesDefinition();
diffSol.setTypes(diffTypeDefs); | void function() { report(50, STR); TypesDefinition diffTypeDefs = factory.createTypesDefinition(); diffSol.setTypes(diffTypeDefs); | /**
* Things to diff within types:
* Properties
* Visible fields / editability / relevance (these can be done in the same diff)
* State transitions / mandatory fields (need to be done in one lump)
* Domain declarations
*/ | Things to diff within types: Properties Visible fields / editability / relevance (these can be done in the same diff) State transitions / mandatory fields (need to be done in one lump) Domain declarations | diffTypes | {
"repo_name": "PTC-ALM/TIF",
"path": "src/com/ptc/tifworkbench/model/SolutionDifferencer.java",
"license": "mit",
"size": 16511
} | [
"com.ptc.tifworkbench.jaxbbinding.TypesDefinition"
] | import com.ptc.tifworkbench.jaxbbinding.TypesDefinition; | import com.ptc.tifworkbench.jaxbbinding.*; | [
"com.ptc.tifworkbench"
] | com.ptc.tifworkbench; | 1,674,591 |
public void readFrom(StreamInput in) throws IOException {
nodeId = in.readOptionalString();
state = State.fromValue(in.readByte());
reason = in.readOptionalString();
} | void function(StreamInput in) throws IOException { nodeId = in.readOptionalString(); state = State.fromValue(in.readByte()); reason = in.readOptionalString(); } | /**
* Reads restore status from stream input
*
* @param in stream input
* @throws IOException
*/ | Reads restore status from stream input | readFrom | {
"repo_name": "corochoone/elasticsearch",
"path": "src/main/java/org/elasticsearch/cluster/metadata/RestoreMetaData.java",
"license": "apache-2.0",
"size": 16113
} | [
"java.io.IOException",
"org.elasticsearch.common.io.stream.StreamInput"
] | import java.io.IOException; import org.elasticsearch.common.io.stream.StreamInput; | import java.io.*; import org.elasticsearch.common.io.stream.*; | [
"java.io",
"org.elasticsearch.common"
] | java.io; org.elasticsearch.common; | 695,489 |
@Override
public Object clone() throws CloneNotSupportedException {
XYShapeRenderer clone = (XYShapeRenderer) super.clone();
if (this.paintScale instanceof PublicCloneable) {
PublicCloneable pc = (PublicCloneable) this.paintScale;
clone.paintScale = (PaintScale) pc.clone();
}
return clone;
}
| Object function() throws CloneNotSupportedException { XYShapeRenderer clone = (XYShapeRenderer) super.clone(); if (this.paintScale instanceof PublicCloneable) { PublicCloneable pc = (PublicCloneable) this.paintScale; clone.paintScale = (PaintScale) pc.clone(); } return clone; } | /**
* Returns a clone of this renderer.
*
* @return A clone of this renderer.
*
* @throws CloneNotSupportedException if there is a problem creating the
* clone.
*/ | Returns a clone of this renderer | clone | {
"repo_name": "simon04/jfreechart",
"path": "src/main/java/org/jfree/chart/renderer/xy/XYShapeRenderer.java",
"license": "lgpl-2.1",
"size": 20956
} | [
"org.jfree.chart.renderer.PaintScale",
"org.jfree.util.PublicCloneable"
] | import org.jfree.chart.renderer.PaintScale; import org.jfree.util.PublicCloneable; | import org.jfree.chart.renderer.*; import org.jfree.util.*; | [
"org.jfree.chart",
"org.jfree.util"
] | org.jfree.chart; org.jfree.util; | 2,341,100 |
public synchronized void connect(BluetoothDevice device, boolean secure) {
if (D) Log.d(TAG, "connect to: " + device);
// Cancel any thread attempting to make a connection
if (mState == STATE_CONNECTING) {
if (mConnectThread != null) {mConnectThread.cancel(); mConnectThread = null;}
}
// Cancel any thread currently running a connection
if (mConnectedThread != null) {mConnectedThread.cancel(); mConnectedThread = null;}
// Start the thread to connect with the given device
mConnectThread = new ConnectThread(device, secure);
mConnectThread.start();
setState(STATE_CONNECTING);
} | synchronized void function(BluetoothDevice device, boolean secure) { if (D) Log.d(TAG, STR + device); if (mState == STATE_CONNECTING) { if (mConnectThread != null) {mConnectThread.cancel(); mConnectThread = null;} } if (mConnectedThread != null) {mConnectedThread.cancel(); mConnectedThread = null;} mConnectThread = new ConnectThread(device, secure); mConnectThread.start(); setState(STATE_CONNECTING); } | /**
* Start the ConnectThread to initiate a connection to a remote device.
* @param device The BluetoothDevice to connect
* @param secure Socket Security type - Secure (true) , Insecure (false)
*/ | Start the ConnectThread to initiate a connection to a remote device | connect | {
"repo_name": "tmator/Smart-speaking-with-tooth",
"path": "testbt/plugins/com.megster.cordova.bluetoothserial/src/android/com/megster/cordova/BluetoothSerialService.java",
"license": "gpl-2.0",
"size": 18928
} | [
"android.bluetooth.BluetoothDevice",
"android.util.Log"
] | import android.bluetooth.BluetoothDevice; import android.util.Log; | import android.bluetooth.*; import android.util.*; | [
"android.bluetooth",
"android.util"
] | android.bluetooth; android.util; | 2,406,315 |
@Override
public long skip(final long ln) throws IOException {
try {
return this.in.skip(ln);
} catch (final IOException e) {
this.handleIOException(e);
return 0;
}
} | long function(final long ln) throws IOException { try { return this.in.skip(ln); } catch (final IOException e) { this.handleIOException(e); return 0; } } | /**
* Invokes the delegate's <code>skip(long)</code> method.
* @param ln the number of bytes to skip
* @return the actual number of bytes skipped
* @throws IOException if an I/O error occurs
*/ | Invokes the delegate's <code>skip(long)</code> method | skip | {
"repo_name": "stereokrauts/stereoscope",
"path": "org.apache.commons.io/src/main/java/org/apache/commons/io/input/ProxyInputStream.java",
"license": "gpl-2.0",
"size": 7902
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,371,809 |
public int hashCode() {
int h = 0;
for( int j = size, i = 0, t = 0; j-- != 0; ) {
while( ! used[ i ] ) i++;
t = (key[ i ]);
t ^= it.unimi.dsi.fastutil.HashCommon.double2int(value[ i ]);
h += t;
i++;
}
return h;
} | int function() { int h = 0; for( int j = size, i = 0, t = 0; j-- != 0; ) { while( ! used[ i ] ) i++; t = (key[ i ]); t ^= it.unimi.dsi.fastutil.HashCommon.double2int(value[ i ]); h += t; i++; } return h; } | /** Returns a hash code for this map.
*
* This method overrides the generic method provided by the superclass.
* Since <code>equals()</code> is not overriden, it is important
* that the value returned by this method is the same value as
* the one returned by the overriden method.
*
* @return a hash code for this map.
*/ | Returns a hash code for this map. This method overrides the generic method provided by the superclass. Since <code>equals()</code> is not overriden, it is important that the value returned by this method is the same value as the one returned by the overriden method | hashCode | {
"repo_name": "karussell/fastutil",
"path": "src/it/unimi/dsi/fastutil/ints/Int2DoubleLinkedOpenHashMap.java",
"license": "apache-2.0",
"size": 48564
} | [
"it.unimi.dsi.fastutil.HashCommon"
] | import it.unimi.dsi.fastutil.HashCommon; | import it.unimi.dsi.fastutil.*; | [
"it.unimi.dsi"
] | it.unimi.dsi; | 1,372,874 |
public void frameworkSetBCContextEntries(List<ContextEntry> bcEntries);
| void function(List<ContextEntry> bcEntries); | /**
* INTERNAL FRAMEWORK METHOD!
* <p>
* Sets the business context entries on this IUserActivityLogger.
* <p>
* This method is called in a few carefully selected places only.
* <p>
* PS: The context entries are used to make safety checks to ensure
* the business path matches the ResourceableInfos.
* It is likely that this will become redundant (i.e. overkill) at
* some point and that we'll get rid of this
* <p>
* @param wControl
*/ | INTERNAL FRAMEWORK METHOD! Sets the business context entries on this IUserActivityLogger. This method is called in a few carefully selected places only. the business path matches the ResourceableInfos. It is likely that this will become redundant (i.e. overkill) at some point and that we'll get rid of this | frameworkSetBCContextEntries | {
"repo_name": "stevenhva/InfoLearn_OpenOLAT",
"path": "src/main/java/org/olat/core/logging/activity/IUserActivityLogger.java",
"license": "apache-2.0",
"size": 7008
} | [
"java.util.List",
"org.olat.core.id.context.ContextEntry"
] | import java.util.List; import org.olat.core.id.context.ContextEntry; | import java.util.*; import org.olat.core.id.context.*; | [
"java.util",
"org.olat.core"
] | java.util; org.olat.core; | 1,364,219 |
Collection<ShardingSphereRule> result = new LinkedList<>();
for (Entry<RuleConfiguration, GlobalRuleBuilder> entry : getRuleBuilderMap(globalRuleConfigs).entrySet()) {
result.add(entry.getValue().build(entry.getKey(), metaDataMap));
}
return result;
} | Collection<ShardingSphereRule> result = new LinkedList<>(); for (Entry<RuleConfiguration, GlobalRuleBuilder> entry : getRuleBuilderMap(globalRuleConfigs).entrySet()) { result.add(entry.getValue().build(entry.getKey(), metaDataMap)); } return result; } | /**
* Build rules.
*
* @param globalRuleConfigs global rule configurations
* @param metaDataMap meta data map
* @return built rules
*/ | Build rules | buildRules | {
"repo_name": "apache/incubator-shardingsphere",
"path": "shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/rule/builder/global/GlobalRulesBuilder.java",
"license": "apache-2.0",
"size": 4200
} | [
"java.util.Collection",
"java.util.LinkedList",
"java.util.Map",
"org.apache.shardingsphere.infra.config.RuleConfiguration",
"org.apache.shardingsphere.infra.rule.ShardingSphereRule"
] | import java.util.Collection; import java.util.LinkedList; import java.util.Map; import org.apache.shardingsphere.infra.config.RuleConfiguration; import org.apache.shardingsphere.infra.rule.ShardingSphereRule; | import java.util.*; import org.apache.shardingsphere.infra.config.*; import org.apache.shardingsphere.infra.rule.*; | [
"java.util",
"org.apache.shardingsphere"
] | java.util; org.apache.shardingsphere; | 2,644,610 |
String getName(byte[] id) throws NoSuchUniqueId, HBaseException; | String getName(byte[] id) throws NoSuchUniqueId, HBaseException; | /**
* Finds the name associated with a given ID.
*
* @param id The ID associated with that name.
* @see #getId(String)
* @see #getOrCreateId(String)
* @throws NoSuchUniqueId if the given ID is not assigned.
* @throws HBaseException if there is a problem communicating with HBase.
* @throws IllegalArgumentException if the ID given in argument is encoded
* on the wrong number of bytes.
*/ | Finds the name associated with a given ID | getName | {
"repo_name": "marcuswestin/opentsdb",
"path": "src/uid/UniqueIdInterface.java",
"license": "gpl-3.0",
"size": 3378
} | [
"org.hbase.async.HBaseException"
] | import org.hbase.async.HBaseException; | import org.hbase.async.*; | [
"org.hbase.async"
] | org.hbase.async; | 2,014,952 |
public static InflationSensitivity ofYieldDiscountingAndPriceIndex(final Map<String, List<DoublesPair>> sensitivityYieldDiscounting, final Map<String, List<DoublesPair>> sensitivityPriceCurve) {
ArgumentChecker.notNull(sensitivityYieldDiscounting, "Sensitivity yield curve");
ArgumentChecker.notNull(sensitivityPriceCurve, "Sensitivity price index curve");
return new InflationSensitivity(sensitivityYieldDiscounting, new HashMap<String, List<ForwardSensitivity>>(), sensitivityPriceCurve);
} | static InflationSensitivity function(final Map<String, List<DoublesPair>> sensitivityYieldDiscounting, final Map<String, List<DoublesPair>> sensitivityPriceCurve) { ArgumentChecker.notNull(sensitivityYieldDiscounting, STR); ArgumentChecker.notNull(sensitivityPriceCurve, STR); return new InflationSensitivity(sensitivityYieldDiscounting, new HashMap<String, List<ForwardSensitivity>>(), sensitivityPriceCurve); } | /**
* Constructor from a yield discounting map and a price map. The maps are used directly.
* @param sensitivityYieldDiscounting The map.
* @param sensitivityPriceCurve The map.
* @return The sensitivity.
*/ | Constructor from a yield discounting map and a price map. The maps are used directly | ofYieldDiscountingAndPriceIndex | {
"repo_name": "jeorme/OG-Platform",
"path": "projects/OG-Analytics/src/main/java/com/opengamma/analytics/financial/provider/sensitivity/inflation/InflationSensitivity.java",
"license": "apache-2.0",
"size": 10231
} | [
"com.opengamma.analytics.financial.provider.sensitivity.multicurve.ForwardSensitivity",
"com.opengamma.util.ArgumentChecker",
"com.opengamma.util.tuple.DoublesPair",
"java.util.HashMap",
"java.util.List",
"java.util.Map"
] | import com.opengamma.analytics.financial.provider.sensitivity.multicurve.ForwardSensitivity; import com.opengamma.util.ArgumentChecker; import com.opengamma.util.tuple.DoublesPair; import java.util.HashMap; import java.util.List; import java.util.Map; | import com.opengamma.analytics.financial.provider.sensitivity.multicurve.*; import com.opengamma.util.*; import com.opengamma.util.tuple.*; import java.util.*; | [
"com.opengamma.analytics",
"com.opengamma.util",
"java.util"
] | com.opengamma.analytics; com.opengamma.util; java.util; | 2,886,548 |
return value != null && value.getClass().isArray();
}
/**
* Check whether the given value is empty.
*
* <p>An object value is empty if:
*
* <ul>
* <li>value is null
* <li>value is {@link Optional} and {@link Optional#empty()} | return value != null && value.getClass().isArray(); } /** * Check whether the given value is empty. * * <p>An object value is empty if: * * <ul> * <li>value is null * <li>value is {@link Optional} and {@link Optional#empty()} | /**
* Check whether the given value is an array.
*
* @param value the value to check
* @return true if value is array false otherwise
*/ | Check whether the given value is an array | isArray | {
"repo_name": "axelor/axelor-development-kit",
"path": "axelor-common/src/main/java/com/axelor/common/ObjectUtils.java",
"license": "agpl-3.0",
"size": 2535
} | [
"java.util.Optional"
] | import java.util.Optional; | import java.util.*; | [
"java.util"
] | java.util; | 938,017 |
public Future<CommandResult> readAttribute(final int attributeId) {
return readAttributes(Collections.singletonList(attributeId));
} | Future<CommandResult> function(final int attributeId) { return readAttributes(Collections.singletonList(attributeId)); } | /**
* Read an attribute given the attribute ID. This method will always send a {@link ReadAttributesCommand} to the
* remote device.
*
* @param attributeId the integer attribute ID to read
* @return command future
*/ | Read an attribute given the attribute ID. This method will always send a <code>ReadAttributesCommand</code> to the remote device | readAttribute | {
"repo_name": "zsmartsystems/com.zsmartsystems.zigbee",
"path": "com.zsmartsystems.zigbee/src/main/java/com/zsmartsystems/zigbee/zcl/ZclCluster.java",
"license": "epl-1.0",
"size": 85440
} | [
"com.zsmartsystems.zigbee.CommandResult",
"java.util.Collections",
"java.util.concurrent.Future"
] | import com.zsmartsystems.zigbee.CommandResult; import java.util.Collections; import java.util.concurrent.Future; | import com.zsmartsystems.zigbee.*; import java.util.*; import java.util.concurrent.*; | [
"com.zsmartsystems.zigbee",
"java.util"
] | com.zsmartsystems.zigbee; java.util; | 854,163 |
@Test
public void testVehiclePositionAtStepOne() throws IOException {
conn.nextSimStep();
final Repository<Vehicle> repo = conn.getVehicleRepository();
Vehicle v0 = repo.getByID("0.0");
assertEquals(0, v0.getLanePosition(), DELTA);
}
| void function() throws IOException { conn.nextSimStep(); final Repository<Vehicle> repo = conn.getVehicleRepository(); Vehicle v0 = repo.getByID("0.0"); assertEquals(0, v0.getLanePosition(), DELTA); } | /**
* Tests that the vehicle at step 1 is at beginning of its departure lane.
*
* @throws IOException
*/ | Tests that the vehicle at step 1 is at beginning of its departure lane | testVehiclePositionAtStepOne | {
"repo_name": "702nADOS/sumo",
"path": "tools/contributed/traci4j/test/java/it/polito/appeal/traci/test/TraCITest.java",
"license": "gpl-3.0",
"size": 32242
} | [
"it.polito.appeal.traci.Repository",
"it.polito.appeal.traci.Vehicle",
"java.io.IOException",
"org.junit.Assert"
] | import it.polito.appeal.traci.Repository; import it.polito.appeal.traci.Vehicle; import java.io.IOException; import org.junit.Assert; | import it.polito.appeal.traci.*; import java.io.*; import org.junit.*; | [
"it.polito.appeal",
"java.io",
"org.junit"
] | it.polito.appeal; java.io; org.junit; | 1,797,875 |
public Observable<ServiceResponse<P2SVpnConnectionHealthInner>> getP2sVpnConnectionHealthDetailedWithServiceResponseAsync(String resourceGroupName, String gatewayName, P2SVpnConnectionHealthRequest request) {
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (gatewayName == null) {
throw new IllegalArgumentException("Parameter gatewayName is required and cannot be null.");
}
if (request == null) {
throw new IllegalArgumentException("Parameter request is required and cannot be null.");
}
Validator.validate(request);
final String apiVersion = "2019-11-01";
Observable<Response<ResponseBody>> observable = service.getP2sVpnConnectionHealthDetailed(this.client.subscriptionId(), resourceGroupName, gatewayName, request, apiVersion, this.client.acceptLanguage(), this.client.userAgent());
return client.getAzureClient().getPostOrDeleteResultAsync(observable, new LongRunningOperationOptions().withFinalStateVia(LongRunningFinalState.LOCATION), new TypeToken<P2SVpnConnectionHealthInner>() { }.getType());
} | Observable<ServiceResponse<P2SVpnConnectionHealthInner>> function(String resourceGroupName, String gatewayName, P2SVpnConnectionHealthRequest request) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException(STR); } if (resourceGroupName == null) { throw new IllegalArgumentException(STR); } if (gatewayName == null) { throw new IllegalArgumentException(STR); } if (request == null) { throw new IllegalArgumentException(STR); } Validator.validate(request); final String apiVersion = STR; Observable<Response<ResponseBody>> observable = service.getP2sVpnConnectionHealthDetailed(this.client.subscriptionId(), resourceGroupName, gatewayName, request, apiVersion, this.client.acceptLanguage(), this.client.userAgent()); return client.getAzureClient().getPostOrDeleteResultAsync(observable, new LongRunningOperationOptions().withFinalStateVia(LongRunningFinalState.LOCATION), new TypeToken<P2SVpnConnectionHealthInner>() { }.getType()); } | /**
* Gets the sas url to get the connection health detail of P2S clients of the virtual wan P2SVpnGateway in the specified resource group.
*
* @param resourceGroupName The name of the resource group.
* @param gatewayName The name of the P2SVpnGateway.
* @param request Request parameters supplied to get p2s vpn connections detailed health.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable for the request
*/ | Gets the sas url to get the connection health detail of P2S clients of the virtual wan P2SVpnGateway in the specified resource group | getP2sVpnConnectionHealthDetailedWithServiceResponseAsync | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/network/mgmt-v2019_11_01/src/main/java/com/microsoft/azure/management/network/v2019_11_01/implementation/P2sVpnGatewaysInner.java",
"license": "mit",
"size": 129361
} | [
"com.google.common.reflect.TypeToken",
"com.microsoft.azure.LongRunningFinalState",
"com.microsoft.azure.LongRunningOperationOptions",
"com.microsoft.azure.management.network.v2019_11_01.P2SVpnConnectionHealthRequest",
"com.microsoft.rest.ServiceResponse",
"com.microsoft.rest.Validator"
] | import com.google.common.reflect.TypeToken; import com.microsoft.azure.LongRunningFinalState; import com.microsoft.azure.LongRunningOperationOptions; import com.microsoft.azure.management.network.v2019_11_01.P2SVpnConnectionHealthRequest; import com.microsoft.rest.ServiceResponse; import com.microsoft.rest.Validator; | import com.google.common.reflect.*; import com.microsoft.azure.*; import com.microsoft.azure.management.network.v2019_11_01.*; import com.microsoft.rest.*; | [
"com.google.common",
"com.microsoft.azure",
"com.microsoft.rest"
] | com.google.common; com.microsoft.azure; com.microsoft.rest; | 2,579,474 |
private void fillShuffledIndex() {
mShuffledIndex.clear();
Map<String, List<Integer>> artistMap = new HashMap<>();
List<String> artistNames = new ArrayList<>();
int shuffledTracksCount = 0;
boolean isPlayingFromQueue = mQueue.getIndexOfEntry(mCurrentEntry) >= 0;
int currentIndex = -1;
if (isPlayingFromQueue) {
currentIndex = mPlaylist.getIndexOfEntry(mCurrentEntry);
}
for (int i = 0; i < mPlaylist.size(); i++) {
if (isPlayingFromQueue || i != currentIndex) {
String artistName = mPlaylist.getArtistName(i);
if (artistMap.get(artistName) == null) {
artistMap.put(artistName, new ArrayList<Integer>());
artistNames.add(artistName);
}
artistMap.get(artistName).add(i);
shuffledTracksCount++;
}
}
if (!isPlayingFromQueue) {
mShuffledIndex.add(mCurrentIndex);
}
String lastArtistName = null;
while (shuffledTracksCount >= 0) {
// Get a random artistName out of all available ones
String artistName = null;
int tryCount = 0;
while (tryCount++ < 3 && (artistName == null || artistName.equals(lastArtistName))) {
// We try 3 times to get an artistName that is different from the one we picked
// previously
int randomPos = (int) (Math.random() * artistNames.size());
artistName = artistNames.get(randomPos);
}
// Now we can get the list of track indexes
List<Integer> indexes = artistMap.get(artistName);
int randomPos = (int) (Math.random() * indexes.size());
// Add the randomly picked track index to our shuffled index
mShuffledIndex.add(indexes.get(randomPos));
shuffledTracksCount--;
lastArtistName = artistName;
}
} | void function() { mShuffledIndex.clear(); Map<String, List<Integer>> artistMap = new HashMap<>(); List<String> artistNames = new ArrayList<>(); int shuffledTracksCount = 0; boolean isPlayingFromQueue = mQueue.getIndexOfEntry(mCurrentEntry) >= 0; int currentIndex = -1; if (isPlayingFromQueue) { currentIndex = mPlaylist.getIndexOfEntry(mCurrentEntry); } for (int i = 0; i < mPlaylist.size(); i++) { if (isPlayingFromQueue i != currentIndex) { String artistName = mPlaylist.getArtistName(i); if (artistMap.get(artistName) == null) { artistMap.put(artistName, new ArrayList<Integer>()); artistNames.add(artistName); } artistMap.get(artistName).add(i); shuffledTracksCount++; } } if (!isPlayingFromQueue) { mShuffledIndex.add(mCurrentIndex); } String lastArtistName = null; while (shuffledTracksCount >= 0) { String artistName = null; int tryCount = 0; while (tryCount++ < 3 && (artistName == null artistName.equals(lastArtistName))) { int randomPos = (int) (Math.random() * artistNames.size()); artistName = artistNames.get(randomPos); } List<Integer> indexes = artistMap.get(artistName); int randomPos = (int) (Math.random() * indexes.size()); mShuffledIndex.add(indexes.get(randomPos)); shuffledTracksCount--; lastArtistName = artistName; } } | /**
* Shuffles the list of tracks in the current playlist and fills the shuffled index accordingly.
* The shuffle method ensures that the shuffled list does only contain a minimum amount of
* tracks by the same artist in sequence.
*/ | Shuffles the list of tracks in the current playlist and fills the shuffled index accordingly. The shuffle method ensures that the shuffled list does only contain a minimum amount of tracks by the same artist in sequence | fillShuffledIndex | {
"repo_name": "andi34/tomahawk-android",
"path": "src/org/tomahawk/tomahawk_android/services/PlaybackService.java",
"license": "gpl-3.0",
"size": 58686
} | [
"java.util.ArrayList",
"java.util.HashMap",
"java.util.List",
"java.util.Map"
] | import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 804,823 |
@Deprecated
@Implementation
protected void getRealMetrics(DisplayMetrics outMetrics) {
if (isJB()) {
getMetrics(outMetrics);
outMetrics.widthPixels = realWidth;
outMetrics.heightPixels = realHeight;
} else {
directlyOn(realObject, Display.class).getRealMetrics(outMetrics);
if (scaledDensity != null) {
outMetrics.scaledDensity = scaledDensity;
}
}
} | void function(DisplayMetrics outMetrics) { if (isJB()) { getMetrics(outMetrics); outMetrics.widthPixels = realWidth; outMetrics.heightPixels = realHeight; } else { directlyOn(realObject, Display.class).getRealMetrics(outMetrics); if (scaledDensity != null) { outMetrics.scaledDensity = scaledDensity; } } } | /**
* If {@link #setScaledDensity(float)} has been called, {@link DisplayMetrics#scaledDensity} will
* be modified to reflect the value specified. Note that this is not a realistic state.
*
* @deprecated This behavior is deprecated and will be removed in Robolectric 3.7.
*/ | If <code>#setScaledDensity(float)</code> has been called, <code>DisplayMetrics#scaledDensity</code> will be modified to reflect the value specified. Note that this is not a realistic state | getRealMetrics | {
"repo_name": "spotify/robolectric",
"path": "shadows/framework/src/main/java/org/robolectric/shadows/ShadowDisplay.java",
"license": "mit",
"size": 12838
} | [
"android.util.DisplayMetrics",
"android.view.Display",
"org.robolectric.shadow.api.Shadow"
] | import android.util.DisplayMetrics; import android.view.Display; import org.robolectric.shadow.api.Shadow; | import android.util.*; import android.view.*; import org.robolectric.shadow.api.*; | [
"android.util",
"android.view",
"org.robolectric.shadow"
] | android.util; android.view; org.robolectric.shadow; | 1,746,077 |
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof MeanAndStandardDeviation)) {
return false;
}
MeanAndStandardDeviation that = (MeanAndStandardDeviation) obj;
if (!ObjectUtilities.equal(this.mean, that.mean)) {
return false;
}
if (!ObjectUtilities.equal(
this.standardDeviation, that.standardDeviation)
) {
return false;
}
return true;
}
| boolean function(Object obj) { if (obj == this) { return true; } if (!(obj instanceof MeanAndStandardDeviation)) { return false; } MeanAndStandardDeviation that = (MeanAndStandardDeviation) obj; if (!ObjectUtilities.equal(this.mean, that.mean)) { return false; } if (!ObjectUtilities.equal( this.standardDeviation, that.standardDeviation) ) { return false; } return true; } | /**
* Tests this instance for equality with an arbitrary object.
*
* @param obj the object (<code>null</code> permitted).
*
* @return A boolean.
*/ | Tests this instance for equality with an arbitrary object | equals | {
"repo_name": "apetresc/JFreeChart",
"path": "src/main/java/org/jfree/data/statistics/MeanAndStandardDeviation.java",
"license": "lgpl-2.1",
"size": 5474
} | [
"org.jfree.util.ObjectUtilities"
] | import org.jfree.util.ObjectUtilities; | import org.jfree.util.*; | [
"org.jfree.util"
] | org.jfree.util; | 2,484,156 |
EClass getMConnectionSwitch(); | EClass getMConnectionSwitch(); | /**
* Returns the meta object for class '{@link es.uah.aut.srg.micobs.mclev.mclevmcad.MConnectionSwitch <em>MConnectionSwitch</em>}'.
* @return the meta object for class '<em>MConnectionSwitch</em>'.
* @see es.uah.aut.srg.micobs.mclev.mclevmcad.MConnectionSwitch
* @generated
*/ | Returns the meta object for class '<code>es.uah.aut.srg.micobs.mclev.mclevmcad.MConnectionSwitch MConnectionSwitch</code>' | getMConnectionSwitch | {
"repo_name": "parraman/micobs",
"path": "mclev/es.uah.aut.srg.micobs.mclev/src/es/uah/aut/srg/micobs/mclev/mclevmcad/mclevmcadPackage.java",
"license": "epl-1.0",
"size": 59510
} | [
"org.eclipse.emf.ecore.EClass"
] | import org.eclipse.emf.ecore.EClass; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 116,653 |
public static MozuClient<com.mozu.api.contracts.productruntime.CategoryCollection> getCategoryTreeClient(String responseFields) throws Exception
{
MozuUrl url = com.mozu.api.urls.commerce.catalog.storefront.CategoryUrl.getCategoryTreeUrl(responseFields);
String verb = "GET";
Class<?> clz = com.mozu.api.contracts.productruntime.CategoryCollection.class;
MozuClient<com.mozu.api.contracts.productruntime.CategoryCollection> mozuClient = (MozuClient<com.mozu.api.contracts.productruntime.CategoryCollection>) MozuClientFactory.getInstance(clz);
mozuClient.setVerb(verb);
mozuClient.setResourceUrl(url);
return mozuClient;
} | static MozuClient<com.mozu.api.contracts.productruntime.CategoryCollection> function(String responseFields) throws Exception { MozuUrl url = com.mozu.api.urls.commerce.catalog.storefront.CategoryUrl.getCategoryTreeUrl(responseFields); String verb = "GET"; Class<?> clz = com.mozu.api.contracts.productruntime.CategoryCollection.class; MozuClient<com.mozu.api.contracts.productruntime.CategoryCollection> mozuClient = (MozuClient<com.mozu.api.contracts.productruntime.CategoryCollection>) MozuClientFactory.getInstance(clz); mozuClient.setVerb(verb); mozuClient.setResourceUrl(url); return mozuClient; } | /**
* Retrieves the list of product categories that appear on the storefront organized in a hierarchical format. Hidden categories do not appear in the list.
* <p><pre><code>
* MozuClient<com.mozu.api.contracts.productruntime.CategoryCollection> mozuClient=GetCategoryTreeClient( responseFields);
* client.setBaseAddress(url);
* client.executeRequest();
* CategoryCollection categoryCollection = client.Result();
* </code></pre></p>
* @param responseFields Filtering syntax appended to an API call to increase or decrease the amount of data returned inside a JSON object. For example, ) returns only the and items inside the array of the specified product.This paramter should only be used to retrieve data. Attempting to update data using this parmater may cause data loss.
* @return Mozu.Api.MozuClient <com.mozu.api.contracts.productruntime.CategoryCollection>
* @see com.mozu.api.contracts.productruntime.CategoryCollection
*/ | Retrieves the list of product categories that appear on the storefront organized in a hierarchical format. Hidden categories do not appear in the list. <code><code> MozuClient mozuClient=GetCategoryTreeClient( responseFields); client.setBaseAddress(url); client.executeRequest(); CategoryCollection categoryCollection = client.Result(); </code></code> | getCategoryTreeClient | {
"repo_name": "bhewett/mozu-java",
"path": "mozu-javaasync-core/src/main/java/com/mozu/api/clients/commerce/catalog/storefront/CategoryClient.java",
"license": "mit",
"size": 8674
} | [
"com.mozu.api.MozuClient",
"com.mozu.api.MozuClientFactory",
"com.mozu.api.MozuUrl"
] | import com.mozu.api.MozuClient; import com.mozu.api.MozuClientFactory; import com.mozu.api.MozuUrl; | import com.mozu.api.*; | [
"com.mozu.api"
] | com.mozu.api; | 2,310,381 |
long getTimeout(TimeUnit timeUnit); | long getTimeout(TimeUnit timeUnit); | /**
* Returns the setting for how long a scanner will automatically retry when a failure occurs.
*
* @return the timeout configured for this scanner
* @since 1.5.0
*/ | Returns the setting for how long a scanner will automatically retry when a failure occurs | getTimeout | {
"repo_name": "milleruntime/accumulo",
"path": "core/src/main/java/org/apache/accumulo/core/client/ScannerBase.java",
"license": "apache-2.0",
"size": 13521
} | [
"java.util.concurrent.TimeUnit"
] | import java.util.concurrent.TimeUnit; | import java.util.concurrent.*; | [
"java.util"
] | java.util; | 2,105,322 |
protected void execute(HttpServletRequest request,
HttpServletResponse response,
RequestContext context)
throws Exception {
// check for a request to return a full xml
String sGetXmlUuid = "";
String sParamsLC = Val.chkStr(request.getQueryString());
if (sParamsLC.indexOf("getxml=") != -1) {
sGetXmlUuid = Val.chkStr(request.getParameter("getxml"));
}
// return the full xml if requested
if (sGetXmlUuid.length() > 0) {
LOGGER.finer("Retrieving document: "+sGetXmlUuid);
String xml = "";
try {
xml = readFullXml(request,response,context,sGetXmlUuid);
} catch (NotAuthorizedException nae) {
throw nae;
} catch (Throwable t) {
LOGGER.warning("\nError retrieving document: "+sGetXmlUuid+"\n "+t.toString());
}
this.writeXmlResponse(response,Val.chkStr(xml));
// execute a normal CSW request
} else {
executeCSW(request,response,context);
}
}
| void function(HttpServletRequest request, HttpServletResponse response, RequestContext context) throws Exception { String sGetXmlUuid = STRgetxml=STRgetxmlSTRRetrieving document: STRSTR\nError retrieving document: STR\n "+t.toString()); } this.writeXmlResponse(response,Val.chkStr(xml)); } else { executeCSW(request,response,context); } } | /**
* Executes a request.
* @param request the HTTP servlet request
* @param response the HTTP servlet response
* @param context the request context
* @throws Exception if a processing exception occurs
*/ | Executes a request | execute | {
"repo_name": "usgin/usgin-geoportal",
"path": "src/com/esri/gpt/server/csw/provider/CswServlet.java",
"license": "apache-2.0",
"size": 8233
} | [
"com.esri.gpt.framework.context.RequestContext",
"com.esri.gpt.framework.util.Val",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse"
] | import com.esri.gpt.framework.context.RequestContext; import com.esri.gpt.framework.util.Val; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; | import com.esri.gpt.framework.context.*; import com.esri.gpt.framework.util.*; import javax.servlet.http.*; | [
"com.esri.gpt",
"javax.servlet"
] | com.esri.gpt; javax.servlet; | 1,757,566 |
@Override
public List<Subscription> getAPISubscriptionsByAPI(String apiId) throws APIMgtDAOException {
final String getSubscriptionsByAPISql = "SELECT SUBS.UUID AS SUBS_UUID, SUBS.TIER_ID AS SUBS_TIER, " +
"SUBS.API_ID AS API_ID, SUBS.APPLICATION_ID AS APP_ID, SUBS.SUB_STATUS AS SUB_STATUS, " +
"SUBS.SUB_TYPE AS SUB_TYPE, APP.NAME AS APP_NAME, APP.APPLICATION_POLICY_ID AS APP_POLICY_ID, " +
"APP.APPLICATION_STATUS AS APP_STATUS, " +
"APP.CREATED_BY AS APP_OWNER, POLICY.NAME AS SUBS_POLICY " +
"FROM AM_SUBSCRIPTION SUBS, AM_APPLICATION APP, AM_SUBSCRIPTION_POLICY POLICY " +
"WHERE SUBS.API_ID = ? AND SUBS.APPLICATION_ID = APP.UUID AND SUBS.TIER_ID = POLICY.UUID " +
"AND SUBS.SUB_STATUS NOT IN (?,?)";
try (Connection conn = DAOUtil.getConnection();
PreparedStatement ps = conn.prepareStatement(getSubscriptionsByAPISql)) {
ps.setString(1, apiId);
ps.setString(2, SubscriptionStatus.ON_HOLD.name());
ps.setString(3, SubscriptionStatus.REJECTED.name());
try (ResultSet rs = ps.executeQuery()) {
return createSubscriptionsWithAppInformationOnly(rs);
}
} catch (SQLException e) {
log.error("Error while executing sql query", e);
throw new APIMgtDAOException(e);
}
} | List<Subscription> function(String apiId) throws APIMgtDAOException { final String getSubscriptionsByAPISql = STR + STR + STR + STR + STR + STR + STR + STR; try (Connection conn = DAOUtil.getConnection(); PreparedStatement ps = conn.prepareStatement(getSubscriptionsByAPISql)) { ps.setString(1, apiId); ps.setString(2, SubscriptionStatus.ON_HOLD.name()); ps.setString(3, SubscriptionStatus.REJECTED.name()); try (ResultSet rs = ps.executeQuery()) { return createSubscriptionsWithAppInformationOnly(rs); } } catch (SQLException e) { log.error(STR, e); throw new APIMgtDAOException(e); } } | /**
* Retrieve the list of subscriptions of an API
*
* @param apiId The UUID of API
* @return A list of {@link Subscription} objects
* @throws APIMgtDAOException If failed to get subscriptions.
*/ | Retrieve the list of subscriptions of an API | getAPISubscriptionsByAPI | {
"repo_name": "lalaji/carbon-apimgt",
"path": "components/apimgt/org.wso2.carbon.apimgt.core/src/main/java/org/wso2/carbon/apimgt/core/dao/impl/APISubscriptionDAOImpl.java",
"license": "apache-2.0",
"size": 39044
} | [
"java.sql.Connection",
"java.sql.PreparedStatement",
"java.sql.ResultSet",
"java.sql.SQLException",
"java.util.List",
"org.wso2.carbon.apimgt.core.exception.APIMgtDAOException",
"org.wso2.carbon.apimgt.core.models.Subscription",
"org.wso2.carbon.apimgt.core.util.APIMgtConstants"
] | import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.List; import org.wso2.carbon.apimgt.core.exception.APIMgtDAOException; import org.wso2.carbon.apimgt.core.models.Subscription; import org.wso2.carbon.apimgt.core.util.APIMgtConstants; | import java.sql.*; import java.util.*; import org.wso2.carbon.apimgt.core.exception.*; import org.wso2.carbon.apimgt.core.models.*; import org.wso2.carbon.apimgt.core.util.*; | [
"java.sql",
"java.util",
"org.wso2.carbon"
] | java.sql; java.util; org.wso2.carbon; | 327,356 |
public static MatrixCursor newClosedCursor() {
MatrixCursor cursor = new MatrixCursor(new String[0]);
cursor.close();
return cursor;
}// newClosedCursor()
/**
* Checks if {@code uri} is a directory.
*
* @param context
* {@link Context}.
* @param uri
* the URI you want to check.
* @return {@code true} if {@code uri} is a directory, {@code false} | static MatrixCursor function() { MatrixCursor cursor = new MatrixCursor(new String[0]); cursor.close(); return cursor; } /** * Checks if {@code uri} is a directory. * * @param context * {@link Context}. * @param uri * the URI you want to check. * @return {@code true} if {@code uri} is a directory, {@code false} | /**
* Creates new cursor, closes it and returns it ^^
*
* @return the newly closed cursor.
*/ | Creates new cursor, closes it and returns it ^^ | newClosedCursor | {
"repo_name": "wcmatthysen/android-filechooser",
"path": "code/src/group/pals/android/lib/ui/filechooser/providers/BaseFileProviderUtils.java",
"license": "mit",
"size": 20644
} | [
"android.content.Context",
"android.database.MatrixCursor"
] | import android.content.Context; import android.database.MatrixCursor; | import android.content.*; import android.database.*; | [
"android.content",
"android.database"
] | android.content; android.database; | 916,870 |
public static IRatedExecutor ratedExecutor(final long rate, final TimeUnit unit)
{
final TaskQueue queue = new TaskQueue();
final ThreadFactory factory = new RatedExecutorThreadFactory();
final IInternalExecutor executor = new ScheduledInternalExecutor(queue, rate, unit, factory);
return new RatedExecutor(queue, executor, new TaskWrapperFactory());
} | static IRatedExecutor function(final long rate, final TimeUnit unit) { final TaskQueue queue = new TaskQueue(); final ThreadFactory factory = new RatedExecutorThreadFactory(); final IInternalExecutor executor = new ScheduledInternalExecutor(queue, rate, unit, factory); return new RatedExecutor(queue, executor, new TaskWrapperFactory()); } | /**
* Creates a new rated executor.
* <P>
* This executor is single threaded, if a task takes longer than the executor rate it will delay
* scheduled tasks. The {@link Future}s returned by this executor do not support throwing
* {@link InterruptedException}s when cancelling running tasks.
*
* @param rate
* The rate of the executor
* @param unit
* The time unit of the rate
* @return The executor
*/ | Creates a new rated executor. This executor is single threaded, if a task takes longer than the executor rate it will delay scheduled tasks. The <code>Future</code>s returned by this executor do not support throwing <code>InterruptedException</code>s when cancelling running tasks | ratedExecutor | {
"repo_name": "mattunderscorechampion/rated-executor",
"path": "src/main/java/com/mattunderscore/rated/executor/RatedExecutors.java",
"license": "bsd-3-clause",
"size": 7489
} | [
"com.mattunderscore.executors.TaskWrapperFactory",
"java.util.concurrent.ThreadFactory",
"java.util.concurrent.TimeUnit"
] | import com.mattunderscore.executors.TaskWrapperFactory; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; | import com.mattunderscore.executors.*; import java.util.concurrent.*; | [
"com.mattunderscore.executors",
"java.util"
] | com.mattunderscore.executors; java.util; | 1,567,920 |
@Test
public void testGetDateNoFormatPropertyDirectlySpecified() throws Exception {
conf.clearProperty(DataConfiguration.DATE_FORMAT_KEY);
assertEquals("Wrong result", expectedDate(), conf.getDate("date.string", DATE_PATTERN));
} | void function() throws Exception { conf.clearProperty(DataConfiguration.DATE_FORMAT_KEY); assertEquals(STR, expectedDate(), conf.getDate(STR, DATE_PATTERN)); } | /**
* Tests a conversion to a Date if no property is set with the date format, and the format is directly passed in.
*/ | Tests a conversion to a Date if no property is set with the date format, and the format is directly passed in | testGetDateNoFormatPropertyDirectlySpecified | {
"repo_name": "apache/commons-configuration",
"path": "src/test/java/org/apache/commons/configuration2/TestDataConfiguration.java",
"license": "apache-2.0",
"size": 89202
} | [
"org.junit.Assert"
] | import org.junit.Assert; | import org.junit.*; | [
"org.junit"
] | org.junit; | 1,410,857 |
@Override
public void visitUnary(EUnary userUnaryNode, SemanticScope semanticScope) {
Operation operation = userUnaryNode.getOperation();
if (semanticScope.getCondition(userUnaryNode, Write.class)) {
throw userUnaryNode.createError(new IllegalArgumentException(
"invalid assignment: cannot assign a value to " + operation.name + " operation " + "[" + operation.symbol + "]"));
}
if (semanticScope.getCondition(userUnaryNode, Read.class) == false) {
throw userUnaryNode.createError(new IllegalArgumentException(
"not a statement: result not used from " + operation.name + " operation " + "[" + operation.symbol + "]"));
}
AExpression userChildNode = userUnaryNode.getChildNode();
Class<?> valueType;
Class<?> unaryType = null;
if (operation == Operation.SUB && (userChildNode instanceof ENumeric || userChildNode instanceof EDecimal)) {
semanticScope.setCondition(userChildNode, Read.class);
semanticScope.copyDecoration(userUnaryNode, userChildNode, TargetType.class);
semanticScope.replicateCondition(userUnaryNode, userChildNode, Explicit.class);
semanticScope.replicateCondition(userUnaryNode, userChildNode, Internal.class);
semanticScope.setCondition(userChildNode, Negate.class);
checkedVisit(userChildNode, semanticScope);
if (semanticScope.hasDecoration(userUnaryNode, TargetType.class)) {
decorateWithCast(userChildNode, semanticScope);
}
valueType = semanticScope.getDecoration(userChildNode, ValueType.class).getValueType();
} else {
if (operation == Operation.NOT) {
semanticScope.setCondition(userChildNode, Read.class);
semanticScope.putDecoration(userChildNode, new TargetType(boolean.class));
checkedVisit(userChildNode, semanticScope);
decorateWithCast(userChildNode, semanticScope);
valueType = boolean.class;
} else if (operation == Operation.BWNOT || operation == Operation.ADD || operation == Operation.SUB) {
semanticScope.setCondition(userChildNode, Read.class);
checkedVisit(userChildNode, semanticScope);
Class<?> childValueType = semanticScope.getDecoration(userChildNode, ValueType.class).getValueType();
unaryType = AnalyzerCaster.promoteNumeric(childValueType, operation != Operation.BWNOT);
if (unaryType == null) {
throw userUnaryNode.createError(new ClassCastException("cannot apply the " + operation.name + " operator " +
"[" + operation.symbol + "] to the type " +
"[" + PainlessLookupUtility.typeToCanonicalTypeName(childValueType) + "]"));
}
semanticScope.putDecoration(userChildNode, new TargetType(unaryType));
decorateWithCast(userChildNode, semanticScope);
TargetType targetType = semanticScope.getDecoration(userUnaryNode, TargetType.class);
if (unaryType == def.class && targetType != null) {
valueType = targetType.getTargetType();
} else {
valueType = unaryType;
}
} else {
throw userUnaryNode.createError(new IllegalStateException("unexpected unary operation [" + operation.name + "]"));
}
}
semanticScope.putDecoration(userUnaryNode, new ValueType(valueType));
if (unaryType != null) {
semanticScope.putDecoration(userUnaryNode, new UnaryType(unaryType));
}
} | void function(EUnary userUnaryNode, SemanticScope semanticScope) { Operation operation = userUnaryNode.getOperation(); if (semanticScope.getCondition(userUnaryNode, Write.class)) { throw userUnaryNode.createError(new IllegalArgumentException( STR + operation.name + STR + "[" + operation.symbol + "]")); } if (semanticScope.getCondition(userUnaryNode, Read.class) == false) { throw userUnaryNode.createError(new IllegalArgumentException( STR + operation.name + STR + "[" + operation.symbol + "]")); } AExpression userChildNode = userUnaryNode.getChildNode(); Class<?> valueType; Class<?> unaryType = null; if (operation == Operation.SUB && (userChildNode instanceof ENumeric userChildNode instanceof EDecimal)) { semanticScope.setCondition(userChildNode, Read.class); semanticScope.copyDecoration(userUnaryNode, userChildNode, TargetType.class); semanticScope.replicateCondition(userUnaryNode, userChildNode, Explicit.class); semanticScope.replicateCondition(userUnaryNode, userChildNode, Internal.class); semanticScope.setCondition(userChildNode, Negate.class); checkedVisit(userChildNode, semanticScope); if (semanticScope.hasDecoration(userUnaryNode, TargetType.class)) { decorateWithCast(userChildNode, semanticScope); } valueType = semanticScope.getDecoration(userChildNode, ValueType.class).getValueType(); } else { if (operation == Operation.NOT) { semanticScope.setCondition(userChildNode, Read.class); semanticScope.putDecoration(userChildNode, new TargetType(boolean.class)); checkedVisit(userChildNode, semanticScope); decorateWithCast(userChildNode, semanticScope); valueType = boolean.class; } else if (operation == Operation.BWNOT operation == Operation.ADD operation == Operation.SUB) { semanticScope.setCondition(userChildNode, Read.class); checkedVisit(userChildNode, semanticScope); Class<?> childValueType = semanticScope.getDecoration(userChildNode, ValueType.class).getValueType(); unaryType = AnalyzerCaster.promoteNumeric(childValueType, operation != Operation.BWNOT); if 
(unaryType == null) { throw userUnaryNode.createError(new ClassCastException(STR + operation.name + STR + "[" + operation.symbol + STR + "[" + PainlessLookupUtility.typeToCanonicalTypeName(childValueType) + "]")); } semanticScope.putDecoration(userChildNode, new TargetType(unaryType)); decorateWithCast(userChildNode, semanticScope); TargetType targetType = semanticScope.getDecoration(userUnaryNode, TargetType.class); if (unaryType == def.class && targetType != null) { valueType = targetType.getTargetType(); } else { valueType = unaryType; } } else { throw userUnaryNode.createError(new IllegalStateException(STR + operation.name + "]")); } } semanticScope.putDecoration(userUnaryNode, new ValueType(valueType)); if (unaryType != null) { semanticScope.putDecoration(userUnaryNode, new UnaryType(unaryType)); } } | /**
* Visits a unary expression which special-cases a negative operator when the child
* is a constant expression to handle the maximum negative values appropriately.
* Checks: type validation
*/ | Visits a unary expression which special-cases a negative operator when the child is a constant expression to handle the maximum negative values appropriately. Checks: type validation | visitUnary | {
"repo_name": "nknize/elasticsearch",
"path": "modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/DefaultSemanticAnalysisPhase.java",
"license": "apache-2.0",
"size": 149793
} | [
"org.elasticsearch.painless.AnalyzerCaster",
"org.elasticsearch.painless.Operation",
"org.elasticsearch.painless.lookup.PainlessLookupUtility",
"org.elasticsearch.painless.node.AExpression",
"org.elasticsearch.painless.node.EDecimal",
"org.elasticsearch.painless.node.ENumeric",
"org.elasticsearch.painless.node.EUnary",
"org.elasticsearch.painless.symbol.Decorations",
"org.elasticsearch.painless.symbol.SemanticScope"
] | import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.Operation; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.node.AExpression; import org.elasticsearch.painless.node.EDecimal; import org.elasticsearch.painless.node.ENumeric; import org.elasticsearch.painless.node.EUnary; import org.elasticsearch.painless.symbol.Decorations; import org.elasticsearch.painless.symbol.SemanticScope; | import org.elasticsearch.painless.*; import org.elasticsearch.painless.lookup.*; import org.elasticsearch.painless.node.*; import org.elasticsearch.painless.symbol.*; | [
"org.elasticsearch.painless"
] | org.elasticsearch.painless; | 470,055 |
public OffsetDateTime estimatedCompletionTime() {
return this.estimatedCompletionTime;
} | OffsetDateTime function() { return this.estimatedCompletionTime; } | /**
* Get the estimatedCompletionTime property: The estimated completion time of the operation.
*
* @return the estimatedCompletionTime value.
*/ | Get the estimatedCompletionTime property: The estimated completion time of the operation | estimatedCompletionTime | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-sql/src/main/java/com/azure/resourcemanager/sql/fluent/models/ManagedInstanceOperationInner.java",
"license": "mit",
"size": 6358
} | [
"java.time.OffsetDateTime"
] | import java.time.OffsetDateTime; | import java.time.*; | [
"java.time"
] | java.time; | 2,093,668 |
void setNamespace(DSSNamespace namespace);
| void setNamespace(DSSNamespace namespace); | /**
* Specifies a namespace for the transformation elements
* @param namespace {@link DSSNamespace} uri
*/ | Specifies a namespace for the transformation elements | setNamespace | {
"repo_name": "openlimit-signcubes/dss",
"path": "dss-xades/src/main/java/eu/europa/esig/dss/xades/reference/DSSTransform.java",
"license": "lgpl-2.1",
"size": 2010
} | [
"eu.europa.esig.dss.definition.DSSNamespace"
] | import eu.europa.esig.dss.definition.DSSNamespace; | import eu.europa.esig.dss.definition.*; | [
"eu.europa.esig"
] | eu.europa.esig; | 1,330,282 |
@Autowired
public void configAuthBuilder(final AuthenticationManagerBuilder authenticationManagerBuilder)
throws Exception {
authenticationManagerBuilder.userDetailsService(authenticationUserService)
.passwordEncoder(passwordEncoderWrapper);
} | void function(final AuthenticationManagerBuilder authenticationManagerBuilder) throws Exception { authenticationManagerBuilder.userDetailsService(authenticationUserService) .passwordEncoder(passwordEncoderWrapper); } | /**
* Append extra information to the {@link AuthenticationManagerBuilder} to tell spring where to
* get the {@link org.springframework.security.core.userdetails.UserDetails} from and how the
* password is to be encrypted.
*
* @param authenticationManagerBuilder This is passed in by spring and will allow us to append
* operations
* @throws Exception This is not great but the exception will be thrown on any error. There are
* many possible exception types. Throwing {@link Exception} is generally
* considered bad practise.
* @see AuthenticationManagerBuilder
*/ | Append extra information to the <code>AuthenticationManagerBuilder</code> to tell spring where to get the <code>org.springframework.security.core.userdetails.UserDetails</code> from and how the password is to be encrypted | configAuthBuilder | {
"repo_name": "orangefoundry/spring-security-example",
"path": "src/main/java/com/orangefoundry/demo/springsecurity/security/SecurityConfiguration.java",
"license": "mit",
"size": 6719
} | [
"org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder"
] | import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder; | import org.springframework.security.config.annotation.authentication.builders.*; | [
"org.springframework.security"
] | org.springframework.security; | 376,817 |
public void processedPendingNotificationResponses() {
log.log(Level.WARNING, "Processing sent notifications.");
if (processedFailedNotificationsMethod == null) {
return;
}
try {
processedFailedNotificationsMethod.invoke(pushManager);
} catch (Exception e) {
// Catching all exception as the method requires handling 3+
// reflection related exceptions
// and 2+ JavaPNS exceptions. And there is nothing much that can be
// done when any of them
// happens other than logging the exception.
log.log(Level.WARNING, "Processing failed notifications failed", e);
}
} | void function() { log.log(Level.WARNING, STR); if (processedFailedNotificationsMethod == null) { return; } try { processedFailedNotificationsMethod.invoke(pushManager); } catch (Exception e) { log.log(Level.WARNING, STR, e); } } | /**
* Read and process any pending error-responses.
*/ | Read and process any pending error-responses | processedPendingNotificationResponses | {
"repo_name": "pschuette22/Zeppa-AppEngine",
"path": "zeppa-notifications/src/main/java/com/zeppamobile/notifications/PushNotificationSender.java",
"license": "apache-2.0",
"size": 7796
} | [
"java.util.logging.Level"
] | import java.util.logging.Level; | import java.util.logging.*; | [
"java.util"
] | java.util; | 2,170,176 |
private void processRelationships(Map<String, String> duplicatedNodes, Map<String, NodeTemplate> nodeTemplates, Topology topology, Csar csar) {
TopologyContext topologyContext = workflowBuilderService.buildTopologyContext(topology, csar);
duplicatedNodes.values().forEach(nodeName -> copyAndCleanRelationships(nodeName, duplicatedNodes, nodeTemplates, topologyContext));
} | void function(Map<String, String> duplicatedNodes, Map<String, NodeTemplate> nodeTemplates, Topology topology, Csar csar) { TopologyContext topologyContext = workflowBuilderService.buildTopologyContext(topology, csar); duplicatedNodes.values().forEach(nodeName -> copyAndCleanRelationships(nodeName, duplicatedNodes, nodeTemplates, topologyContext)); } | /**
* Process relationships of the duplicated nodes: Copy what we want to keep and discard the others
*
* @param duplicatedNodes Map of nodeToDuplicateName--> duplicatedNodeName
* @param nodeTemplates
* @param topology
*/ | Process relationships of the duplicated nodes: Copy what we want to keep and discard the others | processRelationships | {
"repo_name": "alien4cloud/alien4cloud",
"path": "alien4cloud-core/src/main/java/org/alien4cloud/tosca/editor/processors/nodetemplate/DuplicateNodeProcessor.java",
"license": "apache-2.0",
"size": 8414
} | [
"java.util.Map",
"org.alien4cloud.tosca.model.Csar",
"org.alien4cloud.tosca.model.templates.NodeTemplate",
"org.alien4cloud.tosca.model.templates.Topology"
] | import java.util.Map; import org.alien4cloud.tosca.model.Csar; import org.alien4cloud.tosca.model.templates.NodeTemplate; import org.alien4cloud.tosca.model.templates.Topology; | import java.util.*; import org.alien4cloud.tosca.model.*; import org.alien4cloud.tosca.model.templates.*; | [
"java.util",
"org.alien4cloud.tosca"
] | java.util; org.alien4cloud.tosca; | 244,345 |
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
processRequest(request, response);
} | void function(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { processRequest(request, response); } | /**
* Handles the HTTP <code>POST</code> method.
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/ | Handles the HTTP <code>POST</code> method | doPost | {
"repo_name": "ccsu-cs416F15/CS416ClassDemos",
"path": "Lec7DemosV2/src/java/edu/ccsu/ChildInitParamsServlet.java",
"license": "mit",
"size": 2644
} | [
"java.io.IOException",
"javax.servlet.ServletException",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse"
] | import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; | import java.io.*; import javax.servlet.*; import javax.servlet.http.*; | [
"java.io",
"javax.servlet"
] | java.io; javax.servlet; | 2,009,149 |
public void updateBPSProfile(BPSProfileDTO bpsProfileDTO)
throws RemoteException, WorkflowAdminServiceWorkflowException {
stub.updateBPSProfile(bpsProfileDTO);
} | void function(BPSProfileDTO bpsProfileDTO) throws RemoteException, WorkflowAdminServiceWorkflowException { stub.updateBPSProfile(bpsProfileDTO); } | /**
* Update BPS Profile
*
* @param bpsProfileDTO
* @throws RemoteException
* @throws WorkflowAdminServiceWorkflowException
*/ | Update BPS Profile | updateBPSProfile | {
"repo_name": "jacklotusho/carbon-identity",
"path": "components/workflow-mgt/org.wso2.carbon.identity.workflow.mgt.ui/src/main/java/org/wso2/carbon/identity/workflow/mgt/ui/WorkflowAdminServiceClient.java",
"license": "apache-2.0",
"size": 10768
} | [
"java.rmi.RemoteException",
"org.wso2.carbon.identity.workflow.mgt.stub.WorkflowAdminServiceWorkflowException",
"org.wso2.carbon.identity.workflow.mgt.stub.bean.BPSProfileDTO"
] | import java.rmi.RemoteException; import org.wso2.carbon.identity.workflow.mgt.stub.WorkflowAdminServiceWorkflowException; import org.wso2.carbon.identity.workflow.mgt.stub.bean.BPSProfileDTO; | import java.rmi.*; import org.wso2.carbon.identity.workflow.mgt.stub.*; import org.wso2.carbon.identity.workflow.mgt.stub.bean.*; | [
"java.rmi",
"org.wso2.carbon"
] | java.rmi; org.wso2.carbon; | 2,761,197 |
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<RouteFilterInner> createOrUpdateAsync(
String resourceGroupName, String routeFilterName, RouteFilterInner routeFilterParameters); | @ServiceMethod(returns = ReturnType.SINGLE) Mono<RouteFilterInner> createOrUpdateAsync( String resourceGroupName, String routeFilterName, RouteFilterInner routeFilterParameters); | /**
* Creates or updates a route filter in a specified resource group.
*
* @param resourceGroupName The name of the resource group.
* @param routeFilterName The name of the route filter.
* @param routeFilterParameters Parameters supplied to the create or update route filter operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return route Filter Resource on successful completion of {@link Mono}.
*/ | Creates or updates a route filter in a specified resource group | createOrUpdateAsync | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-network/src/main/java/com/azure/resourcemanager/network/fluent/RouteFiltersClient.java",
"license": "mit",
"size": 23978
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.resourcemanager.network.fluent.models.RouteFilterInner"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.resourcemanager.network.fluent.models.RouteFilterInner; | import com.azure.core.annotation.*; import com.azure.resourcemanager.network.fluent.models.*; | [
"com.azure.core",
"com.azure.resourcemanager"
] | com.azure.core; com.azure.resourcemanager; | 2,399,694 |
public CallbackRegistration addItemTapHandler(DataViewItemTapHandler handler) {
return this.addWidgetListener(Event.ITEM_TAP.getValue(), handler.getJsoPeer());
}
| CallbackRegistration function(DataViewItemTapHandler handler) { return this.addWidgetListener(Event.ITEM_TAP.getValue(), handler.getJsoPeer()); } | /**
* Fires whenever an item is tapped
*
* @param handler
*/ | Fires whenever an item is tapped | addItemTapHandler | {
"repo_name": "paulvi/touch4j",
"path": "src/com/emitrom/touch4j/client/ui/DataView.java",
"license": "apache-2.0",
"size": 16868
} | [
"com.emitrom.touch4j.client.core.config.Event",
"com.emitrom.touch4j.client.core.handlers.CallbackRegistration",
"com.emitrom.touch4j.client.core.handlers.dataview.DataViewItemTapHandler"
] | import com.emitrom.touch4j.client.core.config.Event; import com.emitrom.touch4j.client.core.handlers.CallbackRegistration; import com.emitrom.touch4j.client.core.handlers.dataview.DataViewItemTapHandler; | import com.emitrom.touch4j.client.core.config.*; import com.emitrom.touch4j.client.core.handlers.*; import com.emitrom.touch4j.client.core.handlers.dataview.*; | [
"com.emitrom.touch4j"
] | com.emitrom.touch4j; | 906,414 |
@Override
public String toString() {
String result;
int i;
result = "";
for (i = 0; i < m_Elements.size(); i++) {
if (i > 0)
result += ".";
result += m_Elements.get(i);
}
return result;
}
}
public static class PropertyContainer {
protected PropertyDescriptor m_Descriptor;
protected Method m_Read;
protected Method m_Write;
protected Object m_Object;
public PropertyContainer(PropertyDescriptor desc, Object obj) {
super();
m_Descriptor = desc;
m_Read = null;
m_Write = null;
m_Object = obj;
}
public PropertyContainer(Method read, Method write, Object obj) {
super();
m_Descriptor = null;
m_Read = read;
m_Write = write;
m_Object = obj;
} | String function() { String result; int i; result = STR."; result += m_Elements.get(i); } return result; } } public static class PropertyContainer { protected PropertyDescriptor m_Descriptor; protected Method m_Read; protected Method m_Write; protected Object m_Object; public PropertyContainer(PropertyDescriptor desc, Object obj) { super(); m_Descriptor = desc; m_Read = null; m_Write = null; m_Object = obj; } public PropertyContainer(Method read, Method write, Object obj) { super(); m_Descriptor = null; m_Read = read; m_Write = write; m_Object = obj; } | /**
* returns the structure again as a dot-path.
*
* @return the path structure as dot-path
*/ | returns the structure again as a dot-path | toString | {
"repo_name": "automenta/adams-core",
"path": "src/main/java/adams/gui/goe/PropertyPath.java",
"license": "gpl-3.0",
"size": 16580
} | [
"java.beans.PropertyDescriptor",
"java.lang.reflect.Method"
] | import java.beans.PropertyDescriptor; import java.lang.reflect.Method; | import java.beans.*; import java.lang.reflect.*; | [
"java.beans",
"java.lang"
] | java.beans; java.lang; | 286,183 |
public LoadBalancingRuleInner withBackendAddressPool(SubResource backendAddressPool) {
this.backendAddressPool = backendAddressPool;
return this;
} | LoadBalancingRuleInner function(SubResource backendAddressPool) { this.backendAddressPool = backendAddressPool; return this; } | /**
* Set a reference to a pool of DIPs. Inbound traffic is randomly load balanced across IPs in the backend IPs.
*
* @param backendAddressPool the backendAddressPool value to set
* @return the LoadBalancingRuleInner object itself.
*/ | Set a reference to a pool of DIPs. Inbound traffic is randomly load balanced across IPs in the backend IPs | withBackendAddressPool | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/network/mgmt-v2019_08_01/src/main/java/com/microsoft/azure/management/network/v2019_08_01/implementation/LoadBalancingRuleInner.java",
"license": "mit",
"size": 14894
} | [
"com.microsoft.azure.SubResource"
] | import com.microsoft.azure.SubResource; | import com.microsoft.azure.*; | [
"com.microsoft.azure"
] | com.microsoft.azure; | 2,664,935 |
@SuppressWarnings("static-method")
protected boolean onBusItineraryHaltRemoved(BusItineraryHalt halt, int index, BusChangeEvent event) {
return false;
} | @SuppressWarnings(STR) boolean function(BusItineraryHalt halt, int index, BusChangeEvent event) { return false; } | /** Invoked when a bus itinerary halt was removed from the attached itinerary.
*
* <p>This function exists to allow be override to provide a specific behaviour
* when a bus itinerary halt has been removed.
*
* @param halt is the removed itinerary halt.
* @param index is the index of the bus halt.
* @param event is the source of the event.
* @return <code>true</code> if the events was fired, otherwise <code>false</code>.
*/ | Invoked when a bus itinerary halt was removed from the attached itinerary. This function exists to allow be override to provide a specific behaviour when a bus itinerary halt has been removed | onBusItineraryHaltRemoved | {
"repo_name": "gallandarakhneorg/afc",
"path": "advanced/gis/gisbus/src/main/java/org/arakhne/afc/gis/bus/layer/BusItineraryLayer.java",
"license": "apache-2.0",
"size": 14034
} | [
"org.arakhne.afc.gis.bus.network.BusChangeEvent",
"org.arakhne.afc.gis.bus.network.BusItineraryHalt"
] | import org.arakhne.afc.gis.bus.network.BusChangeEvent; import org.arakhne.afc.gis.bus.network.BusItineraryHalt; | import org.arakhne.afc.gis.bus.network.*; | [
"org.arakhne.afc"
] | org.arakhne.afc; | 2,323,702 |
protected Collection<K> getAllKeys0() {
return Collections.emptyList();
}
/**
* A helper method to build a KeyValueSource implementation based on the specified {@link IMap} | Collection<K> function() { return Collections.emptyList(); } /** * A helper method to build a KeyValueSource implementation based on the specified {@link IMap} | /**
* This method is meant to be overridden to implement collecting of all clusterwide available keys
* and return them from {@link #getAllKeys()}.
*
* @return a collection of all clusterwide available keys
*/ | This method is meant to be overridden to implement collecting of all clusterwide available keys and return them from <code>#getAllKeys()</code> | getAllKeys0 | {
"repo_name": "tufangorel/hazelcast",
"path": "hazelcast/src/main/java/com/hazelcast/mapreduce/KeyValueSource.java",
"license": "apache-2.0",
"size": 8950
} | [
"com.hazelcast.core.IMap",
"java.util.Collection",
"java.util.Collections"
] | import com.hazelcast.core.IMap; import java.util.Collection; import java.util.Collections; | import com.hazelcast.core.*; import java.util.*; | [
"com.hazelcast.core",
"java.util"
] | com.hazelcast.core; java.util; | 158,858 |
@Override
public void mapBoundsChanged(MapBoundsEvent event) {
paramsLock.writeLock().lock();
try {
int type = event.getType();
if ((type & MapBoundsEvent.COORDINATE_SYSTEM_MASK) != 0) {
setFullExtent();
reset();
}
} finally {
paramsLock.writeLock().unlock();
}
} | void function(MapBoundsEvent event) { paramsLock.writeLock().lock(); try { int type = event.getType(); if ((type & MapBoundsEvent.COORDINATE_SYSTEM_MASK) != 0) { setFullExtent(); reset(); } } finally { paramsLock.writeLock().unlock(); } } | /**
* Called by the map content's viewport when its bounds have changed. Used here to watch for a
* changed CRS, in which case the map is re-displayed at full extent.
*/ | Called by the map content's viewport when its bounds have changed. Used here to watch for a changed CRS, in which case the map is re-displayed at full extent | mapBoundsChanged | {
"repo_name": "geotools/geotools",
"path": "modules/unsupported/swing/src/main/java/org/geotools/swing/AbstractMapPane.java",
"license": "lgpl-2.1",
"size": 30319
} | [
"org.geotools.map.MapBoundsEvent"
] | import org.geotools.map.MapBoundsEvent; | import org.geotools.map.*; | [
"org.geotools.map"
] | org.geotools.map; | 564,075 |
private static TypeReference meetPhiType(Instruction s) {
TypeReference result = null;
for (int i = 0; i < Phi.getNumberOfValues(s); i++) {
Operand val = Phi.getValue(s, i);
if (val instanceof UnreachableOperand) continue;
TypeReference t = val.getType();
if (t == null) {
s.scratch = FOUND_NULL_TYPE;
} else if (result == null) {
result = t;
} else {
TypeReference meet = ClassLoaderProxy.findCommonSuperclass(result, t);
if (meet == null) {
// TODO: This horrific kludge should go away once we get rid of Address.toInt()
if ((result.isIntLikeType() && (t.isReferenceType() || t.isWordLikeType())) ||
((result.isReferenceType() || result.isWordLikeType()) && t.isIntLikeType())) {
meet = TypeReference.Int;
} else if (result.isReferenceType() && t.isWordLikeType()) {
meet = t;
} else if (result.isWordLikeType() && t.isReferenceType()) {
meet = result;
}
}
if (VM.VerifyAssertions && meet == null) {
VM._assert(VM.NOT_REACHED, result + " and " + t + " meet to null");
}
result = meet;
}
}
return result;
} | static TypeReference function(Instruction s) { TypeReference result = null; for (int i = 0; i < Phi.getNumberOfValues(s); i++) { Operand val = Phi.getValue(s, i); if (val instanceof UnreachableOperand) continue; TypeReference t = val.getType(); if (t == null) { s.scratch = FOUND_NULL_TYPE; } else if (result == null) { result = t; } else { TypeReference meet = ClassLoaderProxy.findCommonSuperclass(result, t); if (meet == null) { if ((result.isIntLikeType() && (t.isReferenceType() t.isWordLikeType())) ((result.isReferenceType() result.isWordLikeType()) && t.isIntLikeType())) { meet = TypeReference.Int; } else if (result.isReferenceType() && t.isWordLikeType()) { meet = t; } else if (result.isWordLikeType() && t.isReferenceType()) { meet = result; } } if (VM.VerifyAssertions && meet == null) { VM._assert(VM.NOT_REACHED, result + STR + t + STR); } result = meet; } } return result; } | /**
* Return the meet of the types on the rhs of a phi instruction
* <p>
* SIDE EFFECT: bashes the Instruction scratch field.
*
* @param s phi instruction
*/ | Return the meet of the types on the rhs of a phi instruction | meetPhiType | {
"repo_name": "CodeOffloading/JikesRVM-CCO",
"path": "jikesrvm-3.1.3/rvm/src/org/jikesrvm/compilers/opt/ssa/EnterSSA.java",
"license": "epl-1.0",
"size": 44804
} | [
"org.jikesrvm.classloader.TypeReference",
"org.jikesrvm.compilers.opt.ClassLoaderProxy",
"org.jikesrvm.compilers.opt.ir.Instruction",
"org.jikesrvm.compilers.opt.ir.Phi",
"org.jikesrvm.compilers.opt.ir.operand.Operand",
"org.jikesrvm.compilers.opt.ir.operand.UnreachableOperand"
] | import org.jikesrvm.classloader.TypeReference; import org.jikesrvm.compilers.opt.ClassLoaderProxy; import org.jikesrvm.compilers.opt.ir.Instruction; import org.jikesrvm.compilers.opt.ir.Phi; import org.jikesrvm.compilers.opt.ir.operand.Operand; import org.jikesrvm.compilers.opt.ir.operand.UnreachableOperand; | import org.jikesrvm.classloader.*; import org.jikesrvm.compilers.opt.*; import org.jikesrvm.compilers.opt.ir.*; import org.jikesrvm.compilers.opt.ir.operand.*; | [
"org.jikesrvm.classloader",
"org.jikesrvm.compilers"
] | org.jikesrvm.classloader; org.jikesrvm.compilers; | 1,513,587 |
boolean validatePatientAwarenessAwarenessCode(DiagnosticChain diagnostics, Map<Object, Object> context);
| boolean validatePatientAwarenessAwarenessCode(DiagnosticChain diagnostics, Map<Object, Object> context); | /**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* <!-- begin-model-doc -->
* not self.awarenessCode.oclIsUndefined()
* @param diagnostics The chain of diagnostics to which problems are to be appended.
* @param context The cache of context-specific information.
* <!-- end-model-doc -->
* @model annotation="http://www.eclipse.org/uml2/1.1.0/GenModel body='not self.awarenessCode.oclIsUndefined()'"
* @generated
*/ | not self.awarenessCode.oclIsUndefined() | validatePatientAwarenessAwarenessCode | {
"repo_name": "drbgfc/mdht",
"path": "cda/deprecated/org.openhealthtools.mdht.uml.cda.ccd/src/org/openhealthtools/mdht/uml/cda/ccd/PatientAwareness.java",
"license": "epl-1.0",
"size": 3547
} | [
"java.util.Map",
"org.eclipse.emf.common.util.DiagnosticChain"
] | import java.util.Map; import org.eclipse.emf.common.util.DiagnosticChain; | import java.util.*; import org.eclipse.emf.common.util.*; | [
"java.util",
"org.eclipse.emf"
] | java.util; org.eclipse.emf; | 2,324,448 |
private int getChunkSize() throws IOException {
// skip CRLF
if (!bof) {
int cr = in.read();
int lf = in.read();
if ((cr != HTTP.CR) || (lf != HTTP.LF)) {
throw new MalformedChunkCodingException(
"CRLF expected at end of chunk");
}
}
//parse data
this.buffer.clear();
int i = this.in.readLine(this.buffer);
if (i == -1) {
return 0;
}
int separator = this.buffer.indexOf(';');
if (separator < 0) {
separator = this.buffer.length();
}
try {
return Integer.parseInt(this.buffer.substringTrimmed(0, separator), 16);
} catch (NumberFormatException e) {
throw new MalformedChunkCodingException("Bad chunk header");
}
} | int function() throws IOException { if (!bof) { int cr = in.read(); int lf = in.read(); if ((cr != HTTP.CR) (lf != HTTP.LF)) { throw new MalformedChunkCodingException( STR); } } this.buffer.clear(); int i = this.in.readLine(this.buffer); if (i == -1) { return 0; } int separator = this.buffer.indexOf(';'); if (separator < 0) { separator = this.buffer.length(); } try { return Integer.parseInt(this.buffer.substringTrimmed(0, separator), 16); } catch (NumberFormatException e) { throw new MalformedChunkCodingException(STR); } } | /**
* Expects the stream to start with a chunksize in hex with optional
* comments after a semicolon. The line must end with a CRLF: "a3; some
* comment\r\n" Positions the stream at the start of the next line.
*
* @param in The new input stream.
* @param required <tt>true<tt/> if a valid chunk must be present,
* <tt>false<tt/> otherwise.
*
* @return the chunk size as integer
*
* @throws IOException when the chunk size could not be parsed
*/ | Expects the stream to start with a chunksize in hex with optional comment\r\n" Positions the stream at the start of the next line | getChunkSize | {
"repo_name": "alinvasile/httpcore",
"path": "httpcore/src/main/java/org/apache/http/impl/io/ChunkedInputStream.java",
"license": "apache-2.0",
"size": 8958
} | [
"java.io.IOException",
"org.apache.http.MalformedChunkCodingException"
] | import java.io.IOException; import org.apache.http.MalformedChunkCodingException; | import java.io.*; import org.apache.http.*; | [
"java.io",
"org.apache.http"
] | java.io; org.apache.http; | 2,067,580 |
public Observable<ServiceResponse<Page<MetadataEntityInner>>> listNextSinglePageAsync(final String nextPageLink) {
if (nextPageLink == null) {
throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
} | Observable<ServiceResponse<Page<MetadataEntityInner>>> function(final String nextPageLink) { if (nextPageLink == null) { throw new IllegalArgumentException(STR); } | /**
* Gets the list of metadata entities.
*
ServiceResponse<PageImpl<MetadataEntityInner>> * @param nextPageLink The NextLink from the previous successful call to List operation.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the PagedList<MetadataEntityInner> object wrapped in {@link ServiceResponse} if successful.
*/ | Gets the list of metadata entities | listNextSinglePageAsync | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/advisor/mgmt-v2017_04_19/src/main/java/com/microsoft/azure/management/advisor/v2017_04_19/implementation/RecommendationMetadatasInner.java",
"license": "mit",
"size": 18072
} | [
"com.microsoft.azure.Page",
"com.microsoft.rest.ServiceResponse"
] | import com.microsoft.azure.Page; import com.microsoft.rest.ServiceResponse; | import com.microsoft.azure.*; import com.microsoft.rest.*; | [
"com.microsoft.azure",
"com.microsoft.rest"
] | com.microsoft.azure; com.microsoft.rest; | 2,697,832 |
TraversalDescription relationships( RelationshipType type,
Direction direction ); | TraversalDescription relationships( RelationshipType type, Direction direction ); | /**
* Adds {@code type} to the list of relationship types to traverse in
* the given {@code direction}. There's no priority or order in which
* types to traverse.
*
* @param type the {@link RelationshipType} to add to the list of types
* to traverse.
* @param direction the {@link Direction} to traverse this type of
* relationship in.
* @return a new traversal description with the new modifications.
*/ | Adds type to the list of relationship types to traverse in the given direction. There's no priority or order in which types to traverse | relationships | {
"repo_name": "eldersantos/community",
"path": "kernel/src/main/java/org/neo4j/graphdb/traversal/TraversalDescription.java",
"license": "gpl-3.0",
"size": 9424
} | [
"org.neo4j.graphdb.Direction",
"org.neo4j.graphdb.RelationshipType"
] | import org.neo4j.graphdb.Direction; import org.neo4j.graphdb.RelationshipType; | import org.neo4j.graphdb.*; | [
"org.neo4j.graphdb"
] | org.neo4j.graphdb; | 2,624,841 |
public MappingType[] getMapsForSourceRelation(String rel)
{
ArrayList<MappingType> ms = mapsForSourceRel.get(rel);
return ms.toArray(new MappingType[ms.size()]);
}
| MappingType[] function(String rel) { ArrayList<MappingType> ms = mapsForSourceRel.get(rel); return ms.toArray(new MappingType[ms.size()]); } | /**
* Retrieves all the mappings associated with a specified relation.
* BORIS: changed to use hash table lookup.
*
* @author mdangelo
*
* @param rel The name of the relation
* @return An array of mappings
*
*/ | Retrieves all the mappings associated with a specified relation | getMapsForSourceRelation | {
"repo_name": "RJMillerLab/ibench",
"path": "src/org/vagabond/benchmark/model/TrampXMLModel.java",
"license": "apache-2.0",
"size": 14920
} | [
"java.util.ArrayList",
"org.vagabond.xmlmodel.MappingType"
] | import java.util.ArrayList; import org.vagabond.xmlmodel.MappingType; | import java.util.*; import org.vagabond.xmlmodel.*; | [
"java.util",
"org.vagabond.xmlmodel"
] | java.util; org.vagabond.xmlmodel; | 248,539 |
private void addRecords()
{
try
{
output = new BufferedWriter(writer);
text.write(output);
}
catch(IOException e)
{
JOptionPane.showMessageDialog(text, "Error in output of file", "Error in output of file", JOptionPane.ERROR_MESSAGE);
System.exit(1);
}//end catch
}
}//end classs
//class TextAreaOutputTest
//{
//public static void main(String[] args) {
// SwingUtilities.invokeLater(new Runnable()
// {
// public void run()
// {
// JFrame f = new JFrame();
// f.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
// f.setSize(500,500);
// f.setVisible(true);
// JTextArea text = new JTextArea(20,20);
// JButton button = new JButton("Save");
// f.add(new JScrollPane(text),BorderLayout.CENTER);
// text.setLineWrap(true);
// f.add(button,BorderLayout.SOUTH);
// button.addActionListener(new ActionListener()
// {
// public void actionPerformed(ActionEvent evt)
// {
// File file = new com.dreamer.file.FileFunctions().getOpenFile();
// TextAreaOutput t = new TextAreaOutput(text,file);
// }
// });
// }//end run
// });
//}
| void function() { try { output = new BufferedWriter(writer); text.write(output); } catch(IOException e) { JOptionPane.showMessageDialog(text, STR, STR, JOptionPane.ERROR_MESSAGE); System.exit(1); } } } | /**
* this method reads the current text within the text area and appends it to a text file
*/ | this method reads the current text within the text area and appends it to a text file | addRecords | {
"repo_name": "gerryDreamer/dreamerLib",
"path": "src/com/dreamer/file/TextAreaOutput.java",
"license": "gpl-2.0",
"size": 6465
} | [
"java.io.BufferedWriter",
"java.io.IOException",
"javax.swing.JOptionPane"
] | import java.io.BufferedWriter; import java.io.IOException; import javax.swing.JOptionPane; | import java.io.*; import javax.swing.*; | [
"java.io",
"javax.swing"
] | java.io; javax.swing; | 605,951 |
protected void buildSurtPrefixSet() {
SurtPrefixSet newSurtPrefixes = new SurtPrefixSet();
FileReader fr = null;
// read SURTs from file, if appropriate
String sourcePath = (String)getUncheckedAttribute(null,
ATTR_SURTS_SOURCE_FILE);
if (sourcePath.length() > 0) {
File source = new File(sourcePath);
if (!source.isAbsolute()) {
source = new File(getSettingsHandler().getOrder()
.getController().getDisk(), sourcePath);
}
try {
fr = new FileReader(source);
try {
newSurtPrefixes.importFromMixed(fr, true);
} finally {
fr.close();
}
} catch (IOException e) {
e.printStackTrace();
throw new RuntimeException(e);
}
}
// interpret seeds as surts, if appropriate
boolean deduceFromSeeds = ((Boolean)getUncheckedAttribute(null,
ATTR_SEEDS_AS_SURT_PREFIXES)).booleanValue();
if(deduceFromSeeds) {
try {
fr = new FileReader(getSeedfile());
try {
newSurtPrefixes.importFromMixed(fr, deduceFromSeeds);
} finally {
fr.close();
}
} catch (IOException e) {
e.printStackTrace();
throw new RuntimeException(e);
}
}
surtPrefixes = newSurtPrefixes;
} | void function() { SurtPrefixSet newSurtPrefixes = new SurtPrefixSet(); FileReader fr = null; String sourcePath = (String)getUncheckedAttribute(null, ATTR_SURTS_SOURCE_FILE); if (sourcePath.length() > 0) { File source = new File(sourcePath); if (!source.isAbsolute()) { source = new File(getSettingsHandler().getOrder() .getController().getDisk(), sourcePath); } try { fr = new FileReader(source); try { newSurtPrefixes.importFromMixed(fr, true); } finally { fr.close(); } } catch (IOException e) { e.printStackTrace(); throw new RuntimeException(e); } } boolean deduceFromSeeds = ((Boolean)getUncheckedAttribute(null, ATTR_SEEDS_AS_SURT_PREFIXES)).booleanValue(); if(deduceFromSeeds) { try { fr = new FileReader(getSeedfile()); try { newSurtPrefixes.importFromMixed(fr, deduceFromSeeds); } finally { fr.close(); } } catch (IOException e) { e.printStackTrace(); throw new RuntimeException(e); } } surtPrefixes = newSurtPrefixes; } | /**
* Construct the set of prefixes to use, from the seed list (
* which may include both URIs and '+'-prefixed directives).
*/ | Construct the set of prefixes to use, from the seed list ( which may include both URIs and '+'-prefixed directives) | buildSurtPrefixSet | {
"repo_name": "gaowangyizu/myHeritrix",
"path": "myHeritrix/src/org/archive/crawler/deciderules/SurtPrefixedDecideRule.java",
"license": "apache-2.0",
"size": 10559
} | [
"java.io.File",
"java.io.FileReader",
"java.io.IOException",
"org.archive.util.SurtPrefixSet"
] | import java.io.File; import java.io.FileReader; import java.io.IOException; import org.archive.util.SurtPrefixSet; | import java.io.*; import org.archive.util.*; | [
"java.io",
"org.archive.util"
] | java.io; org.archive.util; | 318,519 |
@Test
@TestOrder(4)
@Category(UITest.class)
public void test4PerfomSomeTestsWithSelection() {
FIBBrowserWidget<?, Object> w = (FIBBrowserWidget<?, Object>) controller.viewForComponent(browser);
w.resetSelection();
w.addToSelection(family);
// The selection is here empty because iterator class has been declared as Person, Family is not a Person, therefore the selection
// is null
assertEquals(Collections.emptyList(), w.getSelection());
w.resetSelection();
w.addToSelection(family.getChildren().get(0));
assertEquals(Collections.singletonList(family.getChildren().get(0)), w.getSelection());
// int[] indices = new int[3];
// indices[0] = 1;
// indices[1] = 2;
// indices[2] = 4;
// w7.getDynamicJComponent().setSelectedIndices(indices);
Person child1 = family.getChildren().get(1);
Person child2 = family.getChildren().get(2);
Person child4 = family.getChildren().get(4);
w.resetSelection();
w.addToSelection(child1);
w.addToSelection(child2);
w.addToSelection(child4);
List<Person> expectedSelection = new ArrayList<>();
expectedSelection.add(child1);
expectedSelection.add(child2);
expectedSelection.add(child4);
assertEquals(expectedSelection, w.getSelection());
controller.setFocusedWidget(w);
assertEquals(expectedSelection, controller.getSelectionLeader().getSelection());
} | @TestOrder(4) @Category(UITest.class) void function() { FIBBrowserWidget<?, Object> w = (FIBBrowserWidget<?, Object>) controller.viewForComponent(browser); w.resetSelection(); w.addToSelection(family); assertEquals(Collections.emptyList(), w.getSelection()); w.resetSelection(); w.addToSelection(family.getChildren().get(0)); assertEquals(Collections.singletonList(family.getChildren().get(0)), w.getSelection()); Person child1 = family.getChildren().get(1); Person child2 = family.getChildren().get(2); Person child4 = family.getChildren().get(4); w.resetSelection(); w.addToSelection(child1); w.addToSelection(child2); w.addToSelection(child4); List<Person> expectedSelection = new ArrayList<>(); expectedSelection.add(child1); expectedSelection.add(child2); expectedSelection.add(child4); assertEquals(expectedSelection, w.getSelection()); controller.setFocusedWidget(w); assertEquals(expectedSelection, controller.getSelectionLeader().getSelection()); } | /**
* Try to select some objects, check that selection is in sync with it
*/ | Try to select some objects, check that selection is in sync with it | test4PerfomSomeTestsWithSelection | {
"repo_name": "openflexo-team/gina",
"path": "gina-swing/src/test/java/org/openflexo/gina/swing/utils/swing/FIBBrowserWidgetTest.java",
"license": "gpl-3.0",
"size": 9550
} | [
"java.util.ArrayList",
"java.util.Collections",
"java.util.List",
"org.junit.Assert",
"org.junit.experimental.categories.Category",
"org.openflexo.gina.sampleData.Person",
"org.openflexo.gina.view.widget.FIBBrowserWidget",
"org.openflexo.test.TestOrder",
"org.openflexo.test.UITest"
] | import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.junit.Assert; import org.junit.experimental.categories.Category; import org.openflexo.gina.sampleData.Person; import org.openflexo.gina.view.widget.FIBBrowserWidget; import org.openflexo.test.TestOrder; import org.openflexo.test.UITest; | import java.util.*; import org.junit.*; import org.junit.experimental.categories.*; import org.openflexo.gina.*; import org.openflexo.gina.view.widget.*; import org.openflexo.test.*; | [
"java.util",
"org.junit",
"org.junit.experimental",
"org.openflexo.gina",
"org.openflexo.test"
] | java.util; org.junit; org.junit.experimental; org.openflexo.gina; org.openflexo.test; | 2,414,230 |
static void show(Context context, UsbSerialPort port) {
sPort = port;
final Intent intent = new Intent(context, SentinelActivity.class);
intent.addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP | Intent.FLAG_ACTIVITY_NO_HISTORY);
context.startActivity(intent);
} | static void show(Context context, UsbSerialPort port) { sPort = port; final Intent intent = new Intent(context, SentinelActivity.class); intent.addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP Intent.FLAG_ACTIVITY_NO_HISTORY); context.startActivity(intent); } | /**
* Starts the activity, using the supplied driver instance.
*
* @param context the Context
* @param port the USB Serial port
*/ | Starts the activity, using the supplied driver instance | show | {
"repo_name": "gibsjose/RFCxSentinel",
"path": "Sentinel/src/main/java/org/rfcx/sentinel/SentinelActivity.java",
"license": "lgpl-2.1",
"size": 6059
} | [
"android.content.Context",
"android.content.Intent",
"com.hoho.android.usbserial.driver.UsbSerialPort"
] | import android.content.Context; import android.content.Intent; import com.hoho.android.usbserial.driver.UsbSerialPort; | import android.content.*; import com.hoho.android.usbserial.driver.*; | [
"android.content",
"com.hoho.android"
] | android.content; com.hoho.android; | 2,371,276 |
public MapColor getMapColor(IBlockState state)
{
BlockPlanks.EnumType blockplanks$enumtype = (BlockPlanks.EnumType)state.getValue(VARIANT);
switch ((BlockLog.EnumAxis)state.getValue(LOG_AXIS))
{
case X:
case Z:
case NONE:
default:
switch (blockplanks$enumtype)
{
case OAK:
default:
return BlockPlanks.EnumType.SPRUCE.func_181070_c();
case SPRUCE:
return BlockPlanks.EnumType.DARK_OAK.func_181070_c();
case BIRCH:
return MapColor.quartzColor;
case JUNGLE:
return BlockPlanks.EnumType.SPRUCE.func_181070_c();
}
case Y:
return blockplanks$enumtype.func_181070_c();
}
} | MapColor function(IBlockState state) { BlockPlanks.EnumType blockplanks$enumtype = (BlockPlanks.EnumType)state.getValue(VARIANT); switch ((BlockLog.EnumAxis)state.getValue(LOG_AXIS)) { case X: case Z: case NONE: default: switch (blockplanks$enumtype) { case OAK: default: return BlockPlanks.EnumType.SPRUCE.func_181070_c(); case SPRUCE: return BlockPlanks.EnumType.DARK_OAK.func_181070_c(); case BIRCH: return MapColor.quartzColor; case JUNGLE: return BlockPlanks.EnumType.SPRUCE.func_181070_c(); } case Y: return blockplanks$enumtype.func_181070_c(); } } | /**
* Get the MapColor for this Block and the given BlockState
*/ | Get the MapColor for this Block and the given BlockState | getMapColor | {
"repo_name": "SkidJava/BaseClient",
"path": "new_1.8.8/net/minecraft/block/BlockOldLog.java",
"license": "gpl-2.0",
"size": 4701
} | [
"net.minecraft.block.material.MapColor",
"net.minecraft.block.state.IBlockState"
] | import net.minecraft.block.material.MapColor; import net.minecraft.block.state.IBlockState; | import net.minecraft.block.material.*; import net.minecraft.block.state.*; | [
"net.minecraft.block"
] | net.minecraft.block; | 2,597,062 |
/**
 * Closes the stream by delegating to the wrapped stream implementation.
 *
 * @throws IOException if the underlying stream fails to close
 */
public void close() throws IOException
{
    _stream.close();
}
* Closes the stream.
*/ | Closes the stream | close | {
"repo_name": "moriyoshi/quercus-gae",
"path": "src/main/java/com/caucho/vfs/StreamImplOutputStream.java",
"license": "gpl-2.0",
"size": 2187
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 2,622,392 |
jToolBar1 = new javax.swing.JToolBar();
jButton1 = new javax.swing.JButton();
jButton2 = new javax.swing.JButton();
jButton3 = new javax.swing.JButton();
jPanel1 = new javax.swing.JPanel();
jScrollPane2 = new javax.swing.JScrollPane();
String[] colunas = {"ID", "Nome", "CPF", "Email"};
List<Cliente> clientes = (new ClienteService()).listaCliente();
modelo = new ClienteModelo(colunas, clientes);
jTable2 = new javax.swing.JTable();
setClosable(true);
setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
setIconifiable(true);
setMaximizable(true);
setTitle("Menu de Clientes");
jToolBar1.setBackground(new java.awt.Color(255, 255, 255));
jToolBar1.setRollover(true); | jToolBar1 = new javax.swing.JToolBar(); jButton1 = new javax.swing.JButton(); jButton2 = new javax.swing.JButton(); jButton3 = new javax.swing.JButton(); jPanel1 = new javax.swing.JPanel(); jScrollPane2 = new javax.swing.JScrollPane(); String[] colunas = {"ID", "Nome", "CPF", "Email"}; List<Cliente> clientes = (new ClienteService()).listaCliente(); modelo = new ClienteModelo(colunas, clientes); jTable2 = new javax.swing.JTable(); setClosable(true); setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE); setIconifiable(true); setMaximizable(true); setTitle(STR); jToolBar1.setBackground(new java.awt.Color(255, 255, 255)); jToolBar1.setRollover(true); | /**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/ | This method is called from within the constructor to initialize the form. regenerated by the Form Editor | initComponents | {
"repo_name": "kstkelvin/crud-tema-iv",
"path": "src/view/ClienteFrame.java",
"license": "epl-1.0",
"size": 19612
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,508,960 |
public ClusterSchema findClusterSchema( String name ) {
for ( int i = 0; i < clusterSchemas.size(); i++ ) {
ClusterSchema schema = clusterSchemas.get( i );
if ( schema.getName().equalsIgnoreCase( name ) ) {
return schema;
}
}
return null;
} | ClusterSchema function( String name ) { for ( int i = 0; i < clusterSchemas.size(); i++ ) { ClusterSchema schema = clusterSchemas.get( i ); if ( schema.getName().equalsIgnoreCase( name ) ) { return schema; } } return null; } | /**
* Find a clustering schema using its name.
*
* @param name
* The name of the clustering schema to look for.
* @return the cluster schema with the specified name of null if nothing was found
*/ | Find a clustering schema using its name | findClusterSchema | {
"repo_name": "Advent51/pentaho-kettle",
"path": "engine/src/main/java/org/pentaho/di/trans/TransMeta.java",
"license": "apache-2.0",
"size": 225587
} | [
"org.pentaho.di.cluster.ClusterSchema"
] | import org.pentaho.di.cluster.ClusterSchema; | import org.pentaho.di.cluster.*; | [
"org.pentaho.di"
] | org.pentaho.di; | 2,713,055 |
@TargetApi(Build.VERSION_CODES.HONEYCOMB_MR2)
public void showProgress(final boolean show) {
// On Honeycomb MR2 we have the ViewPropertyAnimator APIs, which allow
// for very easy animations. If available, use these APIs to fade-in
// the progress spinner.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR2) {
int shortAnimTime = getResources().getInteger(android.R.integer.config_shortAnimTime); | @TargetApi(Build.VERSION_CODES.HONEYCOMB_MR2) void function(final boolean show) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR2) { int shortAnimTime = getResources().getInteger(android.R.integer.config_shortAnimTime); | /**
* Shows the progress UI and hides the login form.
*/ | Shows the progress UI and hides the login form | showProgress | {
"repo_name": "khanrizwan/TravisGradleDemo",
"path": "app/src/main/java/in/rizwankhan/demo/LoginActivity.java",
"license": "apache-2.0",
"size": 10408
} | [
"android.annotation.TargetApi",
"android.os.Build"
] | import android.annotation.TargetApi; import android.os.Build; | import android.annotation.*; import android.os.*; | [
"android.annotation",
"android.os"
] | android.annotation; android.os; | 241,854 |
private CommandLineCompilerConfiguration
getBaseCompilerConfiguration(final Map<String, TargetInfo> targets) {
//
// find first target with an gcc or bcc compilation
//
CommandLineCompilerConfiguration compilerConfig = null;
//
// get the first target and assume that it is representative
//
Iterator<TargetInfo> targetIter = targets.values().iterator();
while (targetIter.hasNext()) {
TargetInfo targetInfo = targetIter.next();
ProcessorConfiguration config = targetInfo.getConfiguration();
String identifier = config.getIdentifier();
//
// for the first gcc or bcc compiler
//
if (config instanceof CommandLineCompilerConfiguration) {
compilerConfig = (CommandLineCompilerConfiguration) config;
if (compilerConfig.getCompiler() instanceof GccCCompiler ||
compilerConfig.getCompiler() instanceof BorlandCCompiler) {
return compilerConfig;
}
}
}
return null;
} | CommandLineCompilerConfiguration function(final Map<String, TargetInfo> targets) { while (targetIter.hasNext()) { TargetInfo targetInfo = targetIter.next(); ProcessorConfiguration config = targetInfo.getConfiguration(); String identifier = config.getIdentifier(); compilerConfig = (CommandLineCompilerConfiguration) config; if (compilerConfig.getCompiler() instanceof GccCCompiler compilerConfig.getCompiler() instanceof BorlandCCompiler) { return compilerConfig; } } } return null; } | /**
* Gets the first recognized compiler from the
* compilation targets.
* @param targets compilation targets
* @return representative (hopefully) compiler configuration
*/ | Gets the first recognized compiler from the compilation targets | getBaseCompilerConfiguration | {
"repo_name": "muzare/nar-maven-plugin",
"path": "src/main/java/com/github/maven_nar/cpptasks/borland/CBuilderXProjectWriter.java",
"license": "apache-2.0",
"size": 20369
} | [
"com.github.maven_nar.cpptasks.TargetInfo",
"com.github.maven_nar.cpptasks.compiler.CommandLineCompilerConfiguration",
"com.github.maven_nar.cpptasks.compiler.ProcessorConfiguration",
"com.github.maven_nar.cpptasks.gcc.GccCCompiler",
"java.util.Map"
] | import com.github.maven_nar.cpptasks.TargetInfo; import com.github.maven_nar.cpptasks.compiler.CommandLineCompilerConfiguration; import com.github.maven_nar.cpptasks.compiler.ProcessorConfiguration; import com.github.maven_nar.cpptasks.gcc.GccCCompiler; import java.util.Map; | import com.github.maven_nar.cpptasks.*; import com.github.maven_nar.cpptasks.compiler.*; import com.github.maven_nar.cpptasks.gcc.*; import java.util.*; | [
"com.github.maven_nar",
"java.util"
] | com.github.maven_nar; java.util; | 213,807 |
// Fetch every organization row via the mapped statement "tdSmOrgAllData".
List<TdSmOrganization> orgList = null;
try {
    orgList = executor.queryList(TdSmOrganization.class, "tdSmOrgAllData");
} catch (Exception e) {
    // Log and fall through: callers receive null when the query fails.
    logger.error("get organization list error", e);
}
return orgList;
}
* get all organization information
* @return List<TdSmOrganization>
*/ | get all organization information | findOrgList | {
"repo_name": "tzou24/BPS",
"path": "BPS/src/com/sany/masterdata/hr/dao/TdSmOrganizationDao.java",
"license": "apache-2.0",
"size": 2972
} | [
"com.sany.masterdata.hr.entity.TdSmOrganization",
"java.util.List"
] | import com.sany.masterdata.hr.entity.TdSmOrganization; import java.util.List; | import com.sany.masterdata.hr.entity.*; import java.util.*; | [
"com.sany.masterdata",
"java.util"
] | com.sany.masterdata; java.util; | 1,511,878 |
/**
 * Joins each value in the collection with the given delimiter, appending
 * all results to the given {@link StringBuffer}. Delegates to the
 * four-argument overload with quoting disabled.
 *
 * @param buffer     target buffer the joined values are appended to
 * @param collection values to join
 * @param delim      delimiter placed between consecutive values
 */
public static void join( StringBuffer buffer, Collection collection, String delim )
{
    join( buffer, collection, delim, false );
}
* This method joins each value in the collection with the given delimiter. All results are appended to the
* given {@link StringBuffer} instance.
*
* @param buffer
* @param collection
* @param delim
*/ | This method joins each value in the collection with the given delimiter. All results are appended to the given <code>StringBuffer</code> instance | join | {
"repo_name": "codeaudit/semantic-vectors-lucene-tools",
"path": "src/main/java/io/seldon/util/CollectionUtils.java",
"license": "apache-2.0",
"size": 4763
} | [
"java.util.Collection"
] | import java.util.Collection; | import java.util.*; | [
"java.util"
] | java.util; | 1,753,578 |
/**
 * Checks for the existence of the specified enchantment on this item meta.
 *
 * @param ench enchantment to check
 * @return true if this enchantment exists for this meta
 */
boolean hasEnchant(Enchantment ench);
* Checks for existence of the specified enchantment.
*
* @param ench enchantment to check
* @return true if this enchantment exists for this meta
*/ | Checks for existence of the specified enchantment | hasEnchant | {
"repo_name": "MrDiamond123/Spigot-API",
"path": "src/main/java/org/bukkit/inventory/meta/ItemMeta.java",
"license": "gpl-3.0",
"size": 4163
} | [
"org.bukkit.enchantments.Enchantment"
] | import org.bukkit.enchantments.Enchantment; | import org.bukkit.enchantments.*; | [
"org.bukkit.enchantments"
] | org.bukkit.enchantments; | 1,508,420 |
public List<C> findAllChildren() {
List<String> objects = getChildrenList();
List<C> children = new ArrayList<C>();
try {
Model dummy = (Model) childClazz.newInstance();
dummy.setContext(context);
for (String id : objects) {
children.add((C) dummy.find(id));
}
} catch (IllegalAccessException e) {
e.printStackTrace();
} catch (InstantiationException e) {
e.printStackTrace();
}
return children;
} | List<C> function() { List<String> objects = getChildrenList(); List<C> children = new ArrayList<C>(); try { Model dummy = (Model) childClazz.newInstance(); dummy.setContext(context); for (String id : objects) { children.add((C) dummy.find(id)); } } catch (IllegalAccessException e) { e.printStackTrace(); } catch (InstantiationException e) { e.printStackTrace(); } return children; } | /**
* Find all objects from the child list.
*
* TODO: Figure out how to make this accesible without...
* creating a dummy instance.
*
* @throws com.mauriciogiordano.easydb.exception.NoContextFoundException in case of null context.
* @return A list of all children.
*/ | Find all objects from the child list. creating a dummy instance | findAllChildren | {
"repo_name": "mauriciogior/android-easy-db",
"path": "src/main/java/com/mauriciogiordano/easydb/bean/HasManyModel.java",
"license": "gpl-2.0",
"size": 6549
} | [
"java.util.ArrayList",
"java.util.List"
] | import java.util.ArrayList; import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,055,743 |
/**
 * Write the &lt;trkseg&gt; tag with one &lt;trkpt&gt; per stored position.
 *
 * Required GPX fields (lat, lon, time, name) are always written; optional
 * fields (ele, desc and the ulogger extensions) are written only when the
 * corresponding database column has a value.
 *
 * @param serializer XmlSerializer positioned inside the &lt;trk&gt; element
 * @throws IOException IO exception
 * @throws IllegalArgumentException Xml illegal argument
 * @throws IllegalStateException Xml illegal state
 */
private void writePositions(@NonNull XmlSerializer serializer)
        throws IOException, IllegalArgumentException, IllegalStateException {
    // try-with-resources guarantees the cursor is closed even on serializer errors
    try (Cursor cursor = db.getPositions()) {
        serializer.startTag(null, "trkseg");
        while (cursor.moveToNext()) {
            serializer.startTag(null, "trkpt");
            serializer.attribute(null, "lat", DbAccess.getLatitude(cursor));
            serializer.attribute(null, "lon", DbAccess.getLongitude(cursor));
            if (DbAccess.hasAltitude(cursor)) {
                writeTag(serializer, "ele", DbAccess.getAltitude(cursor));
            }
            writeTag(serializer, "time", DbAccess.getTimeISO8601(cursor));
            writeTag(serializer, "name", DbAccess.getID(cursor));
            if (DbAccess.hasComment(cursor)) {
                writeTag(serializer, "desc", DbAccess.getComment(cursor));
            }
            // ulogger extensions (accuracy, speed, bearing, provider)
            serializer.startTag(null, "extensions");
            if (DbAccess.hasAccuracy(cursor)) {
                writeTag(serializer, "accuracy", DbAccess.getAccuracy(cursor), ns_ulogger);
            }
            if (DbAccess.hasSpeed(cursor)) {
                writeTag(serializer, "speed", DbAccess.getSpeed(cursor), ns_ulogger);
            }
            if (DbAccess.hasBearing(cursor)) {
                writeTag(serializer, "bearing", DbAccess.getBearing(cursor), ns_ulogger);
            }
            if (DbAccess.hasProvider(cursor)) {
                writeTag(serializer, "provider", DbAccess.getProvider(cursor), ns_ulogger);
            }
            serializer.endTag(null, "extensions");
            serializer.endTag(null, "trkpt");
        }
        serializer.endTag(null, "trkseg");
    }
}
* Write <trkseg> tag
*
* @param serializer XmlSerializer
* @throws IOException IO exception
* @throws IllegalArgumentException Xml illegal argument
* @throws IllegalStateException Xml illegal state
*/ | Write tag | writePositions | {
"repo_name": "bfabiszewski/ulogger-android",
"path": "app/src/main/java/net/fabiszewski/ulogger/GpxExportTask.java",
"license": "gpl-3.0",
"size": 10326
} | [
"android.database.Cursor",
"androidx.annotation.NonNull",
"java.io.IOException",
"org.xmlpull.v1.XmlSerializer"
] | import android.database.Cursor; import androidx.annotation.NonNull; import java.io.IOException; import org.xmlpull.v1.XmlSerializer; | import android.database.*; import androidx.annotation.*; import java.io.*; import org.xmlpull.v1.*; | [
"android.database",
"androidx.annotation",
"java.io",
"org.xmlpull.v1"
] | android.database; androidx.annotation; java.io; org.xmlpull.v1; | 2,099,034 |
/**
 * Sets the repository used for looking up market data provider specifications by name.
 *
 * @deprecated use liveMarketDataProviderFactory
 * @param marketDataSpecificationRepository the new value of the property
 */
@Deprecated
public void setMarketDataSpecificationRepository(NamedMarketDataSpecificationRepository marketDataSpecificationRepository) {
    this._marketDataSpecificationRepository = marketDataSpecificationRepository;
}
* Sets for looking up market data provider specifications by name.
*
* @deprecated use liveMarketDataProviderFactory
* @param marketDataSpecificationRepository the new value of the property
*/ | Sets for looking up market data provider specifications by name | setMarketDataSpecificationRepository | {
"repo_name": "McLeodMoores/starling",
"path": "projects/component-rest/src/main/java/com/opengamma/component/factory/web/WebsiteBasicsComponentFactory.java",
"license": "apache-2.0",
"size": 73331
} | [
"com.opengamma.engine.marketdata.NamedMarketDataSpecificationRepository"
] | import com.opengamma.engine.marketdata.NamedMarketDataSpecificationRepository; | import com.opengamma.engine.marketdata.*; | [
"com.opengamma.engine"
] | com.opengamma.engine; | 394,330 |
/**
 * Returns the attribute bgpVrfAfs.
 *
 * @return value of bgpVrfAfs
 */
BgpVrfAfs bgpVrfAfs();
interface BgpVrfBuilder { | BgpVrfAfs bgpVrfAfs(); interface BgpVrfBuilder { | /**
* Returns the attribute bgpVrfAfs.
*
* @return value of bgpVrfAfs
*/ | Returns the attribute bgpVrfAfs | bgpVrfAfs | {
"repo_name": "mengmoya/onos",
"path": "apps/l3vpn/nel3vpn/nemgr/src/main/java/org/onosproject/yang/gen/v1/ne/bgpcomm/rev20141225/nebgpcomm/bgpcomm/bgpvrfs/BgpVrf.java",
"license": "apache-2.0",
"size": 2118
} | [
"org.onosproject.yang.gen.v1.ne.bgpcomm.rev20141225.nebgpcomm.bgpcomm.bgpvrfs.bgpvrf.BgpVrfAfs"
] | import org.onosproject.yang.gen.v1.ne.bgpcomm.rev20141225.nebgpcomm.bgpcomm.bgpvrfs.bgpvrf.BgpVrfAfs; | import org.onosproject.yang.gen.v1.ne.bgpcomm.rev20141225.nebgpcomm.bgpcomm.bgpvrfs.bgpvrf.*; | [
"org.onosproject.yang"
] | org.onosproject.yang; | 2,795,909 |
/**
 * Amount information.
 *
 * @return the amount
 */
@ApiModelProperty(example = "null", value = "Amount information")
public Float getAmount() {
    return amount;
}
* Amount information
* @return amount
**/ | Amount information | getAmount | {
"repo_name": "SDU-Software-Engineering/opn",
"path": "ws/BankingJava/src/main/java/dk/sdu/mmmi/opn/swaggerbank/model/DepositOrWithdrawDTO.java",
"license": "gpl-3.0",
"size": 3938
} | [
"io.swagger.annotations.ApiModelProperty"
] | import io.swagger.annotations.ApiModelProperty; | import io.swagger.annotations.*; | [
"io.swagger.annotations"
] | io.swagger.annotations; | 895,414 |
/**
 * Converts a collection of GrantedAuthority objects to a Set of their
 * String representations.
 *
 * @param userAuthorities the authorities to convert
 * @return a Set of the Strings obtained from each call to GrantedAuthority.getAuthority()
 */
public static Set<String> authorityListToSet(Collection<? extends GrantedAuthority> userAuthorities) {
    // Pre-size the set to avoid rehashing while copying.
    Set<String> authorityNames = new HashSet<String>(userAuthorities.size());
    for (GrantedAuthority grantedAuthority : userAuthorities) {
        authorityNames.add(grantedAuthority.getAuthority());
    }
    return authorityNames;
}
| static Set<String> function(Collection<? extends GrantedAuthority> userAuthorities) { Set<String> set = new HashSet<String>(userAuthorities.size()); for (GrantedAuthority authority: userAuthorities) { set.add(authority.getAuthority()); } return set; } | /**
* Converts an array of GrantedAuthority objects to a Set.
* @return a Set of the Strings obtained from each call to GrantedAuthority.getAuthority()
*/ | Converts an array of GrantedAuthority objects to a Set | authorityListToSet | {
"repo_name": "xushaomin/apple-security",
"path": "apple-security-auth/src/main/java/com/appleframework/security/auth/utils/AuthorityUtils.java",
"license": "apache-2.0",
"size": 2042
} | [
"com.appleframework.security.core.auth.GrantedAuthority",
"java.util.Collection",
"java.util.HashSet",
"java.util.Set"
] | import com.appleframework.security.core.auth.GrantedAuthority; import java.util.Collection; import java.util.HashSet; import java.util.Set; | import com.appleframework.security.core.auth.*; import java.util.*; | [
"com.appleframework.security",
"java.util"
] | com.appleframework.security; java.util; | 2,262,790 |
/**
 * Template method for handling a content overflow: invoked when the cached
 * request body exceeds the configured limit. The default implementation is
 * a no-op; subclasses may override to raise a payload-too-large error.
 *
 * @param contentCacheLimit the maximum number of bytes to cache per request
 *        which has just been exceeded
 */
protected void handleContentOverflow(int contentCacheLimit) {
}
private class ContentCachingInputStream extends ServletInputStream {
private final ServletInputStream is;
private boolean overflow = false;
public ContentCachingInputStream(ServletInputStream is) {
this.is = is;
} | void function(int contentCacheLimit) { } private class ContentCachingInputStream extends ServletInputStream { private final ServletInputStream is; private boolean overflow = false; public ContentCachingInputStream(ServletInputStream is) { this.is = is; } | /**
* Template method for handling a content overflow: specifically, a request
* body being read that exceeds the specified content cache limit.
* <p>The default implementation is empty. Subclasses may override this to
* throw a payload-too-large exception or the like.
* @param contentCacheLimit the maximum number of bytes to cache per request
* which has just been exceeded
* @since 4.3.6
* @see #ContentCachingRequestWrapper(HttpServletRequest, int)
*/ | Template method for handling a content overflow: specifically, a request body being read that exceeds the specified content cache limit. The default implementation is empty. Subclasses may override this to throw a payload-too-large exception or the like | handleContentOverflow | {
"repo_name": "lamsfoundation/lams",
"path": "3rdParty_sources/spring/org/springframework/web/util/ContentCachingRequestWrapper.java",
"license": "gpl-2.0",
"size": 7016
} | [
"javax.servlet.ServletInputStream"
] | import javax.servlet.ServletInputStream; | import javax.servlet.*; | [
"javax.servlet"
] | javax.servlet; | 1,254,445 |
/**
 * Positive test: a two-byte bit string ("C465" preceded by a zero
 * unused-bits octet) must report 16 bits with the expected values.
 */
@Test
public void testAsn1BitString_00C465() {
	Asn1BitString testObject = new Asn1BitString(HexString.toByteArray("030300C465"));

	// Expected bit values for indices 0..15, identical to the original
	// one-assert-per-bit version.
	boolean[] expectedBits = {
			true, false, true, false, false, true, true, false,
			false, false, true, false, false, false, true, true
	};

	assertEquals(expectedBits.length, testObject.getNumberOfBits());
	for (int i = 0; i < expectedBits.length; i++) {
		assertEquals("bit " + i, expectedBits[i], testObject.getBit(i));
	}
}
| void function() { Asn1BitString testObject = new Asn1BitString(HexString.toByteArray(STR)); assertEquals(16 , testObject.getNumberOfBits()); assertEquals(true , testObject.getBit(0)); assertEquals(false, testObject.getBit(1)); assertEquals(true , testObject.getBit(2)); assertEquals(false, testObject.getBit(3)); assertEquals(false, testObject.getBit(4)); assertEquals(true , testObject.getBit(5)); assertEquals(true , testObject.getBit(6)); assertEquals(false, testObject.getBit(7)); assertEquals(false, testObject.getBit(8)); assertEquals(false, testObject.getBit(9)); assertEquals(true , testObject.getBit(10)); assertEquals(false, testObject.getBit(11)); assertEquals(false, testObject.getBit(12)); assertEquals(false, testObject.getBit(13)); assertEquals(true , testObject.getBit(14)); assertEquals(true , testObject.getBit(15)); } | /**
* positive test: two bytes
*/ | positive test: two bytes | testAsn1BitString_00C465 | {
"repo_name": "PersoSim/de.persosim.simulator",
"path": "de.persosim.simulator.test/src/de/persosim/simulator/tlv/Asn1BitStringTest.java",
"license": "gpl-3.0",
"size": 4721
} | [
"de.persosim.simulator.utils.HexString",
"org.junit.Assert"
] | import de.persosim.simulator.utils.HexString; import org.junit.Assert; | import de.persosim.simulator.utils.*; import org.junit.*; | [
"de.persosim.simulator",
"org.junit"
] | de.persosim.simulator; org.junit; | 2,510,040 |
/**
 * Configures and runs one map-reduce iteration of the classifier job.
 *
 * On the first iteration (iteration == 0) the Weka libraries are installed
 * into HDFS; on later iterations the previously built model is (optionally)
 * staged into the distributed cache and training continues from it.
 *
 * @param iteration zero-based iteration number
 * @param stageIntermediateClassifier whether to copy the previous model to a
 *          staging directory before it is wiped with the output directory
 * @param conf Hadoop configuration to use; a fresh one is created when null
 * @return true if the Hadoop job completed successfully
 * @throws IOException on HDFS/IO problems
 * @throws DistributedWekaException on option-parsing or job-setup problems
 */
protected boolean performIteration(int iteration,
  boolean stageIntermediateClassifier, Configuration conf)
  throws IOException, DistributedWekaException {
  // Strip any " - iteration: ..." suffix appended by a previous iteration
  // so the suffix is not accumulated.
  String jobName = environmentSubstitute(getJobName());
  if (jobName.lastIndexOf(" - ") > 0) {
    jobName = jobName.substring(0, jobName.lastIndexOf(" - "));
  }

  // WekaClassifierMapTask tmpMap = new WekaClassifierMapTask();
  // String classifierPlusOptions = "";
  // try {
  // tmpMap.setOptions(Utils.splitOptions(getClassifierMapTaskOptions()));
  // Classifier c = tmpMap.getClassifier();
  // classifierPlusOptions = c.getClass().getName();
  // if (c instanceof OptionHandler) {
  // classifierPlusOptions += " "
  // + Utils.joinOptions(((OptionHandler) c).getOptions());
  // }
  // } catch (Exception ex) {
  // throw new DistributedWekaException(ex);
  // }

  // add the aggregated ARFF header to the distributed cache
  String pathToHeader =
    environmentSubstitute(m_arffHeaderJob.getAggregatedHeaderPath());

  if (conf == null) {
    conf = new Configuration();
  }

  HDFSUtils.addFileToDistributedCache(m_mrConfig.getHDFSConfig(), conf,
    pathToHeader, m_env);

  // Mappers see only the file name inside the distributed cache.
  String fileNameOnly =
    pathToHeader.substring(pathToHeader.lastIndexOf("/") + 1,
      pathToHeader.length());

  // Assemble the option list passed down to the classifier map task.
  List<String> classifierMapOptions = new ArrayList<String>();

  classifierMapOptions.add("-arff-header");
  classifierMapOptions.add(fileNameOnly);

  if (!DistributedJobConfig.isEmpty(getClassAttribute())) {
    classifierMapOptions.add("-class");
    classifierMapOptions.add(environmentSubstitute(getClassAttribute()));
  }

  if (!DistributedJobConfig.isEmpty(getPathToPreconstructedFilter())) {
    String filterFilenameOnly = handlePreconstructedFilter(conf);
    classifierMapOptions.add("-preconstructed-filter");
    classifierMapOptions.add(filterFilenameOnly);
  }

  if (iteration > 0) {
    // Continue training the (updateable) model from the previous iteration.
    classifierMapOptions.add("-continue-training-updateable");

    if (stageIntermediateClassifier) {
      // Add the model from the previous iteration to the
      // distributed cache. Need to first copy it to a staging
      // directory (since it will be in our output directory
      // and will be deleted when we clean output before launching)
      stageIntermediateClassifier(conf);
    }

    classifierMapOptions.add("-model-file-name");
    classifierMapOptions.add(environmentSubstitute(getModelFileName()));
  }

  // Append any user-supplied map task options verbatim.
  if (!DistributedJobConfig.isEmpty(getClassifierMapTaskOptions())) {
    try {
      String cmo = environmentSubstitute(getClassifierMapTaskOptions());
      String[] parts = Utils.splitOptions(cmo);
      for (String p : parts) {
        classifierMapOptions.add(p);
      }
    } catch (Exception ex) {
      throw new DistributedWekaException(ex);
    }
  }

  m_mrConfig.setUserSuppliedProperty(
    WekaClassifierHadoopMapper.CLASSIFIER_MAP_TASK_OPTIONS,
    environmentSubstitute(Utils.joinOptions(classifierMapOptions
      .toArray(new String[classifierMapOptions.size()]))));

  setJobName(jobName
    + " - iteration: "
    + (iteration + 1)
    + " "
    + Utils.joinOptions(classifierMapOptions
      .toArray(new String[classifierMapOptions.size()])));

  // Need these for row parsing via open-csv
  m_mrConfig.setUserSuppliedProperty(
    CSVToArffHeaderHadoopMapper.CSV_TO_ARFF_HEADER_MAP_TASK_OPTIONS,
    environmentSubstitute(getCSVMapTaskOptions()));

  // install the weka libraries and any user-selected packages
  // to HDFS and add to the distributed cache/classpath for
  // the job
  if (iteration == 0) {
    installWekaLibrariesInHDFS(conf);
  } else {
    addWekaLibrariesToClasspath(conf);
    addWekaPackageLibrariesToClasspath(
      determinePackageJars(getAdditionalWekaPackageNames(m_mrConfig), true),
      conf);
  }

  Job job = null;
  try {
    job = m_mrConfig.configureForHadoop(getJobName(), conf, m_env);
  } catch (ClassNotFoundException e) {
    throw new DistributedWekaException(e);
  }

  // Output must be empty before Hadoop will accept the job.
  cleanOutputDirectory(job);

  statusMessage("Submitting iteration " + (iteration + 1) + " of job: "
    + getJobName());
  logMessage("Submitting iteration " + (iteration + 1) + " of job: "
    + getJobName());

  boolean success = runJob(job);

  if (!success) {
    statusMessage("Weka classifier job failed - check logs on Hadoop");
    logMessage("Weka classifier job failed - check logs on Hadoop");
  }

  return success;
}
String[classifierMapOptions.size()]))); m_mrConfig.setUserSuppliedProperty( CSVToArffHeaderHadoopMapper.CSV_TO_ARFF_HEADER_MAP_TASK_OPTIONS, environmentSubstitute(getCSVMapTaskOptions())); if (iteration == 0) { installWekaLibrariesInHDFS(conf); } else { addWekaLibrariesToClasspath(conf); addWekaPackageLibrariesToClasspath( determinePackageJars(getAdditionalWekaPackageNames(m_mrConfig), true), conf); } Job job = null; try { job = m_mrConfig.configureForHadoop(getJobName(), conf, m_env); } catch (ClassNotFoundException e) { throw new DistributedWekaException(e); } cleanOutputDirectory(job); statusMessage(STR + (iteration + 1) + STR + getJobName()); logMessage(STR + (iteration + 1) + STR + getJobName()); boolean success = runJob(job); if (!success) { statusMessage(STR); logMessage(STR); } return success; } | /**
* Perform an iteration of the model building phase
*
* @param iteration the iteration to perform
* @param stageIntermediateClassifier true if the intermediate classifier from
* the last iteration should be pushed out to the nodes via the
* distributed cache
* @param conf the Configuration of the job
* @return true if the job succeeds
* @throws IOException if a problem occurs
* @throws DistributedWekaException if a problem occurs
*/ | Perform an iteration of the model building phase | performIteration | {
"repo_name": "mydzigear/weka.kmeanspp.silhouette_score",
"path": "wekafiles/packages/distributedWekaHadoopCore/src/main/java/weka/distributed/hadoop/WekaClassifierHadoopJob.java",
"license": "gpl-3.0",
"size": 37154
} | [
"java.io.IOException",
"java.util.ArrayList",
"java.util.List",
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.mapreduce.Job"
] | import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.Job; | import java.io.*; import java.util.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.mapreduce.*; | [
"java.io",
"java.util",
"org.apache.hadoop"
] | java.io; java.util; org.apache.hadoop; | 1,992,288 |
public Observable<ServiceResponse<SyncMemberInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String serverName, String databaseName, String syncGroupName, String syncMemberName, SyncMemberInner parameters) {
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (serverName == null) {
throw new IllegalArgumentException("Parameter serverName is required and cannot be null.");
}
if (databaseName == null) {
throw new IllegalArgumentException("Parameter databaseName is required and cannot be null.");
}
if (syncGroupName == null) {
throw new IllegalArgumentException("Parameter syncGroupName is required and cannot be null.");
}
if (syncMemberName == null) {
throw new IllegalArgumentException("Parameter syncMemberName is required and cannot be null.");
}
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (parameters == null) {
throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
}
if (this.client.apiVersion() == null) {
throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
}
Validator.validate(parameters);
Observable<Response<ResponseBody>> observable = service.createOrUpdate(resourceGroupName, serverName, databaseName, syncGroupName, syncMemberName, this.client.subscriptionId(), parameters, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent());
return client.getAzureClient().getPutOrPatchResultAsync(observable, new TypeToken<SyncMemberInner>() { }.getType());
} | Observable<ServiceResponse<SyncMemberInner>> function(String resourceGroupName, String serverName, String databaseName, String syncGroupName, String syncMemberName, SyncMemberInner parameters) { if (resourceGroupName == null) { throw new IllegalArgumentException(STR); } if (serverName == null) { throw new IllegalArgumentException(STR); } if (databaseName == null) { throw new IllegalArgumentException(STR); } if (syncGroupName == null) { throw new IllegalArgumentException(STR); } if (syncMemberName == null) { throw new IllegalArgumentException(STR); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException(STR); } if (parameters == null) { throw new IllegalArgumentException(STR); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException(STR); } Validator.validate(parameters); Observable<Response<ResponseBody>> observable = service.createOrUpdate(resourceGroupName, serverName, databaseName, syncGroupName, syncMemberName, this.client.subscriptionId(), parameters, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent()); return client.getAzureClient().getPutOrPatchResultAsync(observable, new TypeToken<SyncMemberInner>() { }.getType()); } | /**
* Creates or updates a sync member.
*
* @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
* @param serverName The name of the server.
* @param databaseName The name of the database on which the sync group is hosted.
* @param syncGroupName The name of the sync group on which the sync member is hosted.
* @param syncMemberName The name of the sync member.
* @param parameters The requested sync member resource state.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable for the request
*/ | Creates or updates a sync member | createOrUpdateWithServiceResponseAsync | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/sql/mgmt-v2015_05_01_preview/src/main/java/com/microsoft/azure/management/sql/v2015_05_01_preview/implementation/SyncMembersInner.java",
"license": "mit",
"size": 104025
} | [
"com.google.common.reflect.TypeToken",
"com.microsoft.rest.ServiceResponse",
"com.microsoft.rest.Validator"
] | import com.google.common.reflect.TypeToken; import com.microsoft.rest.ServiceResponse; import com.microsoft.rest.Validator; | import com.google.common.reflect.*; import com.microsoft.rest.*; | [
"com.google.common",
"com.microsoft.rest"
] | com.google.common; com.microsoft.rest; | 141,930 |
public void waitTableDisabled(byte[] table, long timeoutMillis)
throws InterruptedException, IOException {
waitTableDisabled(getHBaseAdmin(), table, timeoutMillis);
} | void function(byte[] table, long timeoutMillis) throws InterruptedException, IOException { waitTableDisabled(getHBaseAdmin(), table, timeoutMillis); } | /**
* Waits for a table to be 'disabled'. Disabled means that table is set as 'disabled'
* @see #waitTableAvailable(byte[])
* @param table Table to wait on.
* @param timeoutMillis Time to wait on it being marked disabled.
* @throws InterruptedException
* @throws IOException
*/ | Waits for a table to be 'disabled'. Disabled means that table is set as 'disabled' | waitTableDisabled | {
"repo_name": "toshimasa-nasu/hbase",
"path": "hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java",
"license": "apache-2.0",
"size": 134883
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,981,162 |
public static <T> String stringImg(T[] v) {
if(v == null) return "null";
StringBuffer buffer = new StringBuffer();
Arrays.sort(v, new UComp<T>());
for(int i=0; i<v.length; i++) {
buffer.append(v[i]);
buffer.append("\n");
}
return buffer.toString();
} | static <T> String function(T[] v) { if(v == null) return "null"; StringBuffer buffer = new StringBuffer(); Arrays.sort(v, new UComp<T>()); for(int i=0; i<v.length; i++) { buffer.append(v[i]); buffer.append("\n"); } return buffer.toString(); } | /** Returns a string representation of all elements from an array,
in increasing lexicographic order of their string
representations. */ | Returns a string representation of all elements from an array | stringImg | {
"repo_name": "protegeproject/jpaul",
"path": "src/main/java/jpaul/Misc/Debug.java",
"license": "bsd-3-clause",
"size": 1865
} | [
"java.util.Arrays"
] | import java.util.Arrays; | import java.util.*; | [
"java.util"
] | java.util; | 579,756 |
public static int getMonth(Date date) {
// This is zero-based (Why?.. I don't know.)
return dateToCalendar(date).get(Calendar.MONTH) + 1;
} | static int function(Date date) { return dateToCalendar(date).get(Calendar.MONTH) + 1; } | /**
* The month of the year.
*
* @param date The date
*
* @return The month of the year, in the range 1-12
*/ | The month of the year | getMonth | {
"repo_name": "tcmoore32/sheer-madness",
"path": "gosu-core-api/src/main/java/gw/date/GosuDateUtil.java",
"license": "apache-2.0",
"size": 7501
} | [
"java.util.Calendar",
"java.util.Date"
] | import java.util.Calendar; import java.util.Date; | import java.util.*; | [
"java.util"
] | java.util; | 1,768,427 |
public void setDateOfComment(Date dateOfComment) {
this.dateOfComment = dateOfComment;
}
| void function(Date dateOfComment) { this.dateOfComment = dateOfComment; } | /**
* Setter for attribute 'dateOfComment'.
* @param dateOfComment
* new value for 'dateOfComment '
*/ | Setter for attribute 'dateOfComment' | setDateOfComment | {
"repo_name": "doanduyhai/killrvideo-java",
"path": "src/main/java/killrvideo/entity/CommentsByVideo.java",
"license": "apache-2.0",
"size": 5319
} | [
"java.util.Date"
] | import java.util.Date; | import java.util.*; | [
"java.util"
] | java.util; | 1,380 |
public DateTimeField secondOfMinute() {
return UnsupportedDateTimeField.getInstance(DateTimeFieldType.secondOfMinute(), seconds());
} | DateTimeField function() { return UnsupportedDateTimeField.getInstance(DateTimeFieldType.secondOfMinute(), seconds()); } | /**
* Get the second of minute field for this chronology.
*
* @return DateTimeField or UnsupportedDateTimeField if unsupported
*/ | Get the second of minute field for this chronology | secondOfMinute | {
"repo_name": "aparo/scalajs-joda",
"path": "src/main/scala/org/joda/time/chrono/BaseChronology.java",
"license": "apache-2.0",
"size": 24594
} | [
"org.joda.time.DateTimeField",
"org.joda.time.DateTimeFieldType",
"org.joda.time.field.UnsupportedDateTimeField"
] | import org.joda.time.DateTimeField; import org.joda.time.DateTimeFieldType; import org.joda.time.field.UnsupportedDateTimeField; | import org.joda.time.*; import org.joda.time.field.*; | [
"org.joda.time"
] | org.joda.time; | 1,469,643 |
public Collection<ForeignKeyConstraint> getForeignKeys() {
return Collections.unmodifiableCollection(foreignKeys.values());
} | Collection<ForeignKeyConstraint> function() { return Collections.unmodifiableCollection(foreignKeys.values()); } | /**
* Get the foreign keys associated with this table
*
* @return
*/ | Get the foreign keys associated with this table | getForeignKeys | {
"repo_name": "pellcorp/schemaspy",
"path": "src/main/java/net/sourceforge/schemaspy/model/Table.java",
"license": "lgpl-2.1",
"size": 40712
} | [
"java.util.Collection",
"java.util.Collections"
] | import java.util.Collection; import java.util.Collections; | import java.util.*; | [
"java.util"
] | java.util; | 1,262,276 |
@SuppressLint("NewApi")
public static void tryAccessibilityAnnounce(View view, CharSequence text) {
if (isJellybeanOrLater() && view != null && text != null) {
view.announceForAccessibility(text);
}
} | @SuppressLint(STR) static void function(View view, CharSequence text) { if (isJellybeanOrLater() && view != null && text != null) { view.announceForAccessibility(text); } } | /**
* Try to speak the specified text, for accessibility. Only available on JB or later.
* @param text Text to announce.
*/ | Try to speak the specified text, for accessibility. Only available on JB or later | tryAccessibilityAnnounce | {
"repo_name": "KouChengjian/CamelCrown_Map",
"path": "CamelCrown/CamelCrown/src/mirko/android/datetimepicker/Utils.java",
"license": "gpl-2.0",
"size": 4955
} | [
"android.annotation.SuppressLint",
"android.view.View"
] | import android.annotation.SuppressLint; import android.view.View; | import android.annotation.*; import android.view.*; | [
"android.annotation",
"android.view"
] | android.annotation; android.view; | 30,814 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.