method (string, 13-441k chars) | clean_method (string, 7-313k chars) | doc (string, 17-17.3k chars) | comment (string, 3-1.42k chars) | method_name (string, 1-273 chars) | extra (dict) | imports (sequence) | imports_info (string, 19-34.8k chars) | cluster_imports_info (string, 15-3.66k chars) | libraries (sequence) | libraries_info (string, 6-661 chars) | id (int64, 0-2.92M)
---|---|---|---|---|---|---|---|---|---|---|---|
public boolean isAbstractSelfOrSuperDelegation() {
return !isSelfDelegation() ||
extending instanceof Constructor && ((Constructor)extending).isAbstract();
} | boolean function() { return !isSelfDelegation() || extending instanceof Constructor && ((Constructor)extending).isAbstract(); } | /**
* true if this delegation is delegating to a superclass initializer or constructor,
* or is delegating to an abstract constructor of the same class
* @return
*/ | true if this delegation is delegating to a superclass initializer or constructor, or is delegating to an abstract constructor of the same class | isAbstractSelfOrSuperDelegation | {
"repo_name": "ceylon/ceylon",
"path": "compiler-java/src/org/eclipse/ceylon/compiler/java/codegen/CtorDelegation.java",
"license": "apache-2.0",
"size": 5665
} | [
"org.eclipse.ceylon.model.typechecker.model.Constructor"
] | import org.eclipse.ceylon.model.typechecker.model.Constructor; | import org.eclipse.ceylon.model.typechecker.model.*; | [
"org.eclipse.ceylon"
] | org.eclipse.ceylon; | 535,522 |
private void removeAllowedValue(TypeDescription td, AllowedValue av) {
td.setAllowedValues((AllowedValue[]) Utility.removeEqualElementFromArray(td.getAllowedValues(),
av, AllowedValue.class));
} | void function(TypeDescription td, AllowedValue av) { td.setAllowedValues((AllowedValue[]) Utility.removeEqualElementFromArray(td.getAllowedValues(), av, AllowedValue.class)); } | /**
* Removes the allowed value.
*
* @param td
* - local or merged (2 callers)
* @param av
* the av
*/ | Removes the allowed value | removeAllowedValue | {
"repo_name": "apache/uima-uimaj",
"path": "uimaj-ep-configurator/src/main/java/org/apache/uima/taeconfigurator/editors/ui/TypeSection.java",
"license": "apache-2.0",
"size": 67505
} | [
"org.apache.uima.resource.metadata.AllowedValue",
"org.apache.uima.resource.metadata.TypeDescription"
] | import org.apache.uima.resource.metadata.AllowedValue; import org.apache.uima.resource.metadata.TypeDescription; | import org.apache.uima.resource.metadata.*; | [
"org.apache.uima"
] | org.apache.uima; | 1,936,434 |
public static Double rubyToPig(RubyFloat rbObject) {
return rbObject.getDoubleValue();
} | static Double function(RubyFloat rbObject) { return rbObject.getDoubleValue(); } | /**
* A type specific conversion routine.
*
* @param rbObject object to convert
* @return analogous Pig type
*/ | A type specific conversion routine | rubyToPig | {
"repo_name": "ljl1988com/pig",
"path": "src/org/apache/pig/scripting/jruby/PigJrubyLibrary.java",
"license": "apache-2.0",
"size": 17340
} | [
"org.jruby.RubyFloat"
] | import org.jruby.RubyFloat; | import org.jruby.*; | [
"org.jruby"
] | org.jruby; | 1,315,160 |
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<Response<Flux<ByteBuffer>>> updateByIdWithResponseAsync(
String resourceId, String apiVersion, GenericResourceInner parameters, Context context) {
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (resourceId == null) {
return Mono.error(new IllegalArgumentException("Parameter resourceId is required and cannot be null."));
}
if (apiVersion == null) {
return Mono.error(new IllegalArgumentException("Parameter apiVersion is required and cannot be null."));
}
if (parameters == null) {
return Mono.error(new IllegalArgumentException("Parameter parameters is required and cannot be null."));
} else {
parameters.validate();
}
final String accept = "application/json";
context = this.client.mergeContext(context);
return service.updateById(this.client.getEndpoint(), resourceId, apiVersion, parameters, accept, context);
} | @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<Flux<ByteBuffer>>> function( String resourceId, String apiVersion, GenericResourceInner parameters, Context context) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( STR)); } if (resourceId == null) { return Mono.error(new IllegalArgumentException(STR)); } if (apiVersion == null) { return Mono.error(new IllegalArgumentException(STR)); } if (parameters == null) { return Mono.error(new IllegalArgumentException(STR)); } else { parameters.validate(); } final String accept = STR; context = this.client.mergeContext(context); return service.updateById(this.client.getEndpoint(), resourceId, apiVersion, parameters, accept, context); } | /**
* Updates a resource by ID.
*
* @param resourceId The fully qualified ID of the resource, including the resource name and resource type. Use the
* format,
* /subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
* @param apiVersion The API version to use for the operation.
* @param parameters Update resource parameters.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return resource information along with {@link Response} on successful completion of {@link Mono}.
*/ | Updates a resource by ID | updateByIdWithResponseAsync | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-resources/src/main/java/com/azure/resourcemanager/resources/implementation/ResourcesClientImpl.java",
"license": "mit",
"size": 230225
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.Response",
"com.azure.core.util.Context",
"com.azure.resourcemanager.resources.fluent.models.GenericResourceInner",
"java.nio.ByteBuffer"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.Response; import com.azure.core.util.Context; import com.azure.resourcemanager.resources.fluent.models.GenericResourceInner; import java.nio.ByteBuffer; | import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.core.util.*; import com.azure.resourcemanager.resources.fluent.models.*; import java.nio.*; | [
"com.azure.core",
"com.azure.resourcemanager",
"java.nio"
] | com.azure.core; com.azure.resourcemanager; java.nio; | 573,617 |
public List<CountryDTO> findAll(); | List<CountryDTO> function(); | /**
* get all the countrys.
* @return the list of entities
*/ | get all the countrys | findAll | {
"repo_name": "sandor-balazs/nosql-java",
"path": "mongodb/src/main/java/com/github/sandor_balazs/nosql_java/service/CountryService.java",
"license": "bsd-2-clause",
"size": 773
} | [
"com.github.sandor_balazs.nosql_java.web.rest.dto.CountryDTO",
"java.util.List"
] | import com.github.sandor_balazs.nosql_java.web.rest.dto.CountryDTO; import java.util.List; | import com.github.sandor_balazs.nosql_java.web.rest.dto.*; import java.util.*; | [
"com.github.sandor_balazs",
"java.util"
] | com.github.sandor_balazs; java.util; | 1,342,966 |
public EpollSocketChannelConfig setTcpFastOpenConnect(boolean fastOpenConnect) {
try {
channel.socket.setTcpFastOpenConnect(fastOpenConnect);
return this;
} catch (IOException e) {
throw new ChannelException(e);
}
} | EpollSocketChannelConfig function(boolean fastOpenConnect) { try { channel.socket.setTcpFastOpenConnect(fastOpenConnect); return this; } catch (IOException e) { throw new ChannelException(e); } } | /**
* Set the {@code TCP_FASTOPEN_CONNECT} option on the socket. Requires Linux kernel 4.11 or later.
* See
* <a href="https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/commit/?id=19f6d3f3">this commit</a>
* for more details.
*/ | Set the TCP_FASTOPEN_CONNECT option on the socket. Requires Linux kernel 4.11 or later. See this commit for more details | setTcpFastOpenConnect | {
"repo_name": "ngocdaothanh/netty",
"path": "transport-native-epoll/src/main/java/io/netty/channel/epoll/EpollSocketChannelConfig.java",
"license": "apache-2.0",
"size": 20852
} | [
"io.netty.channel.ChannelException",
"java.io.IOException"
] | import io.netty.channel.ChannelException; import java.io.IOException; | import io.netty.channel.*; import java.io.*; | [
"io.netty.channel",
"java.io"
] | io.netty.channel; java.io; | 2,903,918 |
public static String findPropertyOrDie(Properties properties
, DataStore<?, ?> store, String baseKey) throws IOException {
String val = findProperty(properties, store, baseKey, null);
if(val == null) {
throw new IOException("Property with base name \""+baseKey+"\" could not be found, make " +
"sure to include this property in gora.properties file");
}
return val;
} | static String function(Properties properties , DataStore<?, ?> store, String baseKey) throws IOException { String val = findProperty(properties, store, baseKey, null); if(val == null) { throw new IOException(STRSTR\STR + STR); } return val; } | /**
* Tries to find a property with the given baseKey. First the property
* key constructed as "gora.<classname>.<baseKey>" is searched.
* If not found, the property keys for all superclasses is recursively
* tested. Lastly, the property key constructed as
* "gora.datastore.<baseKey>" is searched.
* @return the first found value, or throws IOException
*/ | Tries to find a property with the given baseKey. First the property key constructed as "gora.<classname>.<baseKey>" is searched. If not found, the property keys for all superclasses is recursively tested. Lastly, the property key constructed as "gora.datastore.<baseKey>" is searched | findPropertyOrDie | {
"repo_name": "enis/gora",
"path": "gora-core/src/main/java/org/gora/store/DataStoreFactory.java",
"license": "apache-2.0",
"size": 10884
} | [
"java.io.IOException",
"java.util.Properties"
] | import java.io.IOException; import java.util.Properties; | import java.io.*; import java.util.*; | [
"java.io",
"java.util"
] | java.io; java.util; | 1,885,760 |
public File createNewDataFile(final InputStream stream,
final String url,
final String digest,
final String hint)
throws IOException {
// check if we already have this data
if ( digest != null ) {
synchronized ( this.digestCache ) {
final CacheEntry storedDigest = this.digestCache.get(url);
if ( storedDigest != null && storedDigest.digest.equals(digest) ) {
return storedDigest.file;
}
}
}
final int pos = url.lastIndexOf('/');
final String name = url.substring(pos + 1);
final String filename = (hint == null ? "rsrc" : hint) + '-' + name + '-' + getNextSerialNumber() + ".ser";
//replace special characters from the filename that are not allowed by the OS
final String filename2 = filename.replaceAll("[\\*\"/\\\\\\[\\]\\:\\;\\|\\=\\,]+", "_"); // Windows
final File file = this.getDataFile(filename2);
this.copyToLocalStorage(stream, file);
if ( digest != null ) {
synchronized ( this.digestCache ) {
this.digestCache.put(url, new CacheEntry(file, digest));
}
}
return file;
} | File function(final InputStream stream, final String url, final String digest, final String hint) throws IOException { if ( digest != null ) { synchronized ( this.digestCache ) { final CacheEntry storedDigest = this.digestCache.get(url); if ( storedDigest != null && storedDigest.digest.equals(digest) ) { return storedDigest.file; } } } final int pos = url.lastIndexOf('/'); final String name = url.substring(pos + 1); final String filename = (hint == null ? "rsrc" : hint) + '-' + name + '-' + getNextSerialNumber() + ".ser"; final String filename2 = filename.replaceAll("[\\*\"/\\\\\\[\\]\\:\\;\\|\\=\\,]+STR_"); final File file = this.getDataFile(filename2); this.copyToLocalStorage(stream, file); if ( digest != null ) { synchronized ( this.digestCache ) { this.digestCache.put(url, new CacheEntry(file, digest)); } } return file; } | /**
* Create a new unique data file.
*/ | Create a new unique data file | createNewDataFile | {
"repo_name": "Nimco/sling",
"path": "installer/core/src/main/java/org/apache/sling/installer/core/impl/FileDataStore.java",
"license": "apache-2.0",
"size": 9846
} | [
"java.io.File",
"java.io.IOException",
"java.io.InputStream"
] | import java.io.File; import java.io.IOException; import java.io.InputStream; | import java.io.*; | [
"java.io"
] | java.io; | 1,010,764 |
@Override
public void write(int oneChar) throws IOException {
synchronized (lock) {
checkStatus();
CharBuffer chars = CharBuffer.wrap(new char[] { (char) oneChar });
convert(chars);
}
} | void function(int oneChar) throws IOException { synchronized (lock) { checkStatus(); CharBuffer chars = CharBuffer.wrap(new char[] { (char) oneChar }); convert(chars); } } | /**
* Writes out the character <code>oneChar</code> to this Writer. The
* low-order 2 bytes are immediately converted to bytes by the character
* converter and stored in a local buffer. If the buffer becomes full as a
* result of this write, this Writer is flushed.
*
* @param oneChar
* the character to write
*
* @throws IOException
* If this OutputStreamWriter has already been closed or some
* other IOException occurs.
*/ | Writes out the character <code>oneChar</code> to this Writer. The low-order 2 bytes are immediately converted to bytes by the character converter and stored in a local buffer. If the buffer becomes full as a result of this write, this Writer is flushed | write | {
"repo_name": "freeVM/freeVM",
"path": "enhanced/archive/classlib/java6/modules/luni/src/main/java/java/io/OutputStreamWriter.java",
"license": "apache-2.0",
"size": 11174
} | [
"java.nio.CharBuffer"
] | import java.nio.CharBuffer; | import java.nio.*; | [
"java.nio"
] | java.nio; | 619,967 |
private void addExternalComponents(QueryDescription query) {
if (isEnableExternalSearch && !query.isSearchBySpace()) {
for (ExternalSPConfigVO extServerCfg : this.externalServers) {
// Loop on each directory in order to add all the external components
List<String> filteredComponents = extServerCfg.getComponents();
if (filteredComponents != null && !filteredComponents.isEmpty()) {
browseExternalServerDirectory(query, extServerCfg);
}
}
}
} | void function(QueryDescription query) { if (isEnableExternalSearch && !query.isSearchBySpace()) { for (ExternalSPConfigVO extServerCfg : this.externalServers) { List<String> filteredComponents = extServerCfg.getComponents(); if (filteredComponents != null && !filteredComponents.isEmpty()) { browseExternalServerDirectory(query, extServerCfg); } } } } | /**
* Main method to add external components to a query description object
*
* @param query the query description used to build Lucene query
*/ | Main method to add external components to a query description object | addExternalComponents | {
"repo_name": "NicolasEYSSERIC/Silverpeas-Core",
"path": "war-core/src/main/java/com/stratelia/silverpeas/pdcPeas/control/PdcSearchSessionController.java",
"license": "agpl-3.0",
"size": 112160
} | [
"com.stratelia.silverpeas.pdcPeas.vo.ExternalSPConfigVO",
"java.util.List",
"org.silverpeas.search.searchEngine.model.QueryDescription"
] | import com.stratelia.silverpeas.pdcPeas.vo.ExternalSPConfigVO; import java.util.List; import org.silverpeas.search.searchEngine.model.QueryDescription; | import com.stratelia.silverpeas.*; import java.util.*; import org.silverpeas.search.*; | [
"com.stratelia.silverpeas",
"java.util",
"org.silverpeas.search"
] | com.stratelia.silverpeas; java.util; org.silverpeas.search; | 621,742 |
public InputStream getTemplate() throws IOException{
//System.out.println("[] get template");
// ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
ClassLoader classLoader = this.getClass().getClassLoader();
URL url = classLoader.getResource("/htdocs/template.html");
return url.openConnection().getInputStream();
} | InputStream function() throws IOException{ ClassLoader classLoader = this.getClass().getClassLoader(); URL url = classLoader.getResource(STR); return url.openConnection().getInputStream(); } | /** gets an html template which is the content of the pages written to the browser
* @throws IOException if an exception is thrown
* @return the template
*/ | gets an html template which is the content of the pages written to the browser | getTemplate | {
"repo_name": "apache/openejb",
"path": "server/openejb-webadmin/src/main/java/org/apache/openejb/webadmin/WebAdminBean.java",
"license": "apache-2.0",
"size": 18553
} | [
"java.io.IOException",
"java.io.InputStream"
] | import java.io.IOException; import java.io.InputStream; | import java.io.*; | [
"java.io"
] | java.io; | 467,228 |
static boolean performAction(ActionEvent e){
ActionRouter.getInstance().doActionNow(new ActionEvent(e.getSource(), e.getID(), ActionNames.CHECK_DIRTY));
GuiPackage guiPackage = GuiPackage.getInstance();
if (guiPackage.isDirty()) {
int response;
if ((response=JOptionPane.showConfirmDialog(GuiPackage.getInstance().getMainFrame(),
JMeterUtils.getResString("cancel_new_to_save"), // $NON-NLS-1$
JMeterUtils.getResString("save?"), // $NON-NLS-1$
JOptionPane.YES_NO_CANCEL_OPTION,
JOptionPane.QUESTION_MESSAGE)) == JOptionPane.YES_OPTION) {
ActionRouter.getInstance().doActionNow(new ActionEvent(e.getSource(), e.getID(), ActionNames.SAVE));
}
if (response == JOptionPane.CLOSED_OPTION || response == JOptionPane.CANCEL_OPTION) {
return false; // Don't clear the plan
}
}
ActionRouter.getInstance().doActionNow(new ActionEvent(e.getSource(), e.getID(), ActionNames.STOP_THREAD));
closeProject(e);
return true;
}
| static boolean performAction(ActionEvent e){ ActionRouter.getInstance().doActionNow(new ActionEvent(e.getSource(), e.getID(), ActionNames.CHECK_DIRTY)); GuiPackage guiPackage = GuiPackage.getInstance(); if (guiPackage.isDirty()) { int response; if ((response=JOptionPane.showConfirmDialog(GuiPackage.getInstance().getMainFrame(), JMeterUtils.getResString(STR), JMeterUtils.getResString("save?"), JOptionPane.YES_NO_CANCEL_OPTION, JOptionPane.QUESTION_MESSAGE)) == JOptionPane.YES_OPTION) { ActionRouter.getInstance().doActionNow(new ActionEvent(e.getSource(), e.getID(), ActionNames.SAVE)); } if (response == JOptionPane.CLOSED_OPTION || response == JOptionPane.CANCEL_OPTION) { return false; } } ActionRouter.getInstance().doActionNow(new ActionEvent(e.getSource(), e.getID(), ActionNames.STOP_THREAD)); closeProject(e); return true; } | /**
* Helper routine to allow action to be shared by LOAD.
*
* @param e event
* @return true if Close was not cancelled
*/ | Helper routine to allow action to be shared by LOAD | performAction | {
"repo_name": "botelhojp/apache-jmeter-2.10",
"path": "src/core/org/apache/jmeter/gui/action/Close.java",
"license": "apache-2.0",
"size": 3732
} | [
"java.awt.event.ActionEvent",
"javax.swing.JOptionPane",
"org.apache.jmeter.gui.GuiPackage",
"org.apache.jmeter.util.JMeterUtils"
] | import java.awt.event.ActionEvent; import javax.swing.JOptionPane; import org.apache.jmeter.gui.GuiPackage; import org.apache.jmeter.util.JMeterUtils; | import java.awt.event.*; import javax.swing.*; import org.apache.jmeter.gui.*; import org.apache.jmeter.util.*; | [
"java.awt",
"javax.swing",
"org.apache.jmeter"
] | java.awt; javax.swing; org.apache.jmeter; | 2,652,105 |
public void removeModule(int courseId, int moduleId) {
String courseName = this.amadeusFacade.getCourse(courseId).getName();
String moduleName = this.amadeusFacade.getModule(moduleId).getNome();
String sms = this.createMessage("mobile.course.deletedModule", courseName);
sms = sms + this.truncate(moduleName, 130-sms.length());
java.util.Date date = new java.util.Date(System.currentTimeMillis());
NoticeMobile notice = new NoticeMobile("Remoção de Módulo", sms, moduleId, courseId, 1, false, -1, date);
RemoveModuleThread thread = new RemoveModuleThread(notice, courseName);
this.threads.add(thread);
thread.start();
}
| void function(int courseId, int moduleId) { String courseName = this.amadeusFacade.getCourse(courseId).getName(); String moduleName = this.amadeusFacade.getModule(moduleId).getNome(); String sms = this.createMessage(STR, courseName); sms = sms + this.truncate(moduleName, 130-sms.length()); java.util.Date date = new java.util.Date(System.currentTimeMillis()); NoticeMobile notice = new NoticeMobile(STR, sms, moduleId, courseId, 1, false, -1, date); RemoveModuleThread thread = new RemoveModuleThread(notice, courseName); this.threads.add(thread); thread.start(); } | /**
* Method that Removes a Module in a Course
* @param courseId - Course Id
* @param moduleId - Module Id
*/ | Method that Removes a Module in a Course | removeModule | {
"repo_name": "ProjetoAmadeus/AmadeusLMS",
"path": "src/br/ufpe/cin/amadeus/amadeus_mobile/sms/Receiver.java",
"license": "gpl-2.0",
"size": 14523
} | [
"br.ufpe.cin.amadeus.amadeus_mobile.basics.NoticeMobile"
] | import br.ufpe.cin.amadeus.amadeus_mobile.basics.NoticeMobile; | import br.ufpe.cin.amadeus.amadeus_mobile.basics.*; | [
"br.ufpe.cin"
] | br.ufpe.cin; | 382,680 |
public byte[][] createShares(byte[] secret, int shares, int threshold, Random rnd) {
if (secret == null)
throw new IllegalArgumentException("null secret");
int m = secret.length;
if (m == 0)
throw new IllegalArgumentException("invalid secret length: 0");
if (m > MAX_SECRET_BYTES)
throw new IllegalArgumentException("invalid secret length: " + m + "(gt " + MAX_SECRET_BYTES + " bytes)");
if (shares < 1)
throw new IllegalArgumentException("not enought shares: " + shares);
if (shares > MAX_SHARES)
throw new IllegalArgumentException("too many shares: " + shares + "(gt" + MAX_SHARES + ")");
if (threshold > shares)
throw new IllegalArgumentException("threshold > shares: " + threshold + " > " + shares);
byte[][] share = new byte[shares][m + 1];
for (int i = 0; i < shares; i++)
share[i][0] = (byte) (i + 1);
byte[] a = null;
try {
a = new byte[threshold];
for (int i = 0; i < m; i++) {
rnd.nextBytes(a);
a[0] = secret[i];
for (int j = 0; j < shares; j++)
share[j][i + 1] = (byte) eval(share[j][0], a);
}
} finally {
if (a != null)
Arrays.fill(a, (byte) 0);
}
return share;
} | byte[][] function(byte[] secret, int shares, int threshold, Random rnd) { if (secret == null) throw new IllegalArgumentException(STR); int m = secret.length; if (m == 0) throw new IllegalArgumentException(STR); if (m > MAX_SECRET_BYTES) throw new IllegalArgumentException(STR + m + STR + MAX_SECRET_BYTES + STR); if (shares < 1) throw new IllegalArgumentException(STR + shares); if (shares > MAX_SHARES) throw new IllegalArgumentException(STR + shares + "(gt" + MAX_SHARES + ")"); if (threshold > shares) throw new IllegalArgumentException(STR + threshold + STR + shares); byte[][] share = new byte[shares][m + 1]; for (int i = 0; i < shares; i++) share[i][0] = (byte) (i + 1); byte[] a = null; try { a = new byte[threshold]; for (int i = 0; i < m; i++) { rnd.nextBytes(a); a[0] = secret[i]; for (int j = 0; j < shares; j++) share[j][i + 1] = (byte) eval(share[j][0], a); } } finally { if (a != null) Arrays.fill(a, (byte) 0); } return share; } | /**
* Generate a set of shares from the secret provided. Secret reconstruction will require 'threshold' shares in order to reconstruct correctly a secret
*
* @param secret
* byte array of len 1..65536
* @param shares
* number of shares to generate
* @param threshold
* number of required shares in order to reconstruct a secret
* @param rnd
* Source for random numbers (this must be a good random number generator)
* @return byte array of byte arrays (a list of shares as raw bytes)
*/ | Generate a set of shares from the secret provided. Secret reconstruction will require 'threshold' shares in order to reconstruct correctly a secret | createShares | {
"repo_name": "antik10ud/threshold-secret-sharing",
"path": "src/main/java/com/k10ud/cryptography/tss/core/ThresholdSecretSharing.java",
"license": "mit",
"size": 9711
} | [
"java.util.Arrays",
"java.util.Random"
] | import java.util.Arrays; import java.util.Random; | import java.util.*; | [
"java.util"
] | java.util; | 2,803,085 |
public PathExpr bindSelect(QueryParser parser, String id)
{
_fromItem = bindSubPath(parser);
return this;
} | PathExpr function(QueryParser parser, String id) { _fromItem = bindSubPath(parser); return this; } | /**
* Binds the expression as a select item.
*/ | Binds the expression as a select item | bindSelect | {
"repo_name": "mdaniel/svn-caucho-com-resin",
"path": "modules/resin/src/com/caucho/amber/expr/EmbeddedExpr.java",
"license": "gpl-2.0",
"size": 6027
} | [
"com.caucho.amber.query.QueryParser"
] | import com.caucho.amber.query.QueryParser; | import com.caucho.amber.query.*; | [
"com.caucho.amber"
] | com.caucho.amber; | 1,253,922 |
public void start() throws IOException {
try {
int port = 0;
int oriPort = listener.getPort(); // The original requested port
while (true) {
try {
port = webServer.getConnectors()[0].getLocalPort();
LOG.info("Port returned by webServer.getConnectors()[0]." +
"getLocalPort() before open() is "+ port +
". Opening the listener on " + oriPort);
listener.open();
port = listener.getLocalPort();
LOG.info("listener.getLocalPort() returned " + listener.getLocalPort() +
" webServer.getConnectors()[0].getLocalPort() returned " +
webServer.getConnectors()[0].getLocalPort());
//Workaround to handle the problem reported in HADOOP-4744
if (port < 0) {
Thread.sleep(100);
int numRetries = 1;
while (port < 0) {
LOG.warn("listener.getLocalPort returned " + port);
if (numRetries++ > MAX_RETRIES) {
throw new Exception(" listener.getLocalPort is returning " +
"less than 0 even after " +numRetries+" resets");
}
for (int i = 0; i < 2; i++) {
LOG.info("Retrying listener.getLocalPort()");
port = listener.getLocalPort();
if (port > 0) {
break;
}
Thread.sleep(200);
}
if (port > 0) {
break;
}
LOG.info("Bouncing the listener");
listener.close();
Thread.sleep(1000);
listener.setPort(oriPort == 0 ? 0 : (oriPort += 1));
listener.open();
Thread.sleep(100);
port = listener.getLocalPort();
}
} //Workaround end
LOG.info("Jetty bound to port " + port);
webServer.start();
// Workaround for HADOOP-6386
port = listener.getLocalPort();
if (port < 0) {
LOG.warn("Bounds port is " + port + " after webserver start");
for (int i = 0; i < MAX_RETRIES/2; i++) {
try {
webServer.stop();
} catch (Exception e) {
LOG.warn("Can't stop web-server", e);
}
Thread.sleep(1000);
listener.setPort(oriPort == 0 ? 0 : (oriPort += 1));
listener.open();
Thread.sleep(100);
webServer.start();
LOG.info(i + "attempts to restart webserver");
port = listener.getLocalPort();
if (port > 0)
break;
}
if (port < 0)
throw new Exception("listener.getLocalPort() is returning " +
"less than 0 even after " +MAX_RETRIES+" resets");
}
// End of HADOOP-6386 workaround
break;
} catch (IOException ex) {
// if this is a bind exception,
// then try the next port number.
if (ex instanceof BindException) {
if (!findPort) {
throw (BindException) ex;
}
} else {
LOG.info("HttpServer.start() threw a non Bind IOException");
throw ex;
}
} catch (MultiException ex) {
LOG.info("HttpServer.start() threw a MultiException");
throw ex;
}
listener.setPort((oriPort += 1));
}
} catch (IOException e) {
throw e;
} catch (Exception e) {
throw new IOException("Problem starting http server", e);
}
} | void function() throws IOException { try { int port = 0; int oriPort = listener.getPort(); while (true) { try { port = webServer.getConnectors()[0].getLocalPort(); LOG.info(STR + STR+ port + STR + oriPort); listener.open(); port = listener.getLocalPort(); LOG.info(STR + listener.getLocalPort() + STR + webServer.getConnectors()[0].getLocalPort()); if (port < 0) { Thread.sleep(100); int numRetries = 1; while (port < 0) { LOG.warn(STR + port); if (numRetries++ > MAX_RETRIES) { throw new Exception(STR + STR +numRetries+STR); } for (int i = 0; i < 2; i++) { LOG.info(STR); port = listener.getLocalPort(); if (port > 0) { break; } Thread.sleep(200); } if (port > 0) { break; } LOG.info(STR); listener.close(); Thread.sleep(1000); listener.setPort(oriPort == 0 ? 0 : (oriPort += 1)); listener.open(); Thread.sleep(100); port = listener.getLocalPort(); } } LOG.info(STR + port); webServer.start(); port = listener.getLocalPort(); if (port < 0) { LOG.warn(STR + port + STR); for (int i = 0; i < MAX_RETRIES/2; i++) { try { webServer.stop(); } catch (Exception e) { LOG.warn(STR, e); } Thread.sleep(1000); listener.setPort(oriPort == 0 ? 0 : (oriPort += 1)); listener.open(); Thread.sleep(100); webServer.start(); LOG.info(i + STR); port = listener.getLocalPort(); if (port > 0) break; } if (port < 0) throw new Exception(STR + STR +MAX_RETRIES+STR); } break; } catch (IOException ex) { if (ex instanceof BindException) { if (!findPort) { throw (BindException) ex; } } else { LOG.info(STR); throw ex; } } catch (MultiException ex) { LOG.info(STR); throw ex; } listener.setPort((oriPort += 1)); } } catch (IOException e) { throw e; } catch (Exception e) { throw new IOException(STR, e); } } | /**
* Start the server. Does not wait for the server to start.
*/ | Start the server. Does not wait for the server to start | start | {
"repo_name": "shakamunyi/hadoop-20",
"path": "src/core/org/apache/hadoop/http/HttpServer.java",
"license": "apache-2.0",
"size": 30649
} | [
"java.io.IOException",
"java.net.BindException",
"org.mortbay.util.MultiException"
] | import java.io.IOException; import java.net.BindException; import org.mortbay.util.MultiException; | import java.io.*; import java.net.*; import org.mortbay.util.*; | [
"java.io",
"java.net",
"org.mortbay.util"
] | java.io; java.net; org.mortbay.util; | 2,193,055 |
@Transactional(readOnly=true)
public ControlSchedule getControlJob(int sessionId, Integer id) throws PluginException, ApplicationException,
PermissionException, SessionNotFoundException, SessionTimeoutException {
AuthzSubject subject = sessionManager.getSubject(sessionId);
return controlScheduleManager.getControlJob(subject, id);
} | @Transactional(readOnly=true) ControlSchedule function(int sessionId, Integer id) throws PluginException, ApplicationException, PermissionException, SessionNotFoundException, SessionTimeoutException { AuthzSubject subject = sessionManager.getSubject(sessionId); return controlScheduleManager.getControlJob(subject, id); } | /**
* Obtain a ControlJob based on an id
*
* @param triggerName The control trigger name
*
* @return The control job that was requested
*/ | Obtain a ControlJob based on an id | getControlJob | {
"repo_name": "cc14514/hq6",
"path": "hq-server/src/main/java/org/hyperic/hq/bizapp/server/session/ControlBossImpl.java",
"license": "unlicense",
"size": 22025
} | [
"org.hyperic.hq.auth.shared.SessionNotFoundException",
"org.hyperic.hq.auth.shared.SessionTimeoutException",
"org.hyperic.hq.authz.server.session.AuthzSubject",
"org.hyperic.hq.authz.shared.PermissionException",
"org.hyperic.hq.common.ApplicationException",
"org.hyperic.hq.control.server.session.ControlSchedule",
"org.hyperic.hq.product.PluginException",
"org.springframework.transaction.annotation.Transactional"
] | import org.hyperic.hq.auth.shared.SessionNotFoundException; import org.hyperic.hq.auth.shared.SessionTimeoutException; import org.hyperic.hq.authz.server.session.AuthzSubject; import org.hyperic.hq.authz.shared.PermissionException; import org.hyperic.hq.common.ApplicationException; import org.hyperic.hq.control.server.session.ControlSchedule; import org.hyperic.hq.product.PluginException; import org.springframework.transaction.annotation.Transactional; | import org.hyperic.hq.auth.shared.*; import org.hyperic.hq.authz.server.session.*; import org.hyperic.hq.authz.shared.*; import org.hyperic.hq.common.*; import org.hyperic.hq.control.server.session.*; import org.hyperic.hq.product.*; import org.springframework.transaction.annotation.*; | [
"org.hyperic.hq",
"org.springframework.transaction"
] | org.hyperic.hq; org.springframework.transaction; | 992,285 |
public LongSparseArrayIterable<V> values(Iterable<?> values); | LongSparseArrayIterable<V> function(Iterable<?> values); | /**
* Creates a filter matching the entry values returned by the specified iterable.
*
* @param values The iterable of values to match.
* @return The filtered iterable.
*/ | Creates a filter matching the entry values returned by the specified iterable | values | {
"repo_name": "davide-maestroni/robo-fashion",
"path": "library/src/main/java/com/github/dm/rf/android/filter/LongSparseArrayFilterBuilder.java",
"license": "apache-2.0",
"size": 3023
} | [
"com.github.dm.rf.android.iterator.LongSparseArrayIterable"
] | import com.github.dm.rf.android.iterator.LongSparseArrayIterable; | import com.github.dm.rf.android.iterator.*; | [
"com.github.dm"
] | com.github.dm; | 29,189 |
public byte[] unwrap(final byte[] byteArray, final int n, final int n1)
throws SaslException {
return null;
} | byte[] function(final byte[] byteArray, final int n, final int n1) throws SaslException { return null; } | /**
* Describe <code>unwrap</code> method here.
*
* @param byteArray a <code>byte[]</code> value
* @param n an <code>int</code> value
* @param n1 an <code>int</code> value
* @return a <code>byte[]</code> value
* @exception SaslException if an error occurs
*/ | Describe <code>unwrap</code> method here | unwrap | {
"repo_name": "zooldk/tigase-server",
"path": "src/main/java/tigase/auth/SaslAnonymous.java",
"license": "agpl-3.0",
"size": 4173
} | [
"javax.security.sasl.SaslException"
] | import javax.security.sasl.SaslException; | import javax.security.sasl.*; | [
"javax.security"
] | javax.security; | 1,764,685 |
public static UserBoardRole getUserBoardRole(Long boardId, String googleAccId) {
Cache cache = getCacheInstance();
String cacheKey = Long.toString(boardId) + "-" + googleAccId;
UserBoardRole role = (UserBoardRole)cache.get(cacheKey);
if (role == null) {
// role is not in cache yet, so load it and put into cache
// get current user
User user = ofy().load().type(User.class).filter("googleAccId", googleAccId).first().now();
// find the first role with the user and board
role = ofy().load().type(UserBoardRole.class).ancestor(Key.create(Whiteboard.class, boardId)).filter("user", user).first().now();
// store role in cache
// of course if role doesn't exist it will store null in cache, and later calls will not utilize cache
// for now to keep things simpler let's not think about it
cache.put(cacheKey, role);
}
return role;
} | static UserBoardRole function(Long boardId, String googleAccId) { Cache cache = getCacheInstance(); String cacheKey = Long.toString(boardId) + "-" + googleAccId; UserBoardRole role = (UserBoardRole)cache.get(cacheKey); if (role == null) { User user = ofy().load().type(User.class).filter(STR, googleAccId).first().now(); role = ofy().load().type(UserBoardRole.class).ancestor(Key.create(Whiteboard.class, boardId)).filter("user", user).first().now(); cache.put(cacheKey, role); } return role; } | /**
* Returns a UserBoardRole object for given board and user with given google acc id.
* Tries to get value from cache, if it is empty gets value from datastore and updates cache
*/ | Returns a UserBoardRole object for given board and user with given google acc id. Tries to get value from cache, if it is empty gets value from datastore and updates cache | getUserBoardRole | {
"repo_name": "googleinterns/step257-2020",
"path": "sticknotesbackend/src/main/java/com/google/sticknotesbackend/FastStorage.java",
"license": "apache-2.0",
"size": 4838
} | [
"com.google.sticknotesbackend.models.User",
"com.google.sticknotesbackend.models.UserBoardRole",
"com.google.sticknotesbackend.models.Whiteboard",
"com.googlecode.objectify.Key",
"com.googlecode.objectify.ObjectifyService",
"javax.cache.Cache"
] | import com.google.sticknotesbackend.models.User; import com.google.sticknotesbackend.models.UserBoardRole; import com.google.sticknotesbackend.models.Whiteboard; import com.googlecode.objectify.Key; import com.googlecode.objectify.ObjectifyService; import javax.cache.Cache; | import com.google.sticknotesbackend.models.*; import com.googlecode.objectify.*; import javax.cache.*; | [
"com.google.sticknotesbackend",
"com.googlecode.objectify",
"javax.cache"
] | com.google.sticknotesbackend; com.googlecode.objectify; javax.cache; | 2,444,408 |
public static double calcWidthToHeight(ChartViewer myChart, double chartHeight) {
makeChartResizable(myChart);
myChart.getCanvas().draw();
XYPlot plot = (XYPlot) myChart.getChart().getPlot();
ChartRenderingInfo info = myChart.getRenderingInfo();
Rectangle2D dataArea = info.getPlotInfo().getDataArea();
Rectangle2D chartArea = info.getChartArea();
// calc title space: will be added later to the right plot size
double titleWidth = chartArea.getWidth() - dataArea.getWidth();
double titleHeight = chartArea.getHeight() - dataArea.getHeight();
// calc right plot size with axis dim.
// real plot width is given by factor;
double realPH = chartHeight - titleHeight;
// ranges
ValueAxis domainAxis = plot.getDomainAxis();
org.jfree.data.Range x = domainAxis.getRange();
ValueAxis rangeAxis = plot.getRangeAxis();
org.jfree.data.Range y = rangeAxis.getRange();
// real plot height can be calculated by
double realPW = realPH / y.getLength() * x.getLength();
double width = realPW + titleWidth;
return width;
} | static double function(ChartViewer myChart, double chartHeight) { makeChartResizable(myChart); myChart.getCanvas().draw(); XYPlot plot = (XYPlot) myChart.getChart().getPlot(); ChartRenderingInfo info = myChart.getRenderingInfo(); Rectangle2D dataArea = info.getPlotInfo().getDataArea(); Rectangle2D chartArea = info.getChartArea(); double titleWidth = chartArea.getWidth() - dataArea.getWidth(); double titleHeight = chartArea.getHeight() - dataArea.getHeight(); double realPH = chartHeight - titleHeight; ValueAxis domainAxis = plot.getDomainAxis(); org.jfree.data.Range x = domainAxis.getRange(); ValueAxis rangeAxis = plot.getRangeAxis(); org.jfree.data.Range y = rangeAxis.getRange(); double realPW = realPH / y.getLength() * x.getLength(); double width = realPW + titleWidth; return width; } | /**
* Domain and Range axes need to share the same unit (e.g. mm)
*
* @param myChart
* @return
*/ | Domain and Range axes need to share the same unit (e.g. mm) | calcWidthToHeight | {
"repo_name": "mzmine/mzmine3",
"path": "src/main/java/io/github/mzmine/gui/chartbasics/ChartLogicsFX.java",
"license": "gpl-2.0",
"size": 18257
} | [
"java.awt.geom.Rectangle2D",
"org.jfree.chart.ChartRenderingInfo",
"org.jfree.chart.axis.ValueAxis",
"org.jfree.chart.fx.ChartViewer",
"org.jfree.chart.plot.XYPlot",
"org.jfree.data.Range"
] | import java.awt.geom.Rectangle2D; import org.jfree.chart.ChartRenderingInfo; import org.jfree.chart.axis.ValueAxis; import org.jfree.chart.fx.ChartViewer; import org.jfree.chart.plot.XYPlot; import org.jfree.data.Range; | import java.awt.geom.*; import org.jfree.chart.*; import org.jfree.chart.axis.*; import org.jfree.chart.fx.*; import org.jfree.chart.plot.*; import org.jfree.data.*; | [
"java.awt",
"org.jfree.chart",
"org.jfree.data"
] | java.awt; org.jfree.chart; org.jfree.data; | 976,536 |
public String getSQLValue( ValueMetaInterface valueMeta, Object valueData, String dateFormat ) throws KettleValueException; | String function( ValueMetaInterface valueMeta, Object valueData, String dateFormat ) throws KettleValueException; | /**
* Convert a value in the SQL equivalent. For example, convert String "Pentaho" into 'Pentaho' or into Oracle date
* format TO_DATE('2012/08/16 15:36:59', 'YYYY/MM/DD HH24:MI:SS')
*
* @param valueMeta
* The description of the value. The date format used is taken from this value unless dateFormat is specified
* (not null or empty)
* @param valueData
* The data to convert.
* @return The value SQL clause
* @throws KettleValueException
* in case there is a data conversion error.
*/ | Convert a value in the SQL equivalent. For example, convert String "Pentaho" into 'Pentaho' or into Oracle date | getSQLValue | {
"repo_name": "lgrill-pentaho/pentaho-kettle",
"path": "core/src/main/java/org/pentaho/di/core/database/DatabaseInterface.java",
"license": "apache-2.0",
"size": 37186
} | [
"org.pentaho.di.core.exception.KettleValueException",
"org.pentaho.di.core.row.ValueMetaInterface"
] | import org.pentaho.di.core.exception.KettleValueException; import org.pentaho.di.core.row.ValueMetaInterface; | import org.pentaho.di.core.exception.*; import org.pentaho.di.core.row.*; | [
"org.pentaho.di"
] | org.pentaho.di; | 410,080 |
protected String shouldUrlDecodeHeader(SparkConfiguration configuration, String headerName, Object value, String charset) throws
UnsupportedEncodingException {
// do not decode Content-Type
if (Exchange.CONTENT_TYPE.equals(headerName)) {
return value.toString();
} else if (configuration.isUrlDecodeHeaders()) {
return URLDecoder.decode(value.toString(), charset);
} else {
return value.toString();
}
} | String function(SparkConfiguration configuration, String headerName, Object value, String charset) throws UnsupportedEncodingException { if (Exchange.CONTENT_TYPE.equals(headerName)) { return value.toString(); } else if (configuration.isUrlDecodeHeaders()) { return URLDecoder.decode(value.toString(), charset); } else { return value.toString(); } } | /**
* Decodes the header if needed to, or returns the header value as is.
*
* @param configuration the configuration
* @param headerName the header name
* @param value the current header value
* @param charset the charset to use for decoding
* @return the decoded value (if decoded was needed) or a <tt>toString</tt> representation of the value.
* @throws java.io.UnsupportedEncodingException is thrown if error decoding.
*/ | Decodes the header if needed to, or returns the header value as is | shouldUrlDecodeHeader | {
"repo_name": "jlpedrosa/camel",
"path": "components/camel-spark-rest/src/main/java/org/apache/camel/component/sparkrest/DefaultSparkBinding.java",
"license": "apache-2.0",
"size": 10579
} | [
"java.io.UnsupportedEncodingException",
"java.net.URLDecoder",
"org.apache.camel.Exchange"
] | import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import org.apache.camel.Exchange; | import java.io.*; import java.net.*; import org.apache.camel.*; | [
"java.io",
"java.net",
"org.apache.camel"
] | java.io; java.net; org.apache.camel; | 161,216 |
protected void copyStats(ZcStat userStat, ZcStat cachedStat) {
Preconditions.checkState(!closed);
if (userStat != null && cachedStat != null) {
userStat.set(cachedStat);
}
} | void function(ZcStat userStat, ZcStat cachedStat) { Preconditions.checkState(!closed); if (userStat != null && cachedStat != null) { userStat.set(cachedStat); } } | /**
* Helper method to copy stats from the cached stat into userStat
*
* @param userStat
* user Stat object
* @param cachedStat
* cached statistic, that is or will be cached
*/ | Helper method to copy stats from the cached stat into userStat | copyStats | {
"repo_name": "mjwall/accumulo",
"path": "core/src/main/java/org/apache/accumulo/fate/zookeeper/ZooCache.java",
"license": "apache-2.0",
"size": 18547
} | [
"com.google.common.base.Preconditions"
] | import com.google.common.base.Preconditions; | import com.google.common.base.*; | [
"com.google.common"
] | com.google.common; | 2,148,758 |
if (this.validations != null) {
for (ValidateInfo validate : getValidations()) {
if (validate.required
&& !request.getAttributes().containsKey(
validate.attribute)) {
response.setStatus(
Status.CLIENT_ERROR_BAD_REQUEST,
"Unable to find the \""
+ validate.attribute
+ "\" attribute in the request. Please check your request.");
} else if (validate.format != null) {
Object value = request.getAttributes().get(
validate.attribute);
if ((value != null)
&& !Pattern.matches(validate.format,
value.toString())) {
response.setStatus(
Status.CLIENT_ERROR_BAD_REQUEST,
"Unable to validate the value of the \""
+ validate.attribute
+ "\" attribute. The expected format is: "
+ validate.format
+ " (Java Regex). Please check your request.");
}
}
}
}
return CONTINUE;
} | if (this.validations != null) { for (ValidateInfo validate : getValidations()) { if (validate.required && !request.getAttributes().containsKey( validate.attribute)) { response.setStatus( Status.CLIENT_ERROR_BAD_REQUEST, STRSTR\STR); } else if (validate.format != null) { Object value = request.getAttributes().get( validate.attribute); if ((value != null) && !Pattern.matches(validate.format, value.toString())) { response.setStatus( Status.CLIENT_ERROR_BAD_REQUEST, STRSTR\STR + validate.format + STR); } } } } return CONTINUE; } | /**
* Allows filtering before its handling by the target Restlet. By default it
* parses the template variable, adjust the base reference, then extracts
* the attributes from form parameters (query, cookies, entity) and finally
* tries to validate the variables as indicated by the
* {@link #validate(String, boolean, String)} method.
*
* @param request
* The request to filter.
* @param response
* The response to filter.
* @return The {@link Filter#CONTINUE} status.
*/ | Allows filtering before its handling by the target Restlet. By default it parses the template variable, adjust the base reference, then extracts the attributes from form parameters (query, cookies, entity) and finally tries to validate the variables as indicated by the <code>#validate(String, boolean, String)</code> method | beforeHandle | {
"repo_name": "zhangjunfang/eclipse-dir",
"path": "restlet/src/org/restlet/routing/Validator.java",
"license": "bsd-2-clause",
"size": 7779
} | [
"java.util.regex.Pattern",
"org.restlet.data.Status"
] | import java.util.regex.Pattern; import org.restlet.data.Status; | import java.util.regex.*; import org.restlet.data.*; | [
"java.util",
"org.restlet.data"
] | java.util; org.restlet.data; | 2,123,410 |
public DocumentTypeEBO getFinancialSystemDocumentType() {
if ( StringUtils.isBlank( financialSystemDocumentTypeCode ) ) {
financialSystemDocumentType = null;
} else {
if ( financialSystemDocumentType == null || !StringUtils.equals(financialSystemDocumentTypeCode, financialSystemDocumentType.getName() ) ) {
org.kuali.rice.kew.api.doctype.DocumentType temp = SpringContext.getBean(DocumentTypeService.class).getDocumentTypeByName(financialSystemDocumentTypeCode);
if ( temp != null ) {
financialSystemDocumentType = DocumentType.from( temp );
} else {
financialSystemDocumentType = null;
}
}
}
return financialSystemDocumentType;
} | DocumentTypeEBO function() { if ( StringUtils.isBlank( financialSystemDocumentTypeCode ) ) { financialSystemDocumentType = null; } else { if ( financialSystemDocumentType == null || !StringUtils.equals(financialSystemDocumentTypeCode, financialSystemDocumentType.getName() ) ) { org.kuali.rice.kew.api.doctype.DocumentType temp = SpringContext.getBean(DocumentTypeService.class).getDocumentTypeByName(financialSystemDocumentTypeCode); if ( temp != null ) { financialSystemDocumentType = DocumentType.from( temp ); } else { financialSystemDocumentType = null; } } } return financialSystemDocumentType; } | /**
* Gets the financialSystemDocumentType attribute.
*
* @return Returns the financialSystemDocumentType.
*/ | Gets the financialSystemDocumentType attribute | getFinancialSystemDocumentType | {
"repo_name": "ua-eas/ua-kfs-5.3",
"path": "work/src/org/kuali/kfs/sec/businessobject/SecurityDefinitionDocumentType.java",
"license": "agpl-3.0",
"size": 4942
} | [
"org.apache.commons.lang.StringUtils",
"org.kuali.kfs.sys.context.SpringContext",
"org.kuali.rice.kew.api.doctype.DocumentTypeService",
"org.kuali.rice.kew.doctype.bo.DocumentType",
"org.kuali.rice.kew.doctype.bo.DocumentTypeEBO"
] | import org.apache.commons.lang.StringUtils; import org.kuali.kfs.sys.context.SpringContext; import org.kuali.rice.kew.api.doctype.DocumentTypeService; import org.kuali.rice.kew.doctype.bo.DocumentType; import org.kuali.rice.kew.doctype.bo.DocumentTypeEBO; | import org.apache.commons.lang.*; import org.kuali.kfs.sys.context.*; import org.kuali.rice.kew.api.doctype.*; import org.kuali.rice.kew.doctype.bo.*; | [
"org.apache.commons",
"org.kuali.kfs",
"org.kuali.rice"
] | org.apache.commons; org.kuali.kfs; org.kuali.rice; | 2,694,022 |
protected Hobby getRandomHobby(int empID) {
boolean matched = false;
int index = 0;
for (int number_of_tries = 0; !matched; number_of_tries++) {
index = nameRandomizer.nextInt(HOBBIES.length - 1);
if (!EMP_HOBBIES.contains(empID + "_" + index)) {
EMP_HOBBIES.add(empID + "_" + index);
matched = true;
}
if (number_of_tries >= MAX_NUMBER_OF_TRIES) {
throw new RuntimeException("Can't find anymore randomly distributed hobbies");
}
}
return HOBBIES[index];
} | Hobby function(int empID) { boolean matched = false; int index = 0; for (int number_of_tries = 0; !matched; number_of_tries++) { index = nameRandomizer.nextInt(HOBBIES.length - 1); if (!EMP_HOBBIES.contains(empID + "_" + index)) { EMP_HOBBIES.add(empID + "_" + index); matched = true; } if (number_of_tries >= MAX_NUMBER_OF_TRIES) { throw new RuntimeException(STR); } } return HOBBIES[index]; } | /**
* returns a hobby for given employee, it is not possible to assign a hobby more than once to a employee
*
* @param empID
* the ID of the employee
* @return a hobby for the given employee ID
* @throws Exception
* is thrown if there are no hobbies left which have not already been assigned to this employee
*/ | returns a hobby for given employee, it is not possible to assign a hobby more than once to a employee | getRandomHobby | {
"repo_name": "RallySoftware/eclipselink.runtime",
"path": "jpa/eclipselink.jpa.wdf.test/src/org/eclipse/persistence/testing/tests/wdf/jpa1/query/TestExtendedQueries.java",
"license": "epl-1.0",
"size": 23632
} | [
"org.eclipse.persistence.testing.models.wdf.jpa1.employee.Hobby"
] | import org.eclipse.persistence.testing.models.wdf.jpa1.employee.Hobby; | import org.eclipse.persistence.testing.models.wdf.jpa1.employee.*; | [
"org.eclipse.persistence"
] | org.eclipse.persistence; | 927,297 |
public void testToString() {
NavigableSet q = populatedSet(SIZE);
String s = q.toString();
for (int i = 0; i < SIZE; ++i) {
assertTrue(s.contains(String.valueOf(i)));
}
} | void function() { NavigableSet q = populatedSet(SIZE); String s = q.toString(); for (int i = 0; i < SIZE; ++i) { assertTrue(s.contains(String.valueOf(i))); } } | /**
* toString contains toStrings of elements
*/ | toString contains toStrings of elements | testToString | {
"repo_name": "google/desugar_jdk_libs",
"path": "jdk11/src/libcore/ojluni/src/test/java/util/concurrent/tck/TreeSubSetTest.java",
"license": "gpl-2.0",
"size": 31878
} | [
"java.util.NavigableSet"
] | import java.util.NavigableSet; | import java.util.*; | [
"java.util"
] | java.util; | 1,730,434 |
public void upgrade(DownloadJob job, File src, File dst) throws IOException {
job.replace(dst, src);
} | void function(DownloadJob job, File src, File dst) throws IOException { job.replace(dst, src); } | /**
* Called after an upgrade has been downloaded to move it into its final
* location. The default implementation is a file rename.
*
* @param job The upgrade job that is invoking this strategy.
* @param src The temporary location of the upgrade.
* @param dst The final destination to install the upgrade to.
* @throws IOException if there are problems installing the resource.
*/ | Called after an upgrade has been downloaded to move it into its final location. The default implementation is a file rename | upgrade | {
"repo_name": "vivek/hudson",
"path": "core/src/main/java/hudson/model/UpdateCenter.java",
"license": "mit",
"size": 33444
} | [
"java.io.File",
"java.io.IOException"
] | import java.io.File; import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,380,750 |
public boolean exists(String filename) {
return (new File(filename)).exists();
} | boolean function(String filename) { return (new File(filename)).exists(); } | /**
* Return true or false based on whether the named file exists.
*/ | Return true or false based on whether the named file exists | exists | {
"repo_name": "wolffcm/voltdb",
"path": "src/hsqldb19b3/org/hsqldb_voltpatches/lib/FileUtil.java",
"license": "agpl-3.0",
"size": 10159
} | [
"java.io.File"
] | import java.io.File; | import java.io.*; | [
"java.io"
] | java.io; | 2,237,675 |
public void addMetrics(Optional<String> optResourceName, String resType) {
Set<ControlMetricType> metricTypeSet = Sets.newHashSet();
String resourceName = optResourceName.isPresent() ?
optResourceName.get() : DEFAULT_RESOURCE_NAME;
MetricsComponent metricsComponent = metricsService.registerComponent(resourceName);
if (optResourceName.isPresent()) {
if (!diskMap.containsKey(resourceName) && DISK_RESOURCE_TYPE.equals(resType)) {
metricTypeSet.addAll(ControlResource.DISK_METRICS);
diskMap.putIfAbsent(resourceName,
getMeterMap(metricTypeSet, metricsComponent, metricsService));
metricsService.notifyReporters();
} else if (!networkMap.containsKey(resourceName) && NETWORK_RESOURCE_TYPE.equals(resType)) {
metricTypeSet.addAll(ControlResource.NETWORK_METRICS);
networkMap.putIfAbsent(resourceName,
getMeterMap(metricTypeSet, metricsComponent, metricsService));
metricsService.notifyReporters();
} else {
return;
}
} else {
if (systemMap.isEmpty()) {
metricTypeSet.addAll(ControlResource.MEMORY_METRICS);
metricTypeSet.addAll(ControlResource.CPU_METRICS);
systemMap.putAll(getMeterMap(metricTypeSet, metricsComponent, metricsService));
metricsService.notifyReporters();
}
}
} | void function(Optional<String> optResourceName, String resType) { Set<ControlMetricType> metricTypeSet = Sets.newHashSet(); String resourceName = optResourceName.isPresent() ? optResourceName.get() : DEFAULT_RESOURCE_NAME; MetricsComponent metricsComponent = metricsService.registerComponent(resourceName); if (optResourceName.isPresent()) { if (!diskMap.containsKey(resourceName) && DISK_RESOURCE_TYPE.equals(resType)) { metricTypeSet.addAll(ControlResource.DISK_METRICS); diskMap.putIfAbsent(resourceName, getMeterMap(metricTypeSet, metricsComponent, metricsService)); metricsService.notifyReporters(); } else if (!networkMap.containsKey(resourceName) && NETWORK_RESOURCE_TYPE.equals(resType)) { metricTypeSet.addAll(ControlResource.NETWORK_METRICS); networkMap.putIfAbsent(resourceName, getMeterMap(metricTypeSet, metricsComponent, metricsService)); metricsService.notifyReporters(); } else { return; } } else { if (systemMap.isEmpty()) { metricTypeSet.addAll(ControlResource.MEMORY_METRICS); metricTypeSet.addAll(ControlResource.CPU_METRICS); systemMap.putAll(getMeterMap(metricTypeSet, metricsComponent, metricsService)); metricsService.notifyReporters(); } } } | /**
* Adds a set of new monitoring metric types.
*
* @param optResourceName optional resource name, null denotes system metric
* @param resType resource type
*/ | Adds a set of new monitoring metric types | addMetrics | {
"repo_name": "gkatsikas/onos",
"path": "apps/cpman/app/src/main/java/org/onosproject/cpman/impl/SystemMetricsAggregator.java",
"license": "apache-2.0",
"size": 5629
} | [
"com.google.common.collect.Sets",
"java.util.Optional",
"java.util.Set",
"org.onlab.metrics.MetricsComponent",
"org.onosproject.cpman.ControlMetricType",
"org.onosproject.cpman.ControlResource"
] | import com.google.common.collect.Sets; import java.util.Optional; import java.util.Set; import org.onlab.metrics.MetricsComponent; import org.onosproject.cpman.ControlMetricType; import org.onosproject.cpman.ControlResource; | import com.google.common.collect.*; import java.util.*; import org.onlab.metrics.*; import org.onosproject.cpman.*; | [
"com.google.common",
"java.util",
"org.onlab.metrics",
"org.onosproject.cpman"
] | com.google.common; java.util; org.onlab.metrics; org.onosproject.cpman; | 418,193 |
@Override
public void request(long request) {
if (!Operators.validate(request)) {
logger.atWarning()
.addKeyValue(NUMBER_OF_REQUESTED_MESSAGES_KEY, request)
.log("Invalid request");
return;
}
Operators.addCap(REQUESTED, this, request);
final AmqpReceiveLink link = currentLink;
if (link == null) {
return;
}
checkAndAddCredits(link);
drain();
} | void function(long request) { if (!Operators.validate(request)) { logger.atWarning() .addKeyValue(NUMBER_OF_REQUESTED_MESSAGES_KEY, request) .log(STR); return; } Operators.addCap(REQUESTED, this, request); final AmqpReceiveLink link = currentLink; if (link == null) { return; } checkAndAddCredits(link); drain(); } | /**
* When downstream subscriber makes a back-pressure request.
*/ | When downstream subscriber makes a back-pressure request | request | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/servicebus/azure-messaging-servicebus/src/main/java/com/azure/messaging/servicebus/implementation/ServiceBusReceiveLinkProcessor.java",
"license": "mit",
"size": 23322
} | [
"com.azure.core.amqp.implementation.AmqpReceiveLink"
] | import com.azure.core.amqp.implementation.AmqpReceiveLink; | import com.azure.core.amqp.implementation.*; | [
"com.azure.core"
] | com.azure.core; | 2,692,006 |
PolYlevVastus findPolYlev(Long identifikaator, String vin) throws XRoadServiceConsumptionException; | PolYlevVastus findPolYlev(Long identifikaator, String vin) throws XRoadServiceConsumptionException; | /**
* <code>liiklusregister.pol_ylev.v1</code> service.
*/ | <code>liiklusregister.pol_ylev.v1</code> service | findPolYlev | {
"repo_name": "nortal/j-road",
"path": "client-service/liiklusregister/src/main/java/com/nortal/jroad/client/liiklusregister/LiiklusregisterXTeeService.java",
"license": "apache-2.0",
"size": 2875
} | [
"com.nortal.jroad.client.exception.XRoadServiceConsumptionException",
"com.nortal.jroad.client.liiklusregister.types.ee.riik.xtee.liiklusregister.producers.producer.liiklusregister.PolYlevVastus"
] | import com.nortal.jroad.client.exception.XRoadServiceConsumptionException; import com.nortal.jroad.client.liiklusregister.types.ee.riik.xtee.liiklusregister.producers.producer.liiklusregister.PolYlevVastus; | import com.nortal.jroad.client.exception.*; import com.nortal.jroad.client.liiklusregister.types.ee.riik.xtee.liiklusregister.producers.producer.liiklusregister.*; | [
"com.nortal.jroad"
] | com.nortal.jroad; | 529,105 |
void validateDelete(SqlDelete delete); | void validateDelete(SqlDelete delete); | /**
* Validates a DELETE statement.
*
* @param delete DELETE statement
*/ | Validates a DELETE statement | validateDelete | {
"repo_name": "wanglan/calcite",
"path": "core/src/main/java/org/apache/calcite/sql/validate/SqlValidator.java",
"license": "apache-2.0",
"size": 24553
} | [
"org.apache.calcite.sql.SqlDelete"
] | import org.apache.calcite.sql.SqlDelete; | import org.apache.calcite.sql.*; | [
"org.apache.calcite"
] | org.apache.calcite; | 1,626,302 |
@Test
public void testToDerivative() {
MetalFutureOptionDefinition first = new MetalFutureOptionDefinition(EXPIRY_DATE, AN_UNDERLYING, 100, ExerciseDecisionType.EUROPEAN, true);
MetalFutureOptionDefinition second = new MetalFutureOptionDefinition(EXPIRY_DATE, AN_UNDERLYING, 100, ExerciseDecisionType.AMERICAN, false);
MetalFutureOption firstDerivative = first.toDerivative(A_DATE);
MetalFutureOption secondDerivative = second.toDerivative(A_DATE);
assertEquals(firstDerivative.getStrike(), 100.);
assertEquals(firstDerivative.getExerciseType(), ExerciseDecisionType.EUROPEAN);
assertEquals(firstDerivative.getUnderlying(), AN_UNDERLYING.toDerivative(A_DATE));
assertTrue(firstDerivative.isCall());
assertEquals(secondDerivative.getExerciseType(), ExerciseDecisionType.AMERICAN);
assertFalse(secondDerivative.isCall());
MetalFutureOption firstDerivative2 = new MetalFutureOption(0.0027397260273972603, AN_UNDERLYING.toDerivative(A_DATE), 100, ExerciseDecisionType.EUROPEAN, true);
assertEquals(firstDerivative.hashCode(), firstDerivative2.hashCode());
assertEquals(firstDerivative, firstDerivative2);
} | void function() { MetalFutureOptionDefinition first = new MetalFutureOptionDefinition(EXPIRY_DATE, AN_UNDERLYING, 100, ExerciseDecisionType.EUROPEAN, true); MetalFutureOptionDefinition second = new MetalFutureOptionDefinition(EXPIRY_DATE, AN_UNDERLYING, 100, ExerciseDecisionType.AMERICAN, false); MetalFutureOption firstDerivative = first.toDerivative(A_DATE); MetalFutureOption secondDerivative = second.toDerivative(A_DATE); assertEquals(firstDerivative.getStrike(), 100.); assertEquals(firstDerivative.getExerciseType(), ExerciseDecisionType.EUROPEAN); assertEquals(firstDerivative.getUnderlying(), AN_UNDERLYING.toDerivative(A_DATE)); assertTrue(firstDerivative.isCall()); assertEquals(secondDerivative.getExerciseType(), ExerciseDecisionType.AMERICAN); assertFalse(secondDerivative.isCall()); MetalFutureOption firstDerivative2 = new MetalFutureOption(0.0027397260273972603, AN_UNDERLYING.toDerivative(A_DATE), 100, ExerciseDecisionType.EUROPEAN, true); assertEquals(firstDerivative.hashCode(), firstDerivative2.hashCode()); assertEquals(firstDerivative, firstDerivative2); } | /**
* Test method for {@link com.opengamma.analytics.financial.commodity.definition.MetalFutureOptionDefinition#toDerivative(javax.time.calendar.ZonedDateTime)}.
*/ | Test method for <code>com.opengamma.analytics.financial.commodity.definition.MetalFutureOptionDefinition#toDerivative(javax.time.calendar.ZonedDateTime)</code> | testToDerivative | {
"repo_name": "McLeodMoores/starling",
"path": "projects/analytics/src/test/java/com/opengamma/analytics/financial/commodity/definition/MetalFutureOptionDefinitionTest.java",
"license": "apache-2.0",
"size": 4298
} | [
"com.opengamma.analytics.financial.ExerciseDecisionType",
"com.opengamma.analytics.financial.commodity.derivative.MetalFutureOption",
"org.testng.AssertJUnit"
] | import com.opengamma.analytics.financial.ExerciseDecisionType; import com.opengamma.analytics.financial.commodity.derivative.MetalFutureOption; import org.testng.AssertJUnit; | import com.opengamma.analytics.financial.*; import com.opengamma.analytics.financial.commodity.derivative.*; import org.testng.*; | [
"com.opengamma.analytics",
"org.testng"
] | com.opengamma.analytics; org.testng; | 1,474,303 |
public ZipEntry getZipEntry(int rowIndex) {
return this.rows[rowIndex];
} | ZipEntry function(int rowIndex) { return this.rows[rowIndex]; } | /**
* Zip-File entry at the given row index.
*
* @param rowIndex row index
* @return Zip file entry
*/ | Zip-File entry at the given row index | getZipEntry | {
"repo_name": "fc7/jabref",
"path": "src/main/java/net/sf/jabref/importer/ZipFileChooser.java",
"license": "gpl-2.0",
"size": 9432
} | [
"java.util.zip.ZipEntry"
] | import java.util.zip.ZipEntry; | import java.util.zip.*; | [
"java.util"
] | java.util; | 1,615,836 |
@SuppressWarnings("unchecked")
public Object convertFromCommandToValue(Item item, Command command) {
if (command == null) {
return null;
}
return convert(item, (OPENHAB_TYPE) command);
} | @SuppressWarnings(STR) Object function(Item item, Command command) { if (command == null) { return null; } return convert(item, (OPENHAB_TYPE) command); } | /**
 * Converts an OpenHab command to a Z-Wave value.
*
* @param command the {@link Command} to convert.
* @param item the item to convert the command for.
* @return the Z-Wave value to convert to.
 */ | Converts an OpenHab command to a Z-Wave value | convertFromCommandToValue | {
"repo_name": "theoweiss/openhab",
"path": "bundles/binding/org.openhab.binding.zwave/src/main/java/org/openhab/binding/zwave/internal/converter/command/ZWaveCommandConverter.java",
"license": "epl-1.0",
"size": 2041
} | [
"org.openhab.core.items.Item",
"org.openhab.core.types.Command"
] | import org.openhab.core.items.Item; import org.openhab.core.types.Command; | import org.openhab.core.items.*; import org.openhab.core.types.*; | [
"org.openhab.core"
] | org.openhab.core; | 2,665,896 |
private Object createNewObject(byte[] byteArray) throws IOException, ClassNotFoundException {
ByteArrayInputStream bais = new ByteArrayInputStream(byteArray);
ObjectInputStream ois = new ObjectInputStream(bais);
Object newObject = null;
try {
newObject = ois.readObject();
} finally {
ois.close();
}
return newObject;
}
| Object function(byte[] byteArray) throws IOException, ClassNotFoundException { ByteArrayInputStream bais = new ByteArrayInputStream(byteArray); ObjectInputStream ois = new ObjectInputStream(bais); Object newObject = null; try { newObject = ois.readObject(); } finally { ois.close(); } return newObject; } | /**
* Creates a new object due to byte array.
*
* @param byteArray
* New object data.
* @return New object based on byte array.
* @throws IOException
* Throws when an exception occurs during stream operation.
* @throws ClassNotFoundException
* Throws when an exception occurs during class stream
* operation.
*/ | Creates a new object due to byte array | createNewObject | {
"repo_name": "bozanfaruk/JClonerRep",
"path": "JCloner/src/jcloner/ByteBasedCloner.java",
"license": "gpl-3.0",
"size": 2253
} | [
"java.io.ByteArrayInputStream",
"java.io.IOException",
"java.io.ObjectInputStream"
] | import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.ObjectInputStream; | import java.io.*; | [
"java.io"
] | java.io; | 1,949,392 |
Bitmap createSnapshot(Bitmap.Config quality, int backgroundColor, boolean skipChildren) {
int width = mRight - mLeft;
int height = mBottom - mTop;
final AttachInfo attachInfo = mAttachInfo;
final float scale = attachInfo != null ? attachInfo.mApplicationScale : 1.0f;
width = (int) ((width * scale) + 0.5f);
height = (int) ((height * scale) + 0.5f);
Bitmap bitmap = Bitmap.createBitmap(width > 0 ? width : 1, height > 0 ? height : 1, quality);
if (bitmap == null) {
throw new OutOfMemoryError();
}
bitmap.setDensity(getResources().getDisplayMetrics().densityDpi);
Canvas canvas;
if (attachInfo != null) {
canvas = attachInfo.mCanvas;
if (canvas == null) {
canvas = new Canvas();
}
canvas.setBitmap(bitmap);
// Temporarily clobber the cached Canvas in case one of our children
// is also using a drawing cache. Without this, the children would
// steal the canvas by attaching their own bitmap to it and bad, bad
// things would happen (invisible views, corrupted drawings, etc.)
attachInfo.mCanvas = null;
} else {
// This case should hopefully never or seldom happen
canvas = new Canvas(bitmap);
}
if ((backgroundColor & 0xff000000) != 0) {
bitmap.eraseColor(backgroundColor);
}
computeScroll();
final int restoreCount = canvas.save();
canvas.scale(scale, scale);
canvas.translate(-mScrollX, -mScrollY);
// Temporarily remove the dirty mask
int flags = mPrivateFlags;
mPrivateFlags &= ~DIRTY_MASK;
// Fast path for layouts with no backgrounds
if ((mPrivateFlags & SKIP_DRAW) == SKIP_DRAW) {
dispatchDraw(canvas);
} else {
draw(canvas);
}
mPrivateFlags = flags;
canvas.restoreToCount(restoreCount);
if (attachInfo != null) {
// Restore the cached Canvas for our siblings
attachInfo.mCanvas = canvas;
}
return bitmap;
} | Bitmap createSnapshot(Bitmap.Config quality, int backgroundColor, boolean skipChildren) { int width = mRight - mLeft; int height = mBottom - mTop; final AttachInfo attachInfo = mAttachInfo; final float scale = attachInfo != null ? attachInfo.mApplicationScale : 1.0f; width = (int) ((width * scale) + 0.5f); height = (int) ((height * scale) + 0.5f); Bitmap bitmap = Bitmap.createBitmap(width > 0 ? width : 1, height > 0 ? height : 1, quality); if (bitmap == null) { throw new OutOfMemoryError(); } bitmap.setDensity(getResources().getDisplayMetrics().densityDpi); Canvas canvas; if (attachInfo != null) { canvas = attachInfo.mCanvas; if (canvas == null) { canvas = new Canvas(); } canvas.setBitmap(bitmap); attachInfo.mCanvas = null; } else { canvas = new Canvas(bitmap); } if ((backgroundColor & 0xff000000) != 0) { bitmap.eraseColor(backgroundColor); } computeScroll(); final int restoreCount = canvas.save(); canvas.scale(scale, scale); canvas.translate(-mScrollX, -mScrollY); int flags = mPrivateFlags; mPrivateFlags &= ~DIRTY_MASK; if ((mPrivateFlags & SKIP_DRAW) == SKIP_DRAW) { dispatchDraw(canvas); } else { draw(canvas); } mPrivateFlags = flags; canvas.restoreToCount(restoreCount); if (attachInfo != null) { attachInfo.mCanvas = canvas; } return bitmap; } | /**
* Create a snapshot of the view into a bitmap. We should probably make
* some form of this public, but should think about the API.
*/ | Create a snapshot of the view into a bitmap. We should probably make some form of this public, but should think about the API | createSnapshot | {
"repo_name": "mateor/PDroidHistory",
"path": "frameworks/base/core/java/android/view/View.java",
"license": "gpl-3.0",
"size": 347830
} | [
"android.graphics.Bitmap",
"android.graphics.Canvas",
"android.util.Config"
] | import android.graphics.Bitmap; import android.graphics.Canvas; import android.util.Config; | import android.graphics.*; import android.util.*; | [
"android.graphics",
"android.util"
] | android.graphics; android.util; | 251,971 |
@Test
public void testAddParameterNoParam(){
String uriString = PATH;
assertEquals(PATH + "?label=2-5-8-12", addParameter(uriString, "label", "2-5-8-12", UTF8));
} | void function(){ String uriString = PATH; assertEquals(PATH + STR, addParameter(uriString, "label", STR, UTF8)); } | /**
* Test add parameter no param.
*/ | Test add parameter no param | testAddParameterNoParam | {
"repo_name": "venusdrogon/feilong-core",
"path": "src/test/java/com/feilong/core/net/paramutiltest/AddParameterTest.java",
"license": "apache-2.0",
"size": 4721
} | [
"com.feilong.core.net.ParamUtil",
"org.junit.Assert"
] | import com.feilong.core.net.ParamUtil; import org.junit.Assert; | import com.feilong.core.net.*; import org.junit.*; | [
"com.feilong.core",
"org.junit"
] | com.feilong.core; org.junit; | 1,304,574 |
public T caseNamedElement(NamedElement object)
{
return null;
} | T function(NamedElement object) { return null; } | /**
* Returns the result of interpreting the object as an instance of '<em>Named Element</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Named Element</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/ | Returns the result of interpreting the object as an instance of 'Named Element'. This implementation returns null; returning a non-null result will terminate the switch. | caseNamedElement | {
"repo_name": "fikovnik/ttc14-fixml-sigma",
"path": "ttc14-fixml-extension-2/src-gen/fr/inria/spirals/sigma/ttc14/fixml/objlang/util/ObjLangSwitch.java",
"license": "epl-1.0",
"size": 19617
} | [
"fr.inria.spirals.sigma.ttc14.fixml.objlang.NamedElement"
] | import fr.inria.spirals.sigma.ttc14.fixml.objlang.NamedElement; | import fr.inria.spirals.sigma.ttc14.fixml.objlang.*; | [
"fr.inria.spirals"
] | fr.inria.spirals; | 2,178,803 |
if (!(Minecraft.getMinecraft().ingameGUI instanceof GuiIngameStickyNote))
Minecraft.getMinecraft().ingameGUI = new GuiIngameStickyNote(Minecraft.getMinecraft());
} | if (!(Minecraft.getMinecraft().ingameGUI instanceof GuiIngameStickyNote)) Minecraft.getMinecraft().ingameGUI = new GuiIngameStickyNote(Minecraft.getMinecraft()); } | /**
* Event to change the ingameGui.
*
* @param event
*/ | Event to change the ingameGui | onClientTick | {
"repo_name": "AMinecraftplayer/stickyNote-1.11.2",
"path": "common/net/oskyedz/lib/event/ClientTickHandler.java",
"license": "lgpl-3.0",
"size": 604
} | [
"net.minecraft.client.Minecraft",
"net.oskyedz.lib.gui.override.GuiIngameStickyNote"
] | import net.minecraft.client.Minecraft; import net.oskyedz.lib.gui.override.GuiIngameStickyNote; | import net.minecraft.client.*; import net.oskyedz.lib.gui.override.*; | [
"net.minecraft.client",
"net.oskyedz.lib"
] | net.minecraft.client; net.oskyedz.lib; | 2,378,594 |
private void handleChecksumInconsistency(Collection<FileInfo> infos, String fileID) throws StepFailedException {
Map<String, List<String>> checksumMap = getChecksumMapping(infos);
String pillarID = findSingleInconsistentPillar(checksumMap);
createAuditForInconsistentChecksum(pillarID, fileID);
try {
if(pillarID == null) {
allPillarChecksumErrors++;
for(FileInfo info : infos) {
reporter.reportChecksumIssue(fileID, info.getPillarId());
}
} else {
pillarChecksumErrors.put(pillarID, pillarChecksumErrors.get(pillarID) + 1);
reporter.reportChecksumIssue(fileID, pillarID);
}
} catch (IOException e) {
throw new StepFailedException("Failed to report file: " + fileID + " as having a checksum issue", e);
}
} | void function(Collection<FileInfo> infos, String fileID) throws StepFailedException { Map<String, List<String>> checksumMap = getChecksumMapping(infos); String pillarID = findSingleInconsistentPillar(checksumMap); createAuditForInconsistentChecksum(pillarID, fileID); try { if(pillarID == null) { allPillarChecksumErrors++; for(FileInfo info : infos) { reporter.reportChecksumIssue(fileID, info.getPillarId()); } } else { pillarChecksumErrors.put(pillarID, pillarChecksumErrors.get(pillarID) + 1); reporter.reportChecksumIssue(fileID, pillarID); } } catch (IOException e) { throw new StepFailedException(STR + fileID + STR, e); } } | /**
* Locates the source of the checksum inconsistency.
* If only a single pillar is inconsistent with the majority, then it alone will be set to checksum error for the
* file.
* Otherwise all the pillars will be set to checksum error for the file.
* @param infos The FileInfos
* @param fileID The id of the file.
* @throws StepFailedException
*/ | Locates the source of the checksum inconsistency. If only a single pillar is inconsistent with the majority, then it alone will be set to checksum error for the file. Otherwise all the pillars will be set to checksum error for the file | handleChecksumInconsistency | {
"repo_name": "bitrepository/reference",
"path": "bitrepository-integrity-service/src/main/java/org/bitrepository/integrityservice/workflow/step/HandleChecksumValidationStep.java",
"license": "lgpl-2.1",
"size": 8907
} | [
"java.io.IOException",
"java.util.Collection",
"java.util.List",
"java.util.Map",
"org.bitrepository.integrityservice.cache.FileInfo",
"org.bitrepository.service.exception.StepFailedException"
] | import java.io.IOException; import java.util.Collection; import java.util.List; import java.util.Map; import org.bitrepository.integrityservice.cache.FileInfo; import org.bitrepository.service.exception.StepFailedException; | import java.io.*; import java.util.*; import org.bitrepository.integrityservice.cache.*; import org.bitrepository.service.exception.*; | [
"java.io",
"java.util",
"org.bitrepository.integrityservice",
"org.bitrepository.service"
] | java.io; java.util; org.bitrepository.integrityservice; org.bitrepository.service; | 509,249 |
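The majority-vote helper findSingleInconsistentPillar used in the record above is not shown here; the following is only a sketch, under the assumption that the map goes from checksum value to the pillars reporting it, of how a lone disagreeing pillar could be picked out (null meaning no single pillar can be blamed):

import java.util.List;
import java.util.Map;

class SingleInconsistentPillarSketch {
    // Returns the lone disagreeing pillar, or null when no single pillar can be blamed.
    static String findSingleInconsistentPillar(Map<String, List<String>> checksumToPillars) {
        if (checksumToPillars.size() != 2) {
            return null;               // unanimous, or more than two competing checksums
        }
        String lonePillar = null;
        for (List<String> pillars : checksumToPillars.values()) {
            if (pillars.size() == 1) {
                if (lonePillar != null) {
                    return null;       // two one-pillar groups, so no clear majority
                }
                lonePillar = pillars.get(0);
            }
        }
        return lonePillar;
    }
}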
@Generated
@CVariable()
@MappedReturn(ObjCStringMapper.class)
public static native String UIMenuSubstitutionOptions(); | @CVariable() @MappedReturn(ObjCStringMapper.class) static native String function(); | /**
* Smart Copy, Smart Paste, Smart Quotes, and other substitution options menu
*/ | Smart Copy, Smart Paste, Smart Quotes, and other substitution options menu | UIMenuSubstitutionOptions | {
"repo_name": "multi-os-engine/moe-core",
"path": "moe.apple/moe.platform.ios/src/main/java/apple/uikit/c/UIKit.java",
"license": "apache-2.0",
"size": 134869
} | [
"org.moe.natj.c.ann.CVariable",
"org.moe.natj.general.ann.MappedReturn",
"org.moe.natj.objc.map.ObjCStringMapper"
] | import org.moe.natj.c.ann.CVariable; import org.moe.natj.general.ann.MappedReturn; import org.moe.natj.objc.map.ObjCStringMapper; | import org.moe.natj.c.ann.*; import org.moe.natj.general.ann.*; import org.moe.natj.objc.map.*; | [
"org.moe.natj"
] | org.moe.natj; | 1,741,773 |
public Map<String, Node> getDefineReplacements() {
return getReplacementsHelper(defineReplacements);
} | Map<String, Node> function() { return getReplacementsHelper(defineReplacements); } | /**
* Returns the map of define replacements.
*/ | Returns the map of define replacements | getDefineReplacements | {
"repo_name": "PengXing/closure-compiler",
"path": "src/com/google/javascript/jscomp/CompilerOptions.java",
"license": "apache-2.0",
"size": 67488
} | [
"com.google.javascript.rhino.Node",
"java.util.Map"
] | import com.google.javascript.rhino.Node; import java.util.Map; | import com.google.javascript.rhino.*; import java.util.*; | [
"com.google.javascript",
"java.util"
] | com.google.javascript; java.util; | 956,133 |
@DELETE
@Path("{foreignSource}/nodes/{foreignId}")
@Transactional
public Response deleteNode(@PathParam("foreignSource") String foreignSource, @PathParam("foreignId") String foreignId) {
m_accessService.deleteNode(foreignSource, foreignId);
return Response.ok().build();
} | @Path(STR) Response function(@PathParam(STR) String foreignSource, @PathParam(STR) String foreignId) { m_accessService.deleteNode(foreignSource, foreignId); return Response.ok().build(); } | /**
* Delete the node with the given foreign ID for the specified foreign source
*
* @param foreignSource a {@link java.lang.String} object.
* @param foreignId a {@link java.lang.String} object.
* @return a {@link javax.ws.rs.core.Response} object.
*/ | Delete the node with the given foreign ID for the specified foreign source | deleteNode | {
"repo_name": "peternixon/opennms-mirror",
"path": "opennms-webapp/src/main/java/org/opennms/web/rest/RequisitionRestService.java",
"license": "gpl-2.0",
"size": 33150
} | [
"javax.ws.rs.Path",
"javax.ws.rs.PathParam",
"javax.ws.rs.core.Response"
] | import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.core.Response; | import javax.ws.rs.*; import javax.ws.rs.core.*; | [
"javax.ws"
] | javax.ws; | 1,813,397 |
public void addImageShare(@Nonnull String providerImageId, @Nonnull String accountNumber) throws CloudException, InternalException; | void function(@Nonnull String providerImageId, @Nonnull String accountNumber) throws CloudException, InternalException; | /**
* Adds the specified account number to the list of accounts with which this image is shared.
* @param providerImageId the unique ID of the image to be shared
* @param accountNumber the account number with which the image will be shared
* @throws CloudException an error occurred with the cloud provider
* @throws InternalException a local error occurred in the Dasein Cloud implementation
* @throws OperationNotSupportedException the cloud does not support sharing images with other accounts
*/ | Adds the specified account number to the list of accounts with which this image is shared | addImageShare | {
"repo_name": "maksimov/dasein-cloud-core",
"path": "src/main/java/org/dasein/cloud/compute/MachineImageSupport.java",
"license": "apache-2.0",
"size": 38481
} | [
"javax.annotation.Nonnull",
"org.dasein.cloud.CloudException",
"org.dasein.cloud.InternalException"
] | import javax.annotation.Nonnull; import org.dasein.cloud.CloudException; import org.dasein.cloud.InternalException; | import javax.annotation.*; import org.dasein.cloud.*; | [
"javax.annotation",
"org.dasein.cloud"
] | javax.annotation; org.dasein.cloud; | 11,077 |
private int setOfflineInZooKeeper(final RegionState state, final ServerName destination) {
if (!state.isClosed() && !state.isOffline()) {
String msg = "Unexpected state : " + state + " .. Cannot transit it to OFFLINE.";
this.server.abort(msg, new IllegalStateException(msg));
return -1;
}
regionStates.updateRegionState(state.getRegion(), State.OFFLINE);
int versionOfOfflineNode;
try {
// get the version after setting the znode to OFFLINE
versionOfOfflineNode = ZKAssign.createOrForceNodeOffline(watcher,
state.getRegion(), destination);
if (versionOfOfflineNode == -1) {
LOG.warn("Attempted to create/force node into OFFLINE state before "
+ "completing assignment but failed to do so for " + state);
return -1;
}
} catch (KeeperException e) {
server.abort("Unexpected ZK exception creating/setting node OFFLINE", e);
return -1;
}
return versionOfOfflineNode;
} | int function(final RegionState state, final ServerName destination) { if (!state.isClosed() && !state.isOffline()) { String msg = STR + state + STR; this.server.abort(msg, new IllegalStateException(msg)); return -1; } regionStates.updateRegionState(state.getRegion(), State.OFFLINE); int versionOfOfflineNode; try { versionOfOfflineNode = ZKAssign.createOrForceNodeOffline(watcher, state.getRegion(), destination); if (versionOfOfflineNode == -1) { LOG.warn(STR + STR + state); return -1; } } catch (KeeperException e) { server.abort(STR, e); return -1; } return versionOfOfflineNode; } | /**
* Set region as OFFLINED up in zookeeper
*
* @param state
* @return the version of the offline node if setting of the OFFLINE node was
* successful, -1 otherwise.
*/ | Set region as OFFLINED up in zookeeper | setOfflineInZooKeeper | {
"repo_name": "baishuo/hbase-1.0.0-cdh5.4.7_baishuo",
"path": "hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java",
"license": "apache-2.0",
"size": 174028
} | [
"org.apache.hadoop.hbase.ServerName",
"org.apache.hadoop.hbase.master.RegionState",
"org.apache.hadoop.hbase.zookeeper.ZKAssign",
"org.apache.zookeeper.KeeperException"
] | import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.master.RegionState; import org.apache.hadoop.hbase.zookeeper.ZKAssign; import org.apache.zookeeper.KeeperException; | import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.master.*; import org.apache.hadoop.hbase.zookeeper.*; import org.apache.zookeeper.*; | [
"org.apache.hadoop",
"org.apache.zookeeper"
] | org.apache.hadoop; org.apache.zookeeper; | 1,716,290 |
private int getPacketSize(Format codecFormat, int milliseconds) throws IllegalArgumentException {
String encoding = codecFormat.getEncoding();
if (encoding.equalsIgnoreCase(AudioFormat.G729) ||
encoding.equalsIgnoreCase(AudioFormat.G729_RTP)) {
return milliseconds * 1; // 1 byte per millisec
} else if (encoding.equalsIgnoreCase(AudioFormat.ULAW) ||
encoding.equalsIgnoreCase(AudioFormat.ULAW_RTP)) {
return milliseconds * 8;
} else {
throw new IllegalArgumentException("Unknown codec type");
}
} | int function(Format codecFormat, int milliseconds) throws IllegalArgumentException { String encoding = codecFormat.getEncoding(); if (encoding.equalsIgnoreCase(AudioFormat.G729) encoding.equalsIgnoreCase(AudioFormat.G729_RTP)) { return milliseconds * 1; } else if (encoding.equalsIgnoreCase(AudioFormat.ULAW) encoding.equalsIgnoreCase(AudioFormat.ULAW_RTP)) { return milliseconds * 8; } else { throw new IllegalArgumentException(STR); } } | /**
* Get the best packet size for a given codec and a codec rate
*
* @param codecFormat
* @param milliseconds
* @return
* @throws IllegalArgumentException
*/ | Get the best packet size for a given codec and a codec rate | getPacketSize | {
"repo_name": "joshuairl/toothchat-client",
"path": "src/plugins/sip/src/java/net/java/sipmack/media/VideoChannel.java",
"license": "apache-2.0",
"size": 18324
} | [
"javax.media.Format",
"javax.media.format.AudioFormat"
] | import javax.media.Format; import javax.media.format.AudioFormat; | import javax.media.*; import javax.media.format.*; | [
"javax.media"
] | javax.media; | 645,408 |
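As a worked illustration of the byte-per-millisecond rates used above (illustration only, not code from the source), a typical 20 ms packetization interval gives:

int millis = 20;              // a common RTP packetization interval
int g729Bytes = millis * 1;   // G.729 at 8 kbit/s: 1 byte per ms  -> 20 bytes of payload
int ulawBytes = millis * 8;   // G.711 u-law at 64 kbit/s: 8 bytes per ms -> 160 bytes of payload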
protected void setUp() throws Exception {
super.setUp();
testsConfig = TestsConfig.getTestsConfig();
// create a clean directory for the config files and the sample
// repository
localTmp = new File(testsConfig.rootDir, "local_tmp");
if (localTmp.exists())
FileUtils.removeDirectoryWithContent(localTmp);
localTmp.mkdir();
conf = new File(localTmp, "config");
conf.mkdir();
// create and configure the needed subversion objects
client = SVNClientAdapterFactory
.createSVNClient(testsConfig.clientType);
client.setUsername(TEST_USER);
client.setPassword(TEST_USERPASS);
// client.setConfigDirectory(conf.getAbsolutePath());
clientAdmin = SVNClientAdapterFactory
.createSVNClient(testsConfig.adminClientType);
clientAdmin.setUsername(TEST_USER);
clientAdmin.setPassword(TEST_USERPASS);
startServer();
} | void function() throws Exception { super.setUp(); testsConfig = TestsConfig.getTestsConfig(); localTmp = new File(testsConfig.rootDir, STR); if (localTmp.exists()) FileUtils.removeDirectoryWithContent(localTmp); localTmp.mkdir(); conf = new File(localTmp, STR); conf.mkdir(); client = SVNClientAdapterFactory .createSVNClient(testsConfig.clientType); client.setUsername(TEST_USER); client.setPassword(TEST_USERPASS); clientAdmin = SVNClientAdapterFactory .createSVNClient(testsConfig.adminClientType); clientAdmin.setUsername(TEST_USER); clientAdmin.setPassword(TEST_USERPASS); startServer(); } | /**
* Standard initialization of one test
*
* @throws Exception
*/ | Standard initialization of one test | setUp | {
"repo_name": "subclipse/svnclientadapter",
"path": "tests/org/tigris/subversion/svnclientadapter/testUtils/SVNTest.java",
"license": "apache-2.0",
"size": 8368
} | [
"java.io.File",
"org.tigris.subversion.svnclientadapter.SVNClientAdapterFactory"
] | import java.io.File; import org.tigris.subversion.svnclientadapter.SVNClientAdapterFactory; | import java.io.*; import org.tigris.subversion.svnclientadapter.*; | [
"java.io",
"org.tigris.subversion"
] | java.io; org.tigris.subversion; | 2,123,237 |
public Node createOptionalParameters(JSType... parameterTypes) {
FunctionParamBuilder builder = new FunctionParamBuilder(this);
builder.addOptionalParams(parameterTypes);
return builder.build();
} | Node function(JSType... parameterTypes) { FunctionParamBuilder builder = new FunctionParamBuilder(this); builder.addOptionalParams(parameterTypes); return builder.build(); } | /**
* Creates a tree hierarchy representing a typed parameter list in which
* every parameter is optional.
*/ | Creates a tree hierarchy representing a typed parameter list in which every parameter is optional | createOptionalParameters | {
"repo_name": "kencheung/js-symbolic-executor",
"path": "closure-compiler/src/com/google/javascript/rhino/jstype/JSTypeRegistry.java",
"license": "apache-2.0",
"size": 55020
} | [
"com.google.javascript.rhino.Node"
] | import com.google.javascript.rhino.Node; | import com.google.javascript.rhino.*; | [
"com.google.javascript"
] | com.google.javascript; | 1,324,615 |
public void updateTimestamp (String columnName, Timestamp columnValue) throws SQLException
{
validateResultSet();
resultSet_.updateTimestamp(columnName, columnValue);
eventSupport_.fireRowChanged(new RowSetEvent(this));
} | void function (String columnName, Timestamp columnValue) throws SQLException { validateResultSet(); resultSet_.updateTimestamp(columnName, columnValue); eventSupport_.fireRowChanged(new RowSetEvent(this)); } | /**
* Updates a column in the current row using a java.sql.Timestamp value.
* The driver converts this to an SQL TIMESTAMP value.
*
* <p>This does not update the database directly. Instead, it updates
* a copy of the data in memory. Call updateRow() or insertRow() to
* update the database.
*
* @param columnName The column name.
* @param columnValue The column value or null to update
* the value to SQL NULL.
*
* @exception SQLException If the result set is not open,
* the result set is not updatable,
* the cursor is not positioned on a row,
* the column name is not found, or the
* requested conversion is not valid.
**/ | Updates a column in the current row using a java.sql.Timestamp value. The driver converts this to an SQL TIMESTAMP value. This does not update the database directly. Instead, it updates a copy of the data in memory. Call updateRow() or insertRow() to update the database | updateTimestamp | {
"repo_name": "piguangming/jt400",
"path": "jdbc40/com/ibm/as400/access/AS400JDBCRowSet.java",
"license": "epl-1.0",
"size": 308525
} | [
"java.sql.SQLException",
"java.sql.Timestamp",
"javax.sql.RowSetEvent"
] | import java.sql.SQLException; import java.sql.Timestamp; import javax.sql.RowSetEvent; | import java.sql.*; import javax.sql.*; | [
"java.sql",
"javax.sql"
] | java.sql; javax.sql; | 522,740 |
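A minimal usage sketch of the update-then-commit pattern described above; the column name is hypothetical and the row set is assumed to be open and updatable:

import com.ibm.as400.access.AS400JDBCRowSet;
import java.sql.SQLException;
import java.sql.Timestamp;

class RowSetUpdateSketch {
    // "LAST_MODIFIED" is a hypothetical column name.
    static void touchLastModified(AS400JDBCRowSet rowSet) throws SQLException {
        rowSet.absolute(1);                                              // position on the first row
        rowSet.updateTimestamp("LAST_MODIFIED", new Timestamp(System.currentTimeMillis()));
        rowSet.updateRow();                                              // only now is the database updated
    }
}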
public void event(AuditEvent auditEvent, String string1, String string2, String string3) {
AuditLog auditLog = new AuditLog();
// For this events I put the username into string1 because UserContext is not created yet
if (auditEvent.equals(AuditEvent.LoginSuccessful) || auditEvent.equals(AuditEvent.LogoutSuccessful)) {
auditLog.setUsername(string1);
this.username = string1;
string1 = null;
}
// For those events force to use anonymous user
else if (auditEvent.equals(AuditEvent.LoginFailed) || auditEvent.equals(AuditEvent.ApplicationStartup) || auditEvent.equals(AuditEvent.ApplicationShutdown)) {
this.username = UserContext.ANONYMOUS_USER;
}
else {
auditLog.setUsername(UserContext.getUsername());
}
auditLog.setEvent(auditEvent.name());
auditLog.setStringAttribute1(string1);
auditLog.setStringAttribute2(string2);
auditLog.setStringAttribute3(string3);
this.log(auditLog);
} | void function(AuditEvent auditEvent, String string1, String string2, String string3) { AuditLog auditLog = new AuditLog(); if (auditEvent.equals(AuditEvent.LoginSuccessful) auditEvent.equals(AuditEvent.LogoutSuccessful)) { auditLog.setUsername(string1); this.username = string1; string1 = null; } else if (auditEvent.equals(AuditEvent.LoginFailed) auditEvent.equals(AuditEvent.ApplicationStartup) auditEvent.equals(AuditEvent.ApplicationShutdown)) { this.username = UserContext.ANONYMOUS_USER; } else { auditLog.setUsername(UserContext.getUsername()); } auditLog.setEvent(auditEvent.name()); auditLog.setStringAttribute1(string1); auditLog.setStringAttribute2(string2); auditLog.setStringAttribute3(string3); this.log(auditLog); } | /**
* Create new audit log based on event data
* @param auditEvent audit event
* @param string1 string1
* @param string2 string2
* @param string3 string3
*/ | Create new audit log based on event data | event | {
"repo_name": "ddRPB/rpb",
"path": "radplanbio-core/src/main/java/de/dktk/dd/rpb/core/service/AuditLogService.java",
"license": "gpl-3.0",
"size": 6712
} | [
"de.dktk.dd.rpb.core.context.UserContext",
"de.dktk.dd.rpb.core.domain.admin.AuditLog"
] | import de.dktk.dd.rpb.core.context.UserContext; import de.dktk.dd.rpb.core.domain.admin.AuditLog; | import de.dktk.dd.rpb.core.context.*; import de.dktk.dd.rpb.core.domain.admin.*; | [
"de.dktk.dd"
] | de.dktk.dd; | 2,005,796 |
EReference getDocumentRoot_MinLength(); | EReference getDocumentRoot_MinLength(); | /**
* Returns the meta object for the containment reference '{@link org.w3._2001.schema.DocumentRoot#getMinLength <em>Min Length</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the containment reference '<em>Min Length</em>'.
* @see org.w3._2001.schema.DocumentRoot#getMinLength()
* @see #getDocumentRoot()
* @generated
*/ | Returns the meta object for the containment reference '<code>org.w3._2001.schema.DocumentRoot#getMinLength Min Length</code>'. | getDocumentRoot_MinLength | {
"repo_name": "geotools/geotools",
"path": "modules/ogc/net.opengis.wps/src/org/w3/_2001/schema/SchemaPackage.java",
"license": "lgpl-2.1",
"size": 433240
} | [
"org.eclipse.emf.ecore.EReference"
] | import org.eclipse.emf.ecore.EReference; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 1,878,452 |
public Map<ChunkChecksum,MultiChunkId> getMultiChunkIdsByChecksums(List<ChunkChecksum> chunkChecksums) {
// Gather a unique array of checksum strings (required for query!)
Set<ChunkChecksum> chunkChecksumSet = new HashSet<ChunkChecksum>(chunkChecksums);
String[] checksums = new String[chunkChecksumSet.size()];
int i = 0;
for (ChunkChecksum checksum : chunkChecksumSet) {
checksums[i] = checksum.toString();
i++;
}
// Execute query
Map<ChunkChecksum, MultiChunkId> result = new HashMap<ChunkChecksum, MultiChunkId>();
try (PreparedStatement preparedStatement = getStatement("multichunk.select.all.getMultiChunkIdForChunks.sql")) {
preparedStatement.setArray(1, connection.createArrayOf("varchar", checksums));
try (ResultSet resultSet = preparedStatement.executeQuery()) {
while (resultSet.next()) {
result.put(ChunkChecksum.parseChunkChecksum(resultSet.getString("chunk_checksum")),
MultiChunkId.parseMultiChunkId(resultSet.getString("multichunk_id")));
}
}
return result;
}
catch (SQLException e) {
throw new RuntimeException(e);
}
}
| Map<ChunkChecksum,MultiChunkId> function(List<ChunkChecksum> chunkChecksums) { Set<ChunkChecksum> chunkChecksumSet = new HashSet<ChunkChecksum>(chunkChecksums); String[] checksums = new String[chunkChecksumSet.size()]; int i = 0; for (ChunkChecksum checksum : chunkChecksumSet) { checksums[i] = checksum.toString(); i++; } Map<ChunkChecksum, MultiChunkId> result = new HashMap<ChunkChecksum, MultiChunkId>(); try (PreparedStatement preparedStatement = getStatement(STR)) { preparedStatement.setArray(1, connection.createArrayOf(STR, checksums)); try (ResultSet resultSet = preparedStatement.executeQuery()) { while (resultSet.next()) { result.put(ChunkChecksum.parseChunkChecksum(resultSet.getString(STR)), MultiChunkId.parseMultiChunkId(resultSet.getString(STR))); } } return result; } catch (SQLException e) { throw new RuntimeException(e); } } | /**
* Note: This method selects also {@link DatabaseVersionStatus#DIRTY DIRTY}.
*/ | Note: This method selects also <code>DatabaseVersionStatus#DIRTY DIRTY</code> | getMultiChunkIdsByChecksums | {
"repo_name": "syncany/syncany-plugin-dropbox",
"path": "core/syncany-lib/src/main/java/org/syncany/database/dao/MultiChunkSqlDao.java",
"license": "gpl-3.0",
"size": 11974
} | [
"java.sql.PreparedStatement",
"java.sql.ResultSet",
"java.sql.SQLException",
"java.util.HashMap",
"java.util.HashSet",
"java.util.List",
"java.util.Map",
"java.util.Set",
"org.syncany.database.ChunkEntry",
"org.syncany.database.MultiChunkEntry"
] | import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.syncany.database.ChunkEntry; import org.syncany.database.MultiChunkEntry; | import java.sql.*; import java.util.*; import org.syncany.database.*; | [
"java.sql",
"java.util",
"org.syncany.database"
] | java.sql; java.util; org.syncany.database; | 132,102 |
Bbox getBoundingBox(); | Bbox getBoundingBox(); | /**
* Get the extent for this layer. This bounding box is expressed in the CRS as returned by
* {@link #getBoundingBoxCrs()}.
*
* @return Get the extent for this layer.
*/ | Get the extent for this layer. This bounding box is expressed in the CRS as returned by <code>#getBoundingBoxCrs()</code> | getBoundingBox | {
"repo_name": "geomajas/geomajas-project-client-gwt2",
"path": "plugin/wms/wms/src/main/java/org/geomajas/gwt2/plugin/wms/client/capabilities/WmsLayerInfo.java",
"license": "agpl-3.0",
"size": 3083
} | [
"org.geomajas.geometry.Bbox"
] | import org.geomajas.geometry.Bbox; | import org.geomajas.geometry.*; | [
"org.geomajas.geometry"
] | org.geomajas.geometry; | 2,710,449 |
public void addAPIRevisionDeployment(String apiRevisionId, List<APIRevisionDeployment> apiRevisionDeployments)
throws APIManagementException {
try (Connection connection = APIMgtDBUtil.getConnection()) {
try {
connection.setAutoCommit(false);
// Adding to AM_DEPLOYMENT_REVISION_MAPPING table
PreparedStatement statement = connection
.prepareStatement(SQLConstants.APIRevisionSqlConstants.ADD_API_REVISION_DEPLOYMENT_MAPPING);
for (APIRevisionDeployment apiRevisionDeployment : apiRevisionDeployments) {
String envName = apiRevisionDeployment.getDeployment();
String vhost = apiRevisionDeployment.getVhost();
// set VHost as null, if it is the default vhost of the read only environment
statement.setString(1, apiRevisionDeployment.getDeployment());
statement.setString(2, VHostUtils.resolveIfDefaultVhostToNull(envName, vhost));
statement.setString(3, apiRevisionId);
statement.setBoolean(4, apiRevisionDeployment.isDisplayOnDevportal());
statement.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
statement.addBatch();
}
statement.executeBatch();
connection.commit();
} catch (SQLException e) {
connection.rollback();
handleException("Failed to add API Revision Deployment Mapping entry for Revision UUID "
+ apiRevisionId, e);
}
} catch (SQLException e) {
handleException("Failed to add API Revision Deployment Mapping entry for Revision UUID " + apiRevisionId,
e);
}
} | void function(String apiRevisionId, List<APIRevisionDeployment> apiRevisionDeployments) throws APIManagementException { try (Connection connection = APIMgtDBUtil.getConnection()) { try { connection.setAutoCommit(false); PreparedStatement statement = connection .prepareStatement(SQLConstants.APIRevisionSqlConstants.ADD_API_REVISION_DEPLOYMENT_MAPPING); for (APIRevisionDeployment apiRevisionDeployment : apiRevisionDeployments) { String envName = apiRevisionDeployment.getDeployment(); String vhost = apiRevisionDeployment.getVhost(); statement.setString(1, apiRevisionDeployment.getDeployment()); statement.setString(2, VHostUtils.resolveIfDefaultVhostToNull(envName, vhost)); statement.setString(3, apiRevisionId); statement.setBoolean(4, apiRevisionDeployment.isDisplayOnDevportal()); statement.setTimestamp(5, new Timestamp(System.currentTimeMillis())); statement.addBatch(); } statement.executeBatch(); connection.commit(); } catch (SQLException e) { connection.rollback(); handleException(STR + apiRevisionId, e); } } catch (SQLException e) { handleException(STR + apiRevisionId, e); } } | /**
* Adds an API revision Deployment mapping record to the database
*
* @param apiRevisionId uuid of the revision
* @param apiRevisionDeployments content of the revision deployment mapping objects
* @throws APIManagementException if an error occurs when adding a new API revision
*/ | Adds an API revision Deployment mapping record to the database | addAPIRevisionDeployment | {
"repo_name": "fazlan-nazeem/carbon-apimgt",
"path": "components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/dao/ApiMgtDAO.java",
"license": "apache-2.0",
"size": 821235
} | [
"java.sql.Connection",
"java.sql.PreparedStatement",
"java.sql.SQLException",
"java.sql.Timestamp",
"java.util.List",
"org.wso2.carbon.apimgt.api.APIManagementException",
"org.wso2.carbon.apimgt.api.model.APIRevisionDeployment",
"org.wso2.carbon.apimgt.impl.dao.constants.SQLConstants",
"org.wso2.carbon.apimgt.impl.utils.APIMgtDBUtil",
"org.wso2.carbon.apimgt.impl.utils.VHostUtils"
] | import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.SQLException; import java.sql.Timestamp; import java.util.List; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.api.model.APIRevisionDeployment; import org.wso2.carbon.apimgt.impl.dao.constants.SQLConstants; import org.wso2.carbon.apimgt.impl.utils.APIMgtDBUtil; import org.wso2.carbon.apimgt.impl.utils.VHostUtils; | import java.sql.*; import java.util.*; import org.wso2.carbon.apimgt.api.*; import org.wso2.carbon.apimgt.api.model.*; import org.wso2.carbon.apimgt.impl.dao.constants.*; import org.wso2.carbon.apimgt.impl.utils.*; | [
"java.sql",
"java.util",
"org.wso2.carbon"
] | java.sql; java.util; org.wso2.carbon; | 1,115,974 |
public String getCachePage(String gcCode) throws URISyntaxException, IOException {
String pageContent = null;
HttpGet get = new HttpGet("http://www.geocaching.com/geocache/" + gcCode);
pageContent = executeMethod(get);
return pageContent;
} | String function(String gcCode) throws URISyntaxException, IOException { String pageContent = null; HttpGet get = new HttpGet("http: pageContent = executeMethod(get); return pageContent; } | /**
 * <p>Read the HTML content of a geocache page based on its GC code</p>
* <p>This will make a GET request to http://www.geocaching.com/geocache/{gcCode} and return the HTML content</p>
 * */ | Read the HTML content of a geocache page based on its GC code This will make a GET request to HREF{gcCode} and return the HTML content | getCachePage | {
"repo_name": "ecornely/gpx-jconverter",
"path": "src/main/java/be/ecornely/gpx/Downloader.java",
"license": "gpl-3.0",
"size": 7304
} | [
"java.io.IOException",
"java.net.URISyntaxException",
"org.apache.http.client.methods.HttpGet"
] | import java.io.IOException; import java.net.URISyntaxException; import org.apache.http.client.methods.HttpGet; | import java.io.*; import java.net.*; import org.apache.http.client.methods.*; | [
"java.io",
"java.net",
"org.apache.http"
] | java.io; java.net; org.apache.http; | 1,487,718 |
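A hypothetical call site for getCachePage; the GC code below is a placeholder, and the resulting GET would go to http://www.geocaching.com/geocache/GC1ABCD:

import be.ecornely.gpx.Downloader;
import java.io.IOException;
import java.net.URISyntaxException;

class CachePageSketch {
    // "GC1ABCD" is a placeholder geocache code, not a value from the source.
    static String fetchSample(Downloader downloader) throws URISyntaxException, IOException {
        return downloader.getCachePage("GC1ABCD");
    }
}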
public Set<T> getCurrentValues() {
return stateSynchronizer.getState().getCurrentValues();
} | Set<T> function() { return stateSynchronizer.getState().getCurrentValues(); } | /**
* Returns the current values in the set.
*/ | Returns the current values in the set | getCurrentValues | {
"repo_name": "pravega/pravega",
"path": "client/src/test/java/io/pravega/client/state/examples/SetSynchronizer.java",
"license": "apache-2.0",
"size": 7537
} | [
"java.util.Set"
] | import java.util.Set; | import java.util.*; | [
"java.util"
] | java.util; | 1,080,327 |
@Test
public void testScriptContainer_1()
throws Exception {
ScriptContainer result = new ScriptContainer();
assertNotNull(result);
assertEquals(null, result.getName());
assertEquals(0, result.getRuntime());
assertEquals(null, result.getId());
assertEquals(null, result.getModified());
assertEquals(null, result.getCreated());
assertEquals(null, result.getCreator());
assertEquals(null, result.getProductName());
assertEquals(null, result.getComments());
} | void function() throws Exception { ScriptContainer result = new ScriptContainer(); assertNotNull(result); assertEquals(null, result.getName()); assertEquals(0, result.getRuntime()); assertEquals(null, result.getId()); assertEquals(null, result.getModified()); assertEquals(null, result.getCreated()); assertEquals(null, result.getCreator()); assertEquals(null, result.getProductName()); assertEquals(null, result.getComments()); } | /**
* Run the ScriptContainer() constructor test.
*
* @throws Exception
*
* @generatedBy CodePro at 12/15/14 1:34 PM
*/ | Run the ScriptContainer() constructor test | testScriptContainer_1 | {
"repo_name": "intuit/Tank",
"path": "data_model/src/test/java/com/intuit/tank/project/ScriptContainerTest.java",
"license": "epl-1.0",
"size": 17554
} | [
"com.intuit.tank.project.ScriptContainer",
"org.junit.jupiter.api.Assertions"
] | import com.intuit.tank.project.ScriptContainer; import org.junit.jupiter.api.Assertions; | import com.intuit.tank.project.*; import org.junit.jupiter.api.*; | [
"com.intuit.tank",
"org.junit.jupiter"
] | com.intuit.tank; org.junit.jupiter; | 254,608 |
protected List<BlockedItem> getBlockedItems() {
return new ArrayList<BlockedItem>(snapshot.blockedProjects);
} | List<BlockedItem> function() { return new ArrayList<BlockedItem>(snapshot.blockedProjects); } | /**
* Gets the snapshot of all {@link BlockedItem}s.
*/ | Gets the snapshot of all <code>BlockedItem</code>s | getBlockedItems | {
"repo_name": "escoem/jenkins",
"path": "core/src/main/java/hudson/model/Queue.java",
"license": "mit",
"size": 111723
} | [
"java.util.ArrayList",
"java.util.List"
] | import java.util.ArrayList; import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,264,199 |
@Override
public Adapter createSmooksMediatorInputConnectorAdapter() {
if (smooksMediatorInputConnectorItemProvider == null) {
smooksMediatorInputConnectorItemProvider = new SmooksMediatorInputConnectorItemProvider(this);
}
return smooksMediatorInputConnectorItemProvider;
}
protected SmooksMediatorOutputConnectorItemProvider smooksMediatorOutputConnectorItemProvider;
| Adapter function() { if (smooksMediatorInputConnectorItemProvider == null) { smooksMediatorInputConnectorItemProvider = new SmooksMediatorInputConnectorItemProvider(this); } return smooksMediatorInputConnectorItemProvider; } protected SmooksMediatorOutputConnectorItemProvider smooksMediatorOutputConnectorItemProvider; | /**
* This creates an adapter for a {@link org.wso2.developerstudio.eclipse.gmf.esb.SmooksMediatorInputConnector}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/ | This creates an adapter for a <code>org.wso2.developerstudio.eclipse.gmf.esb.SmooksMediatorInputConnector</code>. | createSmooksMediatorInputConnectorAdapter | {
"repo_name": "rajeevanv89/developer-studio",
"path": "esb/org.wso2.developerstudio.eclipse.gmf.esb.edit/src/org/wso2/developerstudio/eclipse/gmf/esb/provider/EsbItemProviderAdapterFactory.java",
"license": "apache-2.0",
"size": 286852
} | [
"org.eclipse.emf.common.notify.Adapter"
] | import org.eclipse.emf.common.notify.Adapter; | import org.eclipse.emf.common.notify.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 1,344,873 |
public void normalizeProperties() {
CmsClientProperty.removeEmptyProperties(m_ownProperties);
if (m_defaultFileProperties != null) {
CmsClientProperty.removeEmptyProperties(m_defaultFileProperties);
}
} | void function() { CmsClientProperty.removeEmptyProperties(m_ownProperties); if (m_defaultFileProperties != null) { CmsClientProperty.removeEmptyProperties(m_defaultFileProperties); } } | /**
* Removes empty properties.<p>
*/ | Removes empty properties | normalizeProperties | {
"repo_name": "it-tavis/opencms-core",
"path": "src/org/opencms/ade/sitemap/shared/CmsClientSitemapEntry.java",
"license": "lgpl-2.1",
"size": 28821
} | [
"org.opencms.gwt.shared.property.CmsClientProperty"
] | import org.opencms.gwt.shared.property.CmsClientProperty; | import org.opencms.gwt.shared.property.*; | [
"org.opencms.gwt"
] | org.opencms.gwt; | 1,993,381 |
@MapInProperties(propertyPrefix = "txTimeouts", propertyType = Integer.class)
void setTxTimeouts(Map<String, Integer> map); | @MapInProperties(propertyPrefix = STR, propertyType = Integer.class) void setTxTimeouts(Map<String, Integer> map); | /**
* Timeouts for each method pattern, * is wildcard
*/ | Timeouts for each method pattern, * is wildcard | setTxTimeouts | {
"repo_name": "mareknovotny/windup",
"path": "rules-java-ee/addon/src/main/java/org/jboss/windup/rules/apps/javaee/model/EjbSessionBeanModel.java",
"license": "epl-1.0",
"size": 4745
} | [
"java.util.Map",
"org.jboss.windup.graph.MapInProperties"
] | import java.util.Map; import org.jboss.windup.graph.MapInProperties; | import java.util.*; import org.jboss.windup.graph.*; | [
"java.util",
"org.jboss.windup"
] | java.util; org.jboss.windup; | 1,234,044 |
public static Classifier create(
AssetManager assetManager,
String modelFilename,
String labelFilename,
int inputSize,
int imageMean,
float imageStd,
String inputName,
String outputName) {
TensorFlowImageClassifier c = new TensorFlowImageClassifier();
c.inputName = inputName;
c.outputName = outputName;
// Read the label names into memory.
// TODO(andrewharp): make this handle non-assets.
final boolean hasAssetPrefix = labelFilename.startsWith("file:///android_asset/");
String actualFilename = hasAssetPrefix ? labelFilename.split("file:///android_asset/")[1] : labelFilename;
Log.i(TAG, "Reading labels from: " + actualFilename);
BufferedReader br = null;
try {
br = new BufferedReader(new InputStreamReader(assetManager.open(actualFilename)));
String line;
while ((line = br.readLine()) != null) {
c.labels.add(line);
}
br.close();
} catch (IOException e) {
if (hasAssetPrefix) {
throw new RuntimeException("Problem reading label file!" , e);
}
try {
br = new BufferedReader(new InputStreamReader(new FileInputStream(actualFilename)));
String line;
while ((line = br.readLine()) != null) {
c.labels.add(line);
}
br.close();
} catch (IOException e2) {
throw new RuntimeException("Problem reading label file!" , e);
}
}
c.inferenceInterface = new TensorFlowInferenceInterface();
if (c.inferenceInterface.initializeTensorFlow(assetManager, modelFilename) != 0) {
throw new RuntimeException("TF initialization failed");
}
// The shape of the output is [N, NUM_CLASSES], where N is the batch size.
final Operation operation = c.inferenceInterface.graph().operation(outputName);
if (operation == null) {
throw new RuntimeException("Node '" + outputName + "' does not exist in model '"
+ modelFilename + "'");
}
final int numClasses = (int) operation.output(0).shape().size(1);
Log.i(TAG, "Read " + c.labels.size() + " labels, output layer size is " + numClasses);
// Ideally, inputSize could have been retrieved from the shape of the input operation. Alas,
// the placeholder node for input in the graphdef typically used does not specify a shape, so it
// must be passed in as a parameter.
c.inputSize = inputSize;
c.imageMean = imageMean;
c.imageStd = imageStd;
// Pre-allocate buffers.
c.outputNames = new String[] {outputName};
c.intValues = new int[inputSize * inputSize];
c.floatValues = new float[inputSize * inputSize * 3];
c.outputs = new float[numClasses];
return c;
} | static Classifier function( AssetManager assetManager, String modelFilename, String labelFilename, int inputSize, int imageMean, float imageStd, String inputName, String outputName) { TensorFlowImageClassifier c = new TensorFlowImageClassifier(); c.inputName = inputName; c.outputName = outputName; final boolean hasAssetPrefix = labelFilename.startsWith(STRfile: Log.i(TAG, STR + actualFilename); BufferedReader br = null; try { br = new BufferedReader(new InputStreamReader(assetManager.open(actualFilename))); String line; while ((line = br.readLine()) != null) { c.labels.add(line); } br.close(); } catch (IOException e) { if (hasAssetPrefix) { throw new RuntimeException(STR , e); } try { br = new BufferedReader(new InputStreamReader(new FileInputStream(actualFilename))); String line; while ((line = br.readLine()) != null) { c.labels.add(line); } br.close(); } catch (IOException e2) { throw new RuntimeException(STR , e); } } c.inferenceInterface = new TensorFlowInferenceInterface(); if (c.inferenceInterface.initializeTensorFlow(assetManager, modelFilename) != 0) { throw new RuntimeException(STR); } final Operation operation = c.inferenceInterface.graph().operation(outputName); if (operation == null) { throw new RuntimeException(STR + outputName + STR + modelFilename + "'"); } final int numClasses = (int) operation.output(0).shape().size(1); Log.i(TAG, STR + c.labels.size() + STR + numClasses); c.inputSize = inputSize; c.imageMean = imageMean; c.imageStd = imageStd; c.outputNames = new String[] {outputName}; c.intValues = new int[inputSize * inputSize]; c.floatValues = new float[inputSize * inputSize * 3]; c.outputs = new float[numClasses]; return c; } | /**
* Initializes a native TensorFlow session for classifying images.
*
* @param assetManager The asset manager to be used to load assets.
* @param modelFilename The filepath of the model GraphDef protocol buffer.
* @param labelFilename The filepath of label file for classes.
* @param inputSize The input size. A square image of inputSize x inputSize is assumed.
* @param imageMean The assumed mean of the image values.
* @param imageStd The assumed std of the image values.
* @param inputName The label of the image input node.
* @param outputName The label of the output node.
* @throws IOException
*/ | Initializes a native TensorFlow session for classifying images | create | {
"repo_name": "heigeo/cordova-plugin-tensorflow",
"path": "src/android/tf_libs/TensorFlowImageClassifier.java",
"license": "mit",
"size": 7989
} | [
"android.content.res.AssetManager",
"android.util.Log",
"java.io.BufferedReader",
"java.io.FileInputStream",
"java.io.IOException",
"java.io.InputStreamReader",
"org.tensorflow.Operation",
"org.tensorflow.contrib.android.TensorFlowInferenceInterface"
] | import android.content.res.AssetManager; import android.util.Log; import java.io.BufferedReader; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import org.tensorflow.Operation; import org.tensorflow.contrib.android.TensorFlowInferenceInterface; | import android.content.res.*; import android.util.*; import java.io.*; import org.tensorflow.*; import org.tensorflow.contrib.android.*; | [
"android.content",
"android.util",
"java.io",
"org.tensorflow",
"org.tensorflow.contrib"
] | android.content; android.util; java.io; org.tensorflow; org.tensorflow.contrib; | 1,991,803 |
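A hypothetical call site matching the factory signature above; every file name, node name, and numeric value is a placeholder rather than something taken from the source, and imports for Classifier/TensorFlowImageClassifier are omitted because their package is not shown in the record:

import android.content.res.AssetManager;

class ClassifierFactorySketch {
    // All file names, node names, and numbers below are placeholders.
    static Classifier buildClassifier(AssetManager assets) {
        return TensorFlowImageClassifier.create(
                assets,
                "file:///android_asset/model.pb",      // GraphDef protocol buffer bundled as an asset
                "file:///android_asset/labels.txt",    // one class label per line
                224,                                   // inputSize: assumes a 224x224 input
                117,                                   // imageMean (placeholder)
                1.0f,                                  // imageStd (placeholder)
                "input",                               // inputName
                "output");                             // outputName
    }
}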
public void addAllToProperties(Properties props) {
synchronized (this.confData) {
for (Map.Entry<String, Object> entry : this.confData.entrySet()) {
props.put(entry.getKey(), entry.getValue());
}
}
} | void function(Properties props) { synchronized (this.confData) { for (Map.Entry<String, Object> entry : this.confData.entrySet()) { props.put(entry.getKey(), entry.getValue()); } } } | /**
* Adds all entries in this {@code Configuration} to the given {@link Properties}.
*/ | Adds all entries in this Configuration to the given <code>Properties</code> | addAllToProperties | {
"repo_name": "zimmermatt/flink",
"path": "flink-core/src/main/java/org/apache/flink/configuration/Configuration.java",
"license": "apache-2.0",
"size": 30463
} | [
"java.util.Map",
"java.util.Properties"
] | import java.util.Map; import java.util.Properties; | import java.util.*; | [
"java.util"
] | java.util; | 1,104,470 |
return new ModelLoader(opt,codeModel,er).load();
}
public ModelLoader(Options _opt, JCodeModel _codeModel, ErrorReceiver er) {
this.opt = _opt;
this.codeModel = _codeModel;
this.errorReceiver = new ErrorReceiverFilter(er);
} | return new ModelLoader(opt,codeModel,er).load(); } public ModelLoader(Options _opt, JCodeModel _codeModel, ErrorReceiver er) { this.opt = _opt; this.codeModel = _codeModel; this.errorReceiver = new ErrorReceiverFilter(er); } | /**
* A convenience method to load schemas into a {@link Model}.
*/ | A convenience method to load schemas into a <code>Model</code> | load | {
"repo_name": "samskivert/ikvm-openjdk",
"path": "build/linux-amd64/impsrc/com/sun/tools/internal/xjc/ModelLoader.java",
"license": "gpl-2.0",
"size": 21747
} | [
"com.sun.codemodel.internal.JCodeModel",
"com.sun.tools.internal.xjc.util.ErrorReceiverFilter"
] | import com.sun.codemodel.internal.JCodeModel; import com.sun.tools.internal.xjc.util.ErrorReceiverFilter; | import com.sun.codemodel.internal.*; import com.sun.tools.internal.xjc.util.*; | [
"com.sun.codemodel",
"com.sun.tools"
] | com.sun.codemodel; com.sun.tools; | 2,512,594 |
@Bean
protected SampleServiceController sampleServiceController()
{
return new SampleServiceController();
} | SampleServiceController function() { return new SampleServiceController(); } | /**
* Returns the Spring bean for the Sample Service controller.
*
* @return the Spring bean for the Sample Service controller
*/ | Returns the Spring bean for the Sample Service controller | sampleServiceController | {
"repo_name": "marcusportmann/mmp-java",
"path": "src/mmp-sample-wicket/src/main/java/guru/mmp/sample/Application.java",
"license": "apache-2.0",
"size": 8311
} | [
"guru.mmp.sample.api.SampleServiceController"
] | import guru.mmp.sample.api.SampleServiceController; | import guru.mmp.sample.api.*; | [
"guru.mmp.sample"
] | guru.mmp.sample; | 2,598,942 |
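The record above follows the standard Spring Java-config pattern; a stand-alone sketch of that pattern is shown below. The @Configuration class name is hypothetical and this is not the project's actual Application class.

import guru.mmp.sample.api.SampleServiceController;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class SampleApiConfigSketch {
    // Spring registers the returned instance as a singleton bean in the application context.
    @Bean
    public SampleServiceController sampleServiceController() {
        return new SampleServiceController();
    }
}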
@SuppressWarnings("unchecked")
private void convertConfigToMultipleDSFormat(String configFilePath)
throws DataServiceFault {
FileInputStream fis = null;
boolean changed = false;
try {
fis = new FileInputStream(configFilePath);
OMElement configElement = (new StAXOMBuilder(fis)).getDocumentElement();
configElement.build();
Iterator<OMElement> configElements = configElement.getChildrenWithName(
new QName(DBSFields.CONFIG));
int emptyConfigs = 0;
while (configElements.hasNext()) {
OMElement config = configElements.next();
String configId = config.getAttributeValue(new QName(DBSFields.ID));
if (configId == null || configId.trim().length() == 0) {
config.addAttribute(DBSFields.ID, DBConstants.DEFAULT_CONFIG_ID, null);
changed = true;
emptyConfigs++;
if (emptyConfigs > 1) {
throw new DataServiceFault("More than one config elements found in " +
configFilePath);
}
}
}
Iterator<OMElement> queryElements =
configElement.getChildrenWithName(new QName(DBSFields.QUERY));
while (queryElements.hasNext()) {
OMElement query = queryElements.next();
String useConfig = query.getAttributeValue(new QName(
DBSFields.USE_CONFIG));
if (useConfig == null || useConfig.trim().length() == 0) {
query.addAttribute(DBSFields.USE_CONFIG, DBConstants.DEFAULT_CONFIG_ID, null);
changed = true;
}
}
if (changed) {
if (log.isDebugEnabled()) {
log.debug("Converting " + configFilePath +
" to support multiple data sources.");
}
BufferedWriter out = new BufferedWriter(new FileWriter(configFilePath));
configElement.serialize(out);
out.close();
DBUtils.prettifyXMLFile(configFilePath);
}
} catch (Exception e) {
throw new DataServiceFault(e);
} finally {
if (fis != null) {
try {
fis.close();
} catch (IOException e) {
log.error("Error in closing data service configuration file", e);
}
}
}
} | @SuppressWarnings(STR) void function(String configFilePath) throws DataServiceFault { FileInputStream fis = null; boolean changed = false; try { fis = new FileInputStream(configFilePath); OMElement configElement = (new StAXOMBuilder(fis)).getDocumentElement(); configElement.build(); Iterator<OMElement> configElements = configElement.getChildrenWithName( new QName(DBSFields.CONFIG)); int emptyConfigs = 0; while (configElements.hasNext()) { OMElement config = configElements.next(); String configId = config.getAttributeValue(new QName(DBSFields.ID)); if (configId == null configId.trim().length() == 0) { config.addAttribute(DBSFields.ID, DBConstants.DEFAULT_CONFIG_ID, null); changed = true; emptyConfigs++; if (emptyConfigs > 1) { throw new DataServiceFault(STR + configFilePath); } } } Iterator<OMElement> queryElements = configElement.getChildrenWithName(new QName(DBSFields.QUERY)); while (queryElements.hasNext()) { OMElement query = queryElements.next(); String useConfig = query.getAttributeValue(new QName( DBSFields.USE_CONFIG)); if (useConfig == null useConfig.trim().length() == 0) { query.addAttribute(DBSFields.USE_CONFIG, DBConstants.DEFAULT_CONFIG_ID, null); changed = true; } } if (changed) { if (log.isDebugEnabled()) { log.debug(STR + configFilePath + STR); } BufferedWriter out = new BufferedWriter(new FileWriter(configFilePath)); configElement.serialize(out); out.close(); DBUtils.prettifyXMLFile(configFilePath); } } catch (Exception e) { throw new DataServiceFault(e); } finally { if (fis != null) { try { fis.close(); } catch (IOException e) { log.error(STR, e); } } } } | /**
 * Configuration files prior to multiple data source support did not have an id attribute
 * on the config element. Adds the attribute and saves the file.
 */ | Configuration files prior to multiple data source support did not have an id attribute on the config element; adds the attribute and saves the file | convertConfigToMultipleDSFormat | {
"repo_name": "anupama-pathirage/carbon-data",
"path": "components/data-services/org.wso2.carbon.dataservices.core/src/main/java/org/wso2/carbon/dataservices/core/DBDeployer.java",
"license": "apache-2.0",
"size": 50377
} | [
"java.io.BufferedWriter",
"java.io.FileInputStream",
"java.io.FileWriter",
"java.io.IOException",
"java.util.Iterator",
"javax.xml.namespace.QName",
"org.apache.axiom.om.OMElement",
"org.apache.axiom.om.impl.builder.StAXOMBuilder",
"org.wso2.carbon.dataservices.common.DBConstants"
] | import java.io.BufferedWriter; import java.io.FileInputStream; import java.io.FileWriter; import java.io.IOException; import java.util.Iterator; import javax.xml.namespace.QName; import org.apache.axiom.om.OMElement; import org.apache.axiom.om.impl.builder.StAXOMBuilder; import org.wso2.carbon.dataservices.common.DBConstants; | import java.io.*; import java.util.*; import javax.xml.namespace.*; import org.apache.axiom.om.*; import org.apache.axiom.om.impl.builder.*; import org.wso2.carbon.dataservices.common.*; | [
"java.io",
"java.util",
"javax.xml",
"org.apache.axiom",
"org.wso2.carbon"
] | java.io; java.util; javax.xml; org.apache.axiom; org.wso2.carbon; | 678,374 |
public void testEdgeNGramFilter() throws Exception {
Reader reader = new StringReader("test");
TokenStream stream = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
stream = tokenFilterFactory("EdgeNGram").create(stream);
assertTokenStreamContents(stream,
new String[] { "t" });
} | void function() throws Exception { Reader reader = new StringReader("test"); TokenStream stream = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false); stream = tokenFilterFactory(STR).create(stream); assertTokenStreamContents(stream, new String[] { "t" }); } | /**
* Test EdgeNGramFilterFactory
*/ | Test EdgeNGramFilterFactory | testEdgeNGramFilter | {
"repo_name": "zhangdian/solr4.6.0",
"path": "lucene/analysis/common/src/test/org/apache/lucene/analysis/ngram/TestNGramFilters.java",
"license": "apache-2.0",
"size": 6122
} | [
"java.io.Reader",
"java.io.StringReader",
"org.apache.lucene.analysis.MockTokenizer",
"org.apache.lucene.analysis.TokenStream"
] | import java.io.Reader; import java.io.StringReader; import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.analysis.TokenStream; | import java.io.*; import org.apache.lucene.analysis.*; | [
"java.io",
"org.apache.lucene"
] | java.io; org.apache.lucene; | 2,899,518 |
@RequiresPermission(MANAGE_FINGERPRINT)
public void setActiveUser(int userId) {
if (mService != null) try {
mService.setActiveUser(userId);
} catch (RemoteException e) {
throw e.rethrowFromSystemServer();
}
} | @RequiresPermission(MANAGE_FINGERPRINT) void function(int userId) { if (mService != null) try { mService.setActiveUser(userId); } catch (RemoteException e) { throw e.rethrowFromSystemServer(); } } | /**
* Sets the active user. This is meant to be used to select the current profile for enrollment
* to allow separate enrolled fingers for a work profile
* @param userId
* @hide
*/ | Sets the active user. This is meant to be used to select the current profile for enrollment to allow separate enrolled fingers for a work profile | setActiveUser | {
"repo_name": "xorware/android_frameworks_base",
"path": "core/java/android/hardware/fingerprint/FingerprintManager.java",
"license": "apache-2.0",
"size": 39147
} | [
"android.annotation.RequiresPermission",
"android.os.RemoteException"
] | import android.annotation.RequiresPermission; import android.os.RemoteException; | import android.annotation.*; import android.os.*; | [
"android.annotation",
"android.os"
] | android.annotation; android.os; | 2,170,569 |
public void writeTable(Table table, OutputStream os) throws DataIOException;
| void function(Table table, OutputStream os) throws DataIOException; | /**
 * Write a table to the given OutputStream.
* @param table the Table to write
* @param os the OutputStream to write the table to
 * @throws DataIOException
 */ | Write a table to the given OutputStream | writeTable | {
"repo_name": "effrafax/Prefux",
"path": "src/main/java/prefux/data/io/TableWriter.java",
"license": "bsd-3-clause",
"size": 2795
} | [
"java.io.OutputStream"
] | import java.io.OutputStream; | import java.io.*; | [
"java.io"
] | java.io; | 2,155,560 |
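A hedged usage sketch for the interface method above. The Table type and a concrete TableWriter implementation are assumed to live in the prefux.data packages implied by the record's repository path; the output path is illustrative.

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import prefux.data.Table;
import prefux.data.io.DataIOException;
import prefux.data.io.TableWriter;

public class TableWriterUsageSketch {
    // Writes the table to a file using whatever concrete TableWriter is supplied.
    public static void save(TableWriter writer, Table table, String path)
            throws DataIOException, IOException {
        try (OutputStream os = new FileOutputStream(path)) {
            writer.writeTable(table, os);
        }
    }
}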
public static ims.ocrr.orderingresults.domain.objects.OrderInvestigation extractOrderInvestigation(ims.domain.ILightweightDomainFactory domainFactory, ims.ocrr.vo.PathologyOrderVo valueObject)
{
return extractOrderInvestigation(domainFactory, valueObject, new HashMap());
}
| static ims.ocrr.orderingresults.domain.objects.OrderInvestigation function(ims.domain.ILightweightDomainFactory domainFactory, ims.ocrr.vo.PathologyOrderVo valueObject) { return extractOrderInvestigation(domainFactory, valueObject, new HashMap()); } | /**
* Create the domain object from the value object.
* @param domainFactory - used to create existing (persistent) domain objects.
* @param valueObject - extract the domain object fields from this.
*/ | Create the domain object from the value object | extractOrderInvestigation | {
"repo_name": "FreudianNM/openMAXIMS",
"path": "Source Library/openmaxims_workspace/ValueObjects/src/ims/ocrr/vo/domain/PathologyOrderVoAssembler.java",
"license": "agpl-3.0",
"size": 18105
} | [
"java.util.HashMap"
] | import java.util.HashMap; | import java.util.*; | [
"java.util"
] | java.util; | 2,515,104 |
private static Method internalFindMethod(Class start, String methodName,
int argCount, Class args[]) {
// For overriden methods we need to find the most derived version.
// So we start with the given class and walk up the superclass chain.
Method method = null;
for (Class cl = start; cl != null; cl = cl.getSuperclass()) {
Method methods[] = getPublicDeclaredMethods(cl);
for (int i = 0; i < methods.length; i++) {
method = methods[i];
if (method == null) {
continue;
}
// make sure method signature matches.
if (method.getName().equals(methodName)) {
Type[] params = method.getGenericParameterTypes();
if (params.length == argCount) {
if (args != null) {
boolean different = false;
if (argCount > 0) {
for (int j = 0; j < argCount; j++) {
if (TypeResolver.erase(TypeResolver.resolveInClass(start, params[j])) != args[j]) {
different = true;
continue;
}
}
if (different) {
continue;
}
}
}
return method;
}
}
}
}
method = null;
// Now check any inherited interfaces. This is necessary both when
// the argument class is itself an interface, and when the argument
// class is an abstract class.
Class ifcs[] = start.getInterfaces();
for (int i = 0 ; i < ifcs.length; i++) {
// Note: The original implementation had both methods calling
// the 3 arg method. This is preserved but perhaps it should
// pass the args array instead of null.
method = internalFindMethod(ifcs[i], methodName, argCount, null);
if (method != null) {
break;
}
}
return method;
} | static Method function(Class start, String methodName, int argCount, Class args[]) { Method method = null; for (Class cl = start; cl != null; cl = cl.getSuperclass()) { Method methods[] = getPublicDeclaredMethods(cl); for (int i = 0; i < methods.length; i++) { method = methods[i]; if (method == null) { continue; } if (method.getName().equals(methodName)) { Type[] params = method.getGenericParameterTypes(); if (params.length == argCount) { if (args != null) { boolean different = false; if (argCount > 0) { for (int j = 0; j < argCount; j++) { if (TypeResolver.erase(TypeResolver.resolveInClass(start, params[j])) != args[j]) { different = true; continue; } } if (different) { continue; } } } return method; } } } } method = null; Class ifcs[] = start.getInterfaces(); for (int i = 0 ; i < ifcs.length; i++) { method = internalFindMethod(ifcs[i], methodName, argCount, null); if (method != null) { break; } } return method; } | /**
* Internal support for finding a target methodName with a given
* parameter list on a given class.
*/ | Internal support for finding a target methodName with a given parameter list on a given class | internalFindMethod | {
"repo_name": "openjdk/jdk7u",
"path": "jdk/src/share/classes/java/beans/Introspector.java",
"license": "gpl-2.0",
"size": 63636
} | [
"com.sun.beans.TypeResolver",
"java.lang.reflect.Method",
"java.lang.reflect.Type"
] | import com.sun.beans.TypeResolver; import java.lang.reflect.Method; import java.lang.reflect.Type; | import com.sun.beans.*; import java.lang.reflect.*; | [
"com.sun.beans",
"java.lang"
] | com.sun.beans; java.lang; | 1,530,518 |
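The helper above is private to java.beans.Introspector; a sketch of the public entry point whose lookups it backs is shown below (the bean class passed in is arbitrary).

import java.beans.BeanInfo;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;

public class IntrospectorSketch {
    public static void listProperties(Class<?> beanClass) throws IntrospectionException {
        BeanInfo info = Introspector.getBeanInfo(beanClass);
        for (PropertyDescriptor pd : info.getPropertyDescriptors()) {
            // Getter/setter resolution walks the class, its superclasses and interfaces,
            // which is the search the private helper implements.
            System.out.println(pd.getName() + " -> " + pd.getReadMethod());
        }
    }
}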
private void prepare_template_(File file) throws IOException {
if (true) throw new RuntimeException("remove this line");
boolean modified = false;
Document doc;
try {
doc = new SAXBuilder().build(file);
} catch (JDOMException ex) {
throw new RuntimeException(ex);
}
Element root = doc.getRootElement();
Element principalDescription = root.getChild("principalDescription");
if (principalDescription != null) {
root.removeContent(principalDescription);
modified = true;
}
if (modified) save(doc, file);
} | void function(File file) throws IOException { if (true) throw new RuntimeException(STR); boolean modified = false; Document doc; try { doc = new SAXBuilder().build(file); } catch (JDOMException ex) { throw new RuntimeException(ex); } Element root = doc.getRootElement(); Element principalDescription = root.getChild(STR); if (principalDescription != null) { root.removeContent(principalDescription); modified = true; } if (modified) save(doc, file); } | /**
* use as template, don't modify
*/ | use as template, don't modify | prepare_template_ | {
"repo_name": "MiguelSMendoza/Kunagi",
"path": "WEB-INF/classes/scrum/server/ScrumEntityfilePreparator.java",
"license": "agpl-3.0",
"size": 4879
} | [
"java.io.File",
"java.io.IOException",
"org.jdom2.Document",
"org.jdom2.Element",
"org.jdom2.JDOMException",
"org.jdom2.input.SAXBuilder"
] | import java.io.File; import java.io.IOException; import org.jdom2.Document; import org.jdom2.Element; import org.jdom2.JDOMException; import org.jdom2.input.SAXBuilder; | import java.io.*; import org.jdom2.*; import org.jdom2.input.*; | [
"java.io",
"org.jdom2",
"org.jdom2.input"
] | java.io; org.jdom2; org.jdom2.input; | 2,140,901 |
public static br.gov.camara.edemocracia.portlets.chat.model.ChatRoomMessage findByPublic_Last(
boolean messagePublic, java.util.Date messageTS,
com.liferay.portal.kernel.util.OrderByComparator orderByComparator)
throws br.gov.camara.edemocracia.portlets.chat.NoSuchChatRoomMessageException,
com.liferay.portal.kernel.exception.SystemException {
return getPersistence()
.findByPublic_Last(messagePublic, messageTS,
orderByComparator);
} | static br.gov.camara.edemocracia.portlets.chat.model.ChatRoomMessage function( boolean messagePublic, java.util.Date messageTS, com.liferay.portal.kernel.util.OrderByComparator orderByComparator) throws br.gov.camara.edemocracia.portlets.chat.NoSuchChatRoomMessageException, com.liferay.portal.kernel.exception.SystemException { return getPersistence() .findByPublic_Last(messagePublic, messageTS, orderByComparator); } | /**
* Returns the last chat room message in the ordered set where messagePublic = ? and messageTS = ?.
*
* @param messagePublic the message public
* @param messageTS the message t s
* @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
* @return the last matching chat room message
* @throws br.gov.camara.edemocracia.portlets.chat.NoSuchChatRoomMessageException if a matching chat room message could not be found
* @throws SystemException if a system exception occurred
*/ | Returns the last chat room message in the ordered set where messagePublic = ? and messageTS = ? | findByPublic_Last | {
"repo_name": "camaradosdeputadosoficial/edemocracia",
"path": "cd-chat-portlet/src/main/java/br/gov/camara/edemocracia/portlets/chat/service/persistence/ChatRoomMessageUtil.java",
"license": "lgpl-2.1",
"size": 33198
} | [
"br.gov.camara.edemocracia.portlets.chat.model.ChatRoomMessage",
"com.liferay.portal.kernel.exception.SystemException",
"com.liferay.portal.kernel.util.OrderByComparator"
] | import br.gov.camara.edemocracia.portlets.chat.model.ChatRoomMessage; import com.liferay.portal.kernel.exception.SystemException; import com.liferay.portal.kernel.util.OrderByComparator; | import br.gov.camara.edemocracia.portlets.chat.model.*; import com.liferay.portal.kernel.exception.*; import com.liferay.portal.kernel.util.*; | [
"br.gov.camara",
"com.liferay.portal"
] | br.gov.camara; com.liferay.portal; | 1,096,035 |
List<DashboardPreference> getAll();
| List<DashboardPreference> getAll(); | /**
* Returns a List<DashboardPreference> of all of the preference data in the database.
* @return List<DashboardPreference>
*/ | Returns a List of all of the preference data in the database | getAll | {
"repo_name": "DePaul2015SEStudioTeam1/armada",
"path": "src/main/java/edu/depaul/armada/dao/PreferenceDao.java",
"license": "mit",
"size": 2064
} | [
"edu.depaul.armada.model.DashboardPreference",
"java.util.List"
] | import edu.depaul.armada.model.DashboardPreference; import java.util.List; | import edu.depaul.armada.model.*; import java.util.*; | [
"edu.depaul.armada",
"java.util"
] | edu.depaul.armada; java.util; | 635,209 |
protected void processRequest(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
// set response type to JS and allow programs from other servers to send and receive
response.setContentType("application/json;charset=UTF-8");
response.setHeader("Access-Control-Allow-Origin", "*");
boolean error = false; // has an error been detected?
String solrResult = ""; // JSON doc sent back to UI
// create URL for SOLR query
String queryURL = url;
boolean first = true;
Map<String, String[]> params = request.getParameterMap(); // get map of parameters and their values
Enumeration<String> allParams = request.getParameterNames(); // get a list of parameter names
while (allParams.hasMoreElements()) {
String param = allParams.nextElement();
if (param.equalsIgnoreCase("id")) { // ID of channel
if (!first) { // at the moment it will always be the first (and only) param
queryURL += "&";
}
queryURL += "channelId=" + URLEncoder.encode(params.get("id")[0], "UTF-8"); // extend stem with parameter
first = false; // next time you need a separator
} else if (param.equalsIgnoreCase("version")) {
// do nothing
} else { // parameter was not recognised, send error
error = true; // error has been detected
logger.log(Level.WARNING, "Client sent invalid parameter: {0}", param);
solrResult = "{\"invalid_paramater\": \"" + param + "\"}";
break;
}
}
// run query against SOLR API
if (!error) { // if no error detected
CommunicateWithSolr cws = new CommunicateWithSolr();
solrResult = cws.talk(queryURL);
} else {
logger.log(Level.SEVERE, "[BAD QUERY] {0}", queryURL);
}
try ( // send result to client (UI)
PrintWriter out = response.getWriter()) {
out.println(solrResult); // may be error or genuine result
}
} | void function(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.setContentType(STR); response.setHeader(STR, "*"); boolean error = false; String solrResult = STRidSTR&STRchannelId=STRidSTRUTF-8STRversionSTRClient sent invalid parameter: {0}STR{\STR: \STR\"}"; break; } } if (!error) { CommunicateWithSolr cws = new CommunicateWithSolr(); solrResult = cws.talk(queryURL); } else { logger.log(Level.SEVERE, STR, queryURL); } try ( PrintWriter out = response.getWriter()) { out.println(solrResult); } } | /**
* Processes requests for both HTTP <code>GET</code> and <code>POST</code>
* methods.
*
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/ | Processes requests for both HTTP <code>GET</code> and <code>POST</code> methods | processRequest | {
"repo_name": "PhenoImageShare/PhenoImageShare",
"path": "HWU/iqs/src/main/java/uk/ac/hw/macs/bisel/phis/iqs/v101/v101GC.java",
"license": "apache-2.0",
"size": 5361
} | [
"java.io.IOException",
"java.io.PrintWriter",
"java.util.logging.Level",
"javax.servlet.ServletException",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse",
"uk.ac.hw.macs.bisel.phis.iqs.CommunicateWithSolr"
] | import java.io.IOException; import java.io.PrintWriter; import java.util.logging.Level; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import uk.ac.hw.macs.bisel.phis.iqs.CommunicateWithSolr; | import java.io.*; import java.util.logging.*; import javax.servlet.*; import javax.servlet.http.*; import uk.ac.hw.macs.bisel.phis.iqs.*; | [
"java.io",
"java.util",
"javax.servlet",
"uk.ac.hw"
] | java.io; java.util; javax.servlet; uk.ac.hw; | 2,896,500 |
Boolean escalationMatch(List<String> messageText); | Boolean escalationMatch(List<String> messageText); | /**
* Utility method for determining if a reply email should escalate an acknowledgable
*
* @param messageText a {@link java.util.List} object.
* @return Boolean
*/ | Utility method for determining if a reply email should escalate an acknowledgable | escalationMatch | {
"repo_name": "tdefilip/opennms",
"path": "opennms-dao-api/src/main/java/org/opennms/netmgt/dao/api/AckdConfigurationDao.java",
"license": "agpl-3.0",
"size": 5208
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,071,451 |
@Test
public void testGetAverageCallsPerPeriod() throws InterruptedException {
final ScheduledExecutorService service = EasyMock
.createMock(ScheduledExecutorService.class);
final ScheduledFuture<?> future = EasyMock.createMock(ScheduledFuture.class);
prepareStartTimer(service, future);
EasyMock.replay(service, future);
final TimedSemaphore semaphore = new TimedSemaphore(service, PERIOD, UNIT,
LIMIT);
semaphore.acquire();
semaphore.endOfPeriod();
assertEquals("Wrong average (1)", 1.0, semaphore
.getAverageCallsPerPeriod(), .005);
semaphore.acquire();
semaphore.acquire();
semaphore.endOfPeriod();
assertEquals("Wrong average (2)", 1.5, semaphore
.getAverageCallsPerPeriod(), .005);
EasyMock.verify(service, future);
} | void function() throws InterruptedException { final ScheduledExecutorService service = EasyMock .createMock(ScheduledExecutorService.class); final ScheduledFuture<?> future = EasyMock.createMock(ScheduledFuture.class); prepareStartTimer(service, future); EasyMock.replay(service, future); final TimedSemaphore semaphore = new TimedSemaphore(service, PERIOD, UNIT, LIMIT); semaphore.acquire(); semaphore.endOfPeriod(); assertEquals(STR, 1.0, semaphore .getAverageCallsPerPeriod(), .005); semaphore.acquire(); semaphore.acquire(); semaphore.endOfPeriod(); assertEquals(STR, 1.5, semaphore .getAverageCallsPerPeriod(), .005); EasyMock.verify(service, future); } | /**
* Tests the methods for statistics.
*/ | Tests the methods for statistics | testGetAverageCallsPerPeriod | {
"repo_name": "mureinik/commons-lang",
"path": "src/test/java/org/apache/commons/lang3/concurrent/TimedSemaphoreTest.java",
"license": "apache-2.0",
"size": 18252
} | [
"java.util.concurrent.ScheduledExecutorService",
"java.util.concurrent.ScheduledFuture",
"org.easymock.EasyMock",
"org.junit.Assert"
] | import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import org.easymock.EasyMock; import org.junit.Assert; | import java.util.concurrent.*; import org.easymock.*; import org.junit.*; | [
"java.util",
"org.easymock",
"org.junit"
] | java.util; org.easymock; org.junit; | 2,654,757 |
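A stand-alone sketch of the statistics exercised by the test above, using the public commons-lang3 TimedSemaphore constructor rather than the mocked executor; the period and limit values are arbitrary.

import java.util.concurrent.TimeUnit;
import org.apache.commons.lang3.concurrent.TimedSemaphore;

public class TimedSemaphoreStatsSketch {
    public static double sampleAverage() throws InterruptedException {
        TimedSemaphore semaphore = new TimedSemaphore(1, TimeUnit.SECONDS, 10); // 10 permits per second
        semaphore.acquire();
        semaphore.acquire();
        double avg = semaphore.getAverageCallsPerPeriod(); // acquires averaged over elapsed periods
        semaphore.shutdown(); // stops the background timer task
        return avg;
    }
}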
@Test
public void testExpiredAuthorizationTokenIntrospect() {
ApplicationContext testContext = context.getBuilder()
.client(ClientType.ClientCredentials, true)
.token(OAuthTokenType.Authorization, true, "debug", null, null)
.build();
OAuthToken targetToken = testContext.getToken();
Map<String, String> values = new HashMap<>();
values.put("token", IdUtil.toString(targetToken.getId()));
Response r = target("/oauth2/introspect")
.request()
.header(AUTHORIZATION, contextAuthHeader)
.post(buildEntity(values));
assertInactiveIntrospectionResponse(r);
} | void function() { ApplicationContext testContext = context.getBuilder() .client(ClientType.ClientCredentials, true) .token(OAuthTokenType.Authorization, true, "debug", null, null) .build(); OAuthToken targetToken = testContext.getToken(); Map<String, String> values = new HashMap<>(); values.put("token", IdUtil.toString(targetToken.getId())); Response r = target(STR) .request() .header(AUTHORIZATION, contextAuthHeader) .post(buildEntity(values)); assertInactiveIntrospectionResponse(r); } | /**
* Assert that an expired authorization token can be introspected.
*/ | Assert that an expired authorization token can be introspected | testExpiredAuthorizationTokenIntrospect | {
"repo_name": "kangaroo-server/kangaroo",
"path": "kangaroo-server-authz/src/test/java/net/krotscheck/kangaroo/authz/oauth2/rfc7662/TokenIntrospectionTest.java",
"license": "apache-2.0",
"size": 26425
} | [
"java.util.HashMap",
"java.util.Map",
"javax.ws.rs.core.Response",
"net.krotscheck.kangaroo.authz.common.database.entity.ClientType",
"net.krotscheck.kangaroo.authz.common.database.entity.OAuthToken",
"net.krotscheck.kangaroo.authz.common.database.entity.OAuthTokenType",
"net.krotscheck.kangaroo.authz.test.ApplicationBuilder",
"net.krotscheck.kangaroo.common.hibernate.id.IdUtil"
] | import java.util.HashMap; import java.util.Map; import javax.ws.rs.core.Response; import net.krotscheck.kangaroo.authz.common.database.entity.ClientType; import net.krotscheck.kangaroo.authz.common.database.entity.OAuthToken; import net.krotscheck.kangaroo.authz.common.database.entity.OAuthTokenType; import net.krotscheck.kangaroo.authz.test.ApplicationBuilder; import net.krotscheck.kangaroo.common.hibernate.id.IdUtil; | import java.util.*; import javax.ws.rs.core.*; import net.krotscheck.kangaroo.authz.common.database.entity.*; import net.krotscheck.kangaroo.authz.test.*; import net.krotscheck.kangaroo.common.hibernate.id.*; | [
"java.util",
"javax.ws",
"net.krotscheck.kangaroo"
] | java.util; javax.ws; net.krotscheck.kangaroo; | 2,316,117 |
public void simpleRename(String from, String toDirPath, String toName) {
String simplePath = null;
if (toDirPath.endsWith("/")) {
simplePath = toDirPath + toName;
} else {
simplePath = toDirPath + "/" + toName;
}
try {
fetchResponse(issueRenameToSlave(from, toDirPath, toName));
} catch (RemoteIOException e) {
setOffline(e);
addQueueRename(from, simplePath);
} catch (SlaveUnavailableException e) {
addQueueRename(from, simplePath);
}
} | void function(String from, String toDirPath, String toName) { String simplePath = null; if (toDirPath.endsWith("/")) { simplePath = toDirPath + toName; } else { simplePath = toDirPath + "/" + toName; } try { fetchResponse(issueRenameToSlave(from, toDirPath, toName)); } catch (RemoteIOException e) { setOffline(e); addQueueRename(from, simplePath); } catch (SlaveUnavailableException e) { addQueueRename(from, simplePath); } } | /**
* Renames files/directories and waits for the response
*/ | Renames files/directories and waits for the response | simpleRename | {
"repo_name": "g2x3k/Drftpd2Stable",
"path": "src/org/drftpd/master/RemoteSlave.java",
"license": "gpl-2.0",
"size": 34816
} | [
"net.sf.drftpd.SlaveUnavailableException",
"org.drftpd.slave.RemoteIOException"
] | import net.sf.drftpd.SlaveUnavailableException; import org.drftpd.slave.RemoteIOException; | import net.sf.drftpd.*; import org.drftpd.slave.*; | [
"net.sf.drftpd",
"org.drftpd.slave"
] | net.sf.drftpd; org.drftpd.slave; | 838,284 |
public QName getQName() {
return qname;
} | QName function() { return qname; } | /**
* Returns the wrapped QName object.
*
* @return the wrapped QName
*/ | Returns the wrapped QName object | getQName | {
"repo_name": "windauer/exist",
"path": "exist-core/src/main/java/org/exist/xquery/value/QNameValue.java",
"license": "lgpl-2.1",
"size": 8362
} | [
"org.exist.dom.QName"
] | import org.exist.dom.QName; | import org.exist.dom.*; | [
"org.exist.dom"
] | org.exist.dom; | 666,184 |
@Test
public void testNegotiate() {
final String securityPackage = "Negotiate";
IWindowsCredentialsHandle clientCredentials = null;
WindowsSecurityContextImpl clientContext = null;
try {
// client credentials handle
clientCredentials = WindowsCredentialsHandleImpl.getCurrent(securityPackage);
clientCredentials.initialize();
// initial client security context
clientContext = new WindowsSecurityContextImpl();
clientContext.setPrincipalName(WindowsAccountImpl.getCurrentUsername());
clientContext.setCredentialsHandle(clientCredentials.getHandle());
clientContext.setSecurityPackage(securityPackage);
clientContext.initialize(null, null, WindowsAccountImpl.getCurrentUsername());
// negotiate
boolean authenticated = false;
final SimpleHttpRequest request = new SimpleHttpRequest();
request.setQueryString("j_negotiate_check");
String clientToken;
while (true) {
clientToken = BaseEncoding.base64().encode(clientContext.getToken());
request.addHeader("Authorization", securityPackage + " " + clientToken);
final SimpleHttpResponse response = new SimpleHttpResponse();
authenticated = this.authenticator.authenticate(request, response);
if (authenticated) {
Assertions.assertThat(response.getHeaderNames().size()).isGreaterThanOrEqualTo(0);
break;
}
Assert.assertTrue(response.getHeader("WWW-Authenticate").startsWith(securityPackage + " "));
Assert.assertEquals("keep-alive", response.getHeader("Connection"));
Assert.assertEquals(2, response.getHeaderNames().size());
Assert.assertEquals(401, response.getStatus());
final String continueToken = response.getHeader("WWW-Authenticate").substring(
securityPackage.length() + 1);
final byte[] continueTokenBytes = BaseEncoding.base64().decode(continueToken);
Assertions.assertThat(continueTokenBytes.length).isGreaterThan(0);
final SecBufferDesc continueTokenBuffer = new SecBufferDesc(Sspi.SECBUFFER_TOKEN, continueTokenBytes);
clientContext.initialize(clientContext.getHandle(), continueTokenBuffer,
WindowsAccountImpl.getCurrentUsername());
}
Assert.assertTrue(authenticated);
} finally {
if (clientContext != null) {
clientContext.dispose();
}
if (clientCredentials != null) {
clientCredentials.dispose();
}
}
} | void function() { final String securityPackage = STR; IWindowsCredentialsHandle clientCredentials = null; WindowsSecurityContextImpl clientContext = null; try { clientCredentials = WindowsCredentialsHandleImpl.getCurrent(securityPackage); clientCredentials.initialize(); clientContext = new WindowsSecurityContextImpl(); clientContext.setPrincipalName(WindowsAccountImpl.getCurrentUsername()); clientContext.setCredentialsHandle(clientCredentials.getHandle()); clientContext.setSecurityPackage(securityPackage); clientContext.initialize(null, null, WindowsAccountImpl.getCurrentUsername()); boolean authenticated = false; final SimpleHttpRequest request = new SimpleHttpRequest(); request.setQueryString(STR); String clientToken; while (true) { clientToken = BaseEncoding.base64().encode(clientContext.getToken()); request.addHeader(STR, securityPackage + " " + clientToken); final SimpleHttpResponse response = new SimpleHttpResponse(); authenticated = this.authenticator.authenticate(request, response); if (authenticated) { Assertions.assertThat(response.getHeaderNames().size()).isGreaterThanOrEqualTo(0); break; } Assert.assertTrue(response.getHeader(STR).startsWith(securityPackage + " ")); Assert.assertEquals(STR, response.getHeader(STR)); Assert.assertEquals(2, response.getHeaderNames().size()); Assert.assertEquals(401, response.getStatus()); final String continueToken = response.getHeader(STR).substring( securityPackage.length() + 1); final byte[] continueTokenBytes = BaseEncoding.base64().decode(continueToken); Assertions.assertThat(continueTokenBytes.length).isGreaterThan(0); final SecBufferDesc continueTokenBuffer = new SecBufferDesc(Sspi.SECBUFFER_TOKEN, continueTokenBytes); clientContext.initialize(clientContext.getHandle(), continueTokenBuffer, WindowsAccountImpl.getCurrentUsername()); } Assert.assertTrue(authenticated); } finally { if (clientContext != null) { clientContext.dispose(); } if (clientCredentials != null) { clientCredentials.dispose(); } } } | /**
* Test negotiate.
*/ | Test negotiate | testNegotiate | {
"repo_name": "boyley/waffle",
"path": "Source/JNA/waffle-tomcat8/src/test/java/waffle/apache/MixedAuthenticatorTests.java",
"license": "epl-1.0",
"size": 12421
} | [
"com.google.common.io.BaseEncoding",
"com.sun.jna.platform.win32.Sspi",
"org.assertj.core.api.Assertions",
"org.junit.Assert"
] | import com.google.common.io.BaseEncoding; import com.sun.jna.platform.win32.Sspi; import org.assertj.core.api.Assertions; import org.junit.Assert; | import com.google.common.io.*; import com.sun.jna.platform.win32.*; import org.assertj.core.api.*; import org.junit.*; | [
"com.google.common",
"com.sun.jna",
"org.assertj.core",
"org.junit"
] | com.google.common; com.sun.jna; org.assertj.core; org.junit; | 709,919 |
public final int buildMdb(XmlParser.Node node, OutputStream out)
throws IOException {
XmlExpression.buildMdbUnary(node, out);
return 0;
}
| final int function(XmlParser.Node node, OutputStream out) throws IOException { XmlExpression.buildMdbUnary(node, out); return 0; } | /**
* Write the contained test of the specified node <br>
* <ul>
* Structure of stream : Data[size]
* <li>IdTokens#COMPLEX_EXPRESSION [2]</li>
* <li>operation id [1]</li>
* <li>depending on the expression [...]</li>
* </ul>
*
* @param node
* the XML test container structure
* @param out
* output stream where the card structure will be saved
* @return the amount of written action, so return always ZERO.
* @throws IOException
 */ | Write the contained test of the specified node. Structure of stream: Data[size] = IdTokens#COMPLEX_EXPRESSION [2], operation id [1], then content depending on the expression [...] | buildMdb | {
"repo_name": "JoeyLeeuwinga/Firemox",
"path": "src/main/java/net/sf/firemox/xml/expression/Addhalftruncated.java",
"license": "gpl-2.0",
"size": 1919
} | [
"java.io.IOException",
"java.io.OutputStream",
"net.sf.firemox.xml.XmlExpression",
"net.sf.firemox.xml.XmlParser"
] | import java.io.IOException; import java.io.OutputStream; import net.sf.firemox.xml.XmlExpression; import net.sf.firemox.xml.XmlParser; | import java.io.*; import net.sf.firemox.xml.*; | [
"java.io",
"net.sf.firemox"
] | java.io; net.sf.firemox; | 389,474 |
protected void checkVectorDimensions(int n) throws IllegalArgumentException {
if (getDimension() != n) {
throw MathRuntimeException.createIllegalArgumentException(
"vector length mismatch: got {0} but expected {1}",
getDimension(), n);
}
}
| void function(int n) throws IllegalArgumentException { if (getDimension() != n) { throw MathRuntimeException.createIllegalArgumentException( STR, getDimension(), n); } } | /**
* Check if instance dimension is equal to some expected value.
*
* @param n
* expected dimension.
* @exception IllegalArgumentException
* if the dimension is inconsistent with vector size
*/ | Check if instance dimension is equal to some expected value | checkVectorDimensions | {
"repo_name": "SpoonLabs/astor",
"path": "examples/Math-issue-340/src/main/java/org/apache/commons/math/linear/SparseFieldVector.java",
"license": "gpl-2.0",
"size": 22181
} | [
"org.apache.commons.math.MathRuntimeException"
] | import org.apache.commons.math.MathRuntimeException; | import org.apache.commons.math.*; | [
"org.apache.commons"
] | org.apache.commons; | 1,725,670 |
@Column(updatable = false)
public Integer getFirmid() {
return firmid;
} | @Column(updatable = false) Integer function() { return firmid; } | /**
* Gets the firmid.
*
* @return the firmid
*/ | Gets the firmid | getFirmid | {
"repo_name": "gleb619/hotel_shop",
"path": "src/main/java/org/test/shop/model/domain/SObject.java",
"license": "apache-2.0",
"size": 8039
} | [
"javax.persistence.Column"
] | import javax.persistence.Column; | import javax.persistence.*; | [
"javax.persistence"
] | javax.persistence; | 1,177,299 |
static String dpidToUri(String dpid) {
return "of:" + dpid.replace(":", "");
}
private class InnerLeadershipEventListener
implements LeadershipEventListener { | static String dpidToUri(String dpid) { return "of:" + dpid.replace(":", ""); } private class InnerLeadershipEventListener implements LeadershipEventListener { | /**
* Converts DPIDs of the form xx:xx:xx:xx:xx:xx:xx to OpenFlow provider
* device URIs.
*
* @param dpid the DPID string to convert
* @return the URI string for this device
*/ | Converts DPIDs of the form xx:xx:xx:xx:xx:xx:xx to OpenFlow provider device URIs | dpidToUri | {
"repo_name": "kuangrewawa/onos",
"path": "apps/sdnip/src/main/java/org/onosproject/sdnip/SdnIp.java",
"license": "apache-2.0",
"size": 5843
} | [
"org.onosproject.cluster.LeadershipEventListener"
] | import org.onosproject.cluster.LeadershipEventListener; | import org.onosproject.cluster.*; | [
"org.onosproject.cluster"
] | org.onosproject.cluster; | 1,742,614 |
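The conversion above simply strips the colon separators and prefixes "of:"; a stand-alone equivalent (not the ONOS code path) with a concrete input/output pair:

public class DpidUriSketch {
    static String dpidToUri(String dpid) {
        return "of:" + dpid.replace(":", "");
    }

    public static void main(String[] args) {
        // prints "of:0000000000000001"
        System.out.println(dpidToUri("00:00:00:00:00:00:00:01"));
    }
}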
public Habit getHabitSync(String habitId) {
return source.getHabitSync(habitId);
} | Habit function(String habitId) { return source.getHabitSync(habitId); } | /**
* Get a Habit synchronously
* @param habitId unique id for habit we wish to get
* @return the Habit with id habitId, or null if not found
*/ | Get a Habit synchronously | getHabitSync | {
"repo_name": "CMPUT301F17T21/StylePoints",
"path": "HabitTracker/app/src/main/java/com/stylepoints/habittracker/repository/HabitRepository.java",
"license": "apache-2.0",
"size": 8592
} | [
"com.stylepoints.habittracker.model.Habit"
] | import com.stylepoints.habittracker.model.Habit; | import com.stylepoints.habittracker.model.*; | [
"com.stylepoints.habittracker"
] | com.stylepoints.habittracker; | 2,813,856 |
private void createBlueArray() {
Arrays.fill(bluePixels, 0);
int largestOcc = 0;
int index = 0;
for (int i = 0; i < this.pixels.length; i++) {
int rgb = this.pixels[i];
int b = rgb & 0xff;
bluePixels[b]++;
if (bluePixels[b] > largestOcc) {
largestOcc = bluePixels[b];
index = b;
}
}
largestOccs[2] = index;
} | void function() { Arrays.fill(bluePixels, 0); int largestOcc = 0; int index = 0; for (int i = 0; i < this.pixels.length; i++) { int rgb = this.pixels[i]; int b = rgb & 0xff; bluePixels[b]++; if (bluePixels[b] > largestOcc) { largestOcc = bluePixels[b]; index = b; } } largestOccs[2] = index; } | /**
* counts occurrences of the 256 levels of blue
*/ | counts occurrences of the 256 levels of blue | createBlueArray | {
"repo_name": "sgoeschl/java-image-processing-survival-guide",
"path": "code/jipsg/image-manipulation/src/test/java/org/github/jipsg/image/filter/AutoCorrectionFilter.java",
"license": "apache-2.0",
"size": 11064
} | [
"java.util.Arrays"
] | import java.util.Arrays; | import java.util.*; | [
"java.util"
] | java.util; | 2,591,991 |
public LocalDateTime getCreatedAt() {
return createdAt;
} | LocalDateTime function() { return createdAt; } | /**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column ASYNC_PROCESS_FILE_RESULT.CREATED_AT
*
* @return the value of ASYNC_PROCESS_FILE_RESULT.CREATED_AT
*
* @mbggenerated
*/ | This method was generated by MyBatis Generator. This method returns the value of the database column ASYNC_PROCESS_FILE_RESULT.CREATED_AT | getCreatedAt | {
"repo_name": "agwlvssainokuni/sqlapp",
"path": "src/generated/java/cherry/sqlapp/db/gen/dto/AsyncProcessFileResult.java",
"license": "apache-2.0",
"size": 12379
} | [
"org.joda.time.LocalDateTime"
] | import org.joda.time.LocalDateTime; | import org.joda.time.*; | [
"org.joda.time"
] | org.joda.time; | 2,893,564 |
private List<AbstractColumnParser<?>> getParserList(
List<ColumnType> typeArray, ReadOptions options) {
// Types to choose from. When more than one would work, we pick the first of the options
List<AbstractColumnParser<?>> parsers = new ArrayList<>();
for (ColumnType type : typeArray) {
parsers.add(type.customParser(options));
}
return parsers;
} | List<AbstractColumnParser<?>> function( List<ColumnType> typeArray, ReadOptions options) { List<AbstractColumnParser<?>> parsers = new ArrayList<>(); for (ColumnType type : typeArray) { parsers.add(type.customParser(options)); } return parsers; } | /**
* Returns the list of parsers to use for type detection
*
* @param typeArray Array of column types. The order specifies the order the types are applied
* @param options CsvReadOptions to use to modify the default parsers for each type
* @return A list of parsers in the order they should be used for type detection
*/ | Returns the list of parsers to use for type detection | getParserList | {
"repo_name": "axkr/symja_android_library",
"path": "symja_android_library/matheclipse-io/src/main/java/tech/tablesaw/io/ColumnTypeDetector.java",
"license": "gpl-3.0",
"size": 7102
} | [
"java.util.ArrayList",
"java.util.List",
"tech.tablesaw.api.ColumnType",
"tech.tablesaw.columns.AbstractColumnParser"
] | import java.util.ArrayList; import java.util.List; import tech.tablesaw.api.ColumnType; import tech.tablesaw.columns.AbstractColumnParser; | import java.util.*; import tech.tablesaw.api.*; import tech.tablesaw.columns.*; | [
"java.util",
"tech.tablesaw.api",
"tech.tablesaw.columns"
] | java.util; tech.tablesaw.api; tech.tablesaw.columns; | 2,355,341 |
public final COSDictionary getFirstTrailer()
{
if (bytePosToXrefMap.isEmpty())
{
return null;
}
Set<Long> offsets = bytePosToXrefMap.keySet();
SortedSet<Long> sortedOffset = new TreeSet<>(offsets);
return bytePosToXrefMap.get(sortedOffset.first()).trailer;
}
| final COSDictionary function() { if (bytePosToXrefMap.isEmpty()) { return null; } Set<Long> offsets = bytePosToXrefMap.keySet(); SortedSet<Long> sortedOffset = new TreeSet<>(offsets); return bytePosToXrefMap.get(sortedOffset.first()).trailer; } | /**
* Returns the first trailer if at least one exists.
*
* @return the first trailer or null
*/ | Returns the first trailer if at least one exists | getFirstTrailer | {
"repo_name": "kalaspuffar/pdfbox",
"path": "pdfbox/src/main/java/org/apache/pdfbox/pdfparser/XrefTrailerResolver.java",
"license": "apache-2.0",
"size": 12187
} | [
"java.util.Set",
"java.util.SortedSet",
"java.util.TreeSet",
"org.apache.pdfbox.cos.COSDictionary"
] | import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; import org.apache.pdfbox.cos.COSDictionary; | import java.util.*; import org.apache.pdfbox.cos.*; | [
"java.util",
"org.apache.pdfbox"
] | java.util; org.apache.pdfbox; | 2,086,585 |
void setData(EObject value); | void setData(EObject value); | /**
* Sets the value of the '{@link gov.nasa.ensemble.core.model.plan.EPlanElement#getData <em>Data</em>}' containment reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Data</em>' containment reference.
* @see #getData()
* @generated
*/ | Sets the value of the '<code>gov.nasa.ensemble.core.model.plan.EPlanElement#getData Data</code>' containment reference. | setData | {
"repo_name": "nasa/OpenSPIFe",
"path": "gov.nasa.ensemble.core.model.plan/src/gov/nasa/ensemble/core/model/plan/EPlanElement.java",
"license": "apache-2.0",
"size": 8689
} | [
"org.eclipse.emf.ecore.EObject"
] | import org.eclipse.emf.ecore.EObject; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 1,581,351 |
public void setContextMenuButton(CmsToolbarContextButton button) {
m_contextButton = button;
} | void function(CmsToolbarContextButton button) { m_contextButton = button; } | /**
* Sets the context menu button.<p>
*
* @param button the context menu button
*/ | Sets the context menu button | setContextMenuButton | {
"repo_name": "it-tavis/opencms-core",
"path": "src-gwt/org/opencms/ade/editprovider/client/CmsDirectEditToolbarHandler.java",
"license": "lgpl-2.1",
"size": 7050
} | [
"org.opencms.gwt.client.ui.CmsToolbarContextButton"
] | import org.opencms.gwt.client.ui.CmsToolbarContextButton; | import org.opencms.gwt.client.ui.*; | [
"org.opencms.gwt"
] | org.opencms.gwt; | 709,972 |
StdDraw.point(x, y);
} | StdDraw.point(x, y); } | /**
* Draws this point to standard draw.
*/ | Draws this point to standard draw | draw | {
"repo_name": "neuromantik33/Algopad",
"path": "algorithms/src/main/java/algopad/algorithms/pt1/w3/Point.java",
"license": "mit",
"size": 4895
} | [
"edu.princeton.cs.algs4.StdDraw"
] | import edu.princeton.cs.algs4.StdDraw; | import edu.princeton.cs.algs4.*; | [
"edu.princeton.cs"
] | edu.princeton.cs; | 1,848,520 |
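A usage sketch for the record above, assuming the conventional algs4 assignment Point with an (x, y) integer constructor and the 0..32767 coordinate range; both assumptions come from the course spec, not from this record.

import edu.princeton.cs.algs4.StdDraw;

public class PointDrawSketch {
    public static void main(String[] args) {
        StdDraw.setXscale(0, 32768); // assumed coordinate range
        StdDraw.setYscale(0, 32768);
        Point p = new Point(1000, 2000); // hypothetical point
        p.draw();                        // delegates to StdDraw.point(x, y) as in the record
        StdDraw.show();
    }
}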
@Test
public void testFileDeterminationFolderMatcherAndFolderUri()
throws IOException {
final Extension e = new Extension();
e.setProperty(HttpListener.PROPERTY_URLMATCHER, "/doc/*");
e.setProperty(FileHandler.PROPERTY_DOCROOT, testDir.getAbsolutePath());
// create the files
final File tmpDir = new File(testDir, "afolder");
final File tmpFile = new File(testDir, "index.html");
assertTrue(tmpDir.mkdirs());
assertTrue(tmpFile.createNewFile());
assertTrue(new File(testDir, "afolder/index.html").createNewFile());
final FileHandler h = new FileHandler();
h.initialize(e);
// the test subject
File file;
// check without sub-folder
file = h.determineFile("/doc/index.html");
assertEquals(new File(testDir, "index.html").getCanonicalFile(), file);
// check with sub-folder
file = h.determineFile("/doc/afolder/index.html");
assertEquals(new File(testDir, "/afolder/index.html").getCanonicalFile(), file);
assertTrue(tmpFile.delete());
assertTrue(Files.deleteDir(tmpDir));
} | void function() throws IOException { final Extension e = new Extension(); e.setProperty(HttpListener.PROPERTY_URLMATCHER, STR); e.setProperty(FileHandler.PROPERTY_DOCROOT, testDir.getAbsolutePath()); final File tmpDir = new File(testDir, STR); final File tmpFile = new File(testDir, STR); assertTrue(tmpDir.mkdirs()); assertTrue(tmpFile.createNewFile()); assertTrue(new File(testDir, STR).createNewFile()); final FileHandler h = new FileHandler(); h.initialize(e); File file; file = h.determineFile(STR); assertEquals(new File(testDir, STR).getCanonicalFile(), file); file = h.determineFile(STR); assertEquals(new File(testDir, STR).getCanonicalFile(), file); assertTrue(tmpFile.delete()); assertTrue(Files.deleteDir(tmpDir)); } | /**
* Test a folder-<code>urlMatcher</code> and an <code>URI</code> which
* contains the folder.
*
* @throws IOException
* if the decoding fails
 */ | Test a folder-<code>urlMatcher</code> and a <code>URI</code> which contains the folder | testFileDeterminationFolderMatcherAndFolderUri | {
"repo_name": "pmeisen/gen-server-http-listener",
"path": "test/net/meisen/general/server/http/listener/handler/TestFileHandler.java",
"license": "mit",
"size": 20094
} | [
"java.io.File",
"java.io.IOException",
"net.meisen.general.genmisc.types.Files",
"net.meisen.general.server.http.listener.HttpListener",
"net.meisen.general.server.settings.pojos.Extension",
"org.junit.Assert"
] | import java.io.File; import java.io.IOException; import net.meisen.general.genmisc.types.Files; import net.meisen.general.server.http.listener.HttpListener; import net.meisen.general.server.settings.pojos.Extension; import org.junit.Assert; | import java.io.*; import net.meisen.general.genmisc.types.*; import net.meisen.general.server.http.listener.*; import net.meisen.general.server.settings.pojos.*; import org.junit.*; | [
"java.io",
"net.meisen.general",
"org.junit"
] | java.io; net.meisen.general; org.junit; | 217,647 |
@Test
public void testDurationMetricsCanBeLonger24Hours() throws Exception {
Ignite ign = startGrid("MockedMetrics", getConfiguration().setMetricsUpdateFrequency(500));
ClusterNode node = ign.cluster().localNode();
assert node instanceof TcpDiscoveryNode : "Setup failed, test is incorrect.";
// Get rid of metrics provider: current logic ignores metrics field if provider != null.
setField(node, "metricsProvider", null);
ClusterMetricsImpl original = getField(node, "metrics");
setField(node, "metrics", new MockedClusterMetrics(original));;
List<?> durationMetrics = execSql(ign,
"SELECT " +
"MAX_JOBS_WAIT_TIME, " +
"CUR_JOBS_WAIT_TIME, " +
"AVG_JOBS_WAIT_TIME, " +
"MAX_JOBS_EXECUTE_TIME, " +
"CUR_JOBS_EXECUTE_TIME, " +
"AVG_JOBS_EXECUTE_TIME, " +
"TOTAL_JOBS_EXECUTE_TIME, " +
"TOTAL_BUSY_TIME, " +
"TOTAL_IDLE_TIME, " +
"CUR_IDLE_TIME, " +
"UPTIME " +
"FROM " + systemSchemaName() + ".NODE_METRICS").get(0);
List<Long> elevenExpVals = LongStream
.generate(() -> MockedClusterMetrics.LONG_DURATION_MS)
.limit(11)
.boxed()
.collect(Collectors.toList());
assertEqualsCollections(elevenExpVals, durationMetrics);
}
public static class MockedClusterMetrics extends ClusterMetricsImpl {
public static final long LONG_DURATION_MS = TimeUnit.DAYS.toMillis(365);
public MockedClusterMetrics(ClusterMetricsImpl original) throws Exception {
super(
getField(original, "ctx"),
getField(original, "nodeStartTime"));
} | void function() throws Exception { Ignite ign = startGrid(STR, getConfiguration().setMetricsUpdateFrequency(500)); ClusterNode node = ign.cluster().localNode(); assert node instanceof TcpDiscoveryNode : STR; setField(node, STR, null); ClusterMetricsImpl original = getField(node, STR); setField(node, STR, new MockedClusterMetrics(original));; List<?> durationMetrics = execSql(ign, STR + STR + STR + STR + STR + STR + STR + STR + STR + STR + STR + STR + STR + systemSchemaName() + STR).get(0); List<Long> elevenExpVals = LongStream .generate(() -> MockedClusterMetrics.LONG_DURATION_MS) .limit(11) .boxed() .collect(Collectors.toList()); assertEqualsCollections(elevenExpVals, durationMetrics); } public static class MockedClusterMetrics extends ClusterMetricsImpl { public static final long LONG_DURATION_MS = TimeUnit.DAYS.toMillis(365); public MockedClusterMetrics(ClusterMetricsImpl original) throws Exception { super( getField(original, "ctx"), getField(original, STR)); } | /**
 * Regression test. Verifies that duration metrics can exceed 24 hours.
 */ | Regression test. Verifies that duration metrics can exceed 24 hours | testDurationMetricsCanBeLonger24Hours | {
"repo_name": "ilantukh/ignite",
"path": "modules/indexing/src/test/java/org/apache/ignite/internal/processors/query/SqlSystemViewsSelfTest.java",
"license": "apache-2.0",
"size": 73343
} | [
"java.util.List",
"java.util.concurrent.TimeUnit",
"java.util.stream.Collectors",
"java.util.stream.LongStream",
"org.apache.ignite.Ignite",
"org.apache.ignite.cluster.ClusterNode",
"org.apache.ignite.internal.managers.discovery.ClusterMetricsImpl",
"org.apache.ignite.spi.discovery.tcp.internal.TcpDiscoveryNode"
] | import java.util.List; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.LongStream; import org.apache.ignite.Ignite; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.internal.managers.discovery.ClusterMetricsImpl; import org.apache.ignite.spi.discovery.tcp.internal.TcpDiscoveryNode; | import java.util.*; import java.util.concurrent.*; import java.util.stream.*; import org.apache.ignite.*; import org.apache.ignite.cluster.*; import org.apache.ignite.internal.managers.discovery.*; import org.apache.ignite.spi.discovery.tcp.internal.*; | [
"java.util",
"org.apache.ignite"
] | java.util; org.apache.ignite; | 896,738 |
// Get a SAX parser.
SAXParserFactory saxParserFactory = SAXParserFactory.newInstance();
SAXParser saxParser;
try {
saxParser = saxParserFactory.newSAXParser();
} catch (ParserConfigurationException pce) {
throw new AssertionError("Could not get SAX parser for XML.");
}
// Construct the handler for SAX parsing.
XliffSaxHandler xliffSaxHandler = new XliffSaxHandler();
// Parse the XLIFF content.
try {
saxParser.parse(new InputSource(new StringReader(xliffContent)), xliffSaxHandler);
} catch (IOException e) {
throw new AssertionError("Should not fail in reading a string.");
}
// Build a SoyMsgBundle from the parsed data (stored in xliffSaxHandler).
return new SoyMsgBundleImpl(xliffSaxHandler.getTargetLocaleString(), xliffSaxHandler.getMsgs());
}
// -----------------------------------------------------------------------------------------------
private static class XliffSaxHandler extends DefaultHandler {
private String targetLocaleString;
private final List<SoyMsg> msgs;
private boolean isInMsg;
private long currMsgId;
private List<SoyMsgPart> currMsgParts;
private String currRawTextPart;
public XliffSaxHandler() {
msgs = Lists.newArrayList();
isInMsg = false;
} | SAXParserFactory saxParserFactory = SAXParserFactory.newInstance(); SAXParser saxParser; try { saxParser = saxParserFactory.newSAXParser(); } catch (ParserConfigurationException pce) { throw new AssertionError(STR); } XliffSaxHandler xliffSaxHandler = new XliffSaxHandler(); try { saxParser.parse(new InputSource(new StringReader(xliffContent)), xliffSaxHandler); } catch (IOException e) { throw new AssertionError(STR); } return new SoyMsgBundleImpl(xliffSaxHandler.getTargetLocaleString(), xliffSaxHandler.getMsgs()); } private static class XliffSaxHandler extends DefaultHandler { private String targetLocaleString; private final List<SoyMsg> msgs; private boolean isInMsg; private long currMsgId; private List<SoyMsgPart> currMsgParts; private String currRawTextPart; public XliffSaxHandler() { msgs = Lists.newArrayList(); isInMsg = false; } | /**
* Parses the content of a translated XLIFF file and creates a SoyMsgBundle.
*
* @param xliffContent The XLIFF content to parse.
* @return The resulting SoyMsgBundle.
* @throws SAXException If there's an error parsing the data.
* @throws SoyMsgException If there's an error in parsing the data.
*/ | Parses the content of a translated XLIFF file and creates a SoyMsgBundle | parseXliffTargetMsgs | {
"repo_name": "Medium/closure-templates",
"path": "java/src/com/google/template/soy/xliffmsgplugin/XliffParser.java",
"license": "apache-2.0",
"size": 7615
} | [
"com.google.common.collect.Lists",
"com.google.template.soy.msgs.restricted.SoyMsg",
"com.google.template.soy.msgs.restricted.SoyMsgBundleImpl",
"com.google.template.soy.msgs.restricted.SoyMsgPart",
"java.io.IOException",
"java.io.StringReader",
"java.util.List",
"javax.xml.parsers.ParserConfigurationException",
"javax.xml.parsers.SAXParser",
"javax.xml.parsers.SAXParserFactory",
"org.xml.sax.InputSource",
"org.xml.sax.helpers.DefaultHandler"
] | import com.google.common.collect.Lists; import com.google.template.soy.msgs.restricted.SoyMsg; import com.google.template.soy.msgs.restricted.SoyMsgBundleImpl; import com.google.template.soy.msgs.restricted.SoyMsgPart; import java.io.IOException; import java.io.StringReader; import java.util.List; import javax.xml.parsers.ParserConfigurationException; import javax.xml.parsers.SAXParser; import javax.xml.parsers.SAXParserFactory; import org.xml.sax.InputSource; import org.xml.sax.helpers.DefaultHandler; | import com.google.common.collect.*; import com.google.template.soy.msgs.restricted.*; import java.io.*; import java.util.*; import javax.xml.parsers.*; import org.xml.sax.*; import org.xml.sax.helpers.*; | [
"com.google.common",
"com.google.template",
"java.io",
"java.util",
"javax.xml",
"org.xml.sax"
] | com.google.common; com.google.template; java.io; java.util; javax.xml; org.xml.sax; | 234,869 |
public void updateCustomRoles(
final IncomingToken userToken,
final UserName userName,
final Set<String> addRoles,
final Set<String> removeRoles)
throws AuthStorageException, NoSuchUserException, NoSuchRoleException,
InvalidTokenException, UnauthorizedException, IllegalParameterException {
// some of this code is similar to the updateRoles function, refactor?
nonNull(userName, "userName");
nonNull(addRoles, "addRoles");
nonNull(removeRoles, "removeRoles");
noNulls(addRoles, "Null role in addRoles");
noNulls(removeRoles, "Null role in removeRoles");
final Set<String> intersect = new HashSet<>(addRoles);
intersect.retainAll(removeRoles);
if (!intersect.isEmpty()) {
throw new IllegalParameterException(
"One or more roles is to be both removed and added: " +
String.join(", ", intersect));
}
final AuthUser admin = getUser(userToken,
new OpReqs("update custom roles for user {}", userName.getName())
.types(TokenType.LOGIN).roles(Role.ADMIN));
storage.updateCustomRoles(userName, addRoles, removeRoles);
logCustomRoleUpdate(admin.getUserName(), userName, addRoles, removeRoles);
}
| void function( final IncomingToken userToken, final UserName userName, final Set<String> addRoles, final Set<String> removeRoles) throws AuthStorageException, NoSuchUserException, NoSuchRoleException, InvalidTokenException, UnauthorizedException, IllegalParameterException { nonNull(userName, STR); nonNull(addRoles, STR); nonNull(removeRoles, STR); noNulls(addRoles, STR); noNulls(removeRoles, STR); final Set<String> intersect = new HashSet<>(addRoles); intersect.retainAll(removeRoles); if (!intersect.isEmpty()) { throw new IllegalParameterException( STR + String.join(STR, intersect)); } final AuthUser admin = getUser(userToken, new OpReqs(STR, userName.getName()) .types(TokenType.LOGIN).roles(Role.ADMIN)); storage.updateCustomRoles(userName, addRoles, removeRoles); logCustomRoleUpdate(admin.getUserName(), userName, addRoles, removeRoles); } | /** Update a user's custom roles.
* @param userToken a token for a user account with the administrator role.
* @param userName the name of the user for which the custom roles will be altered.
* @param addRoles the roles to add.
* @param removeRoles the roles to remove.
* @throws AuthStorageException if an error occurred accessing the storage system.
* @throws UnauthorizedException if the user account associated with the token does not have
* the administrator role or the token is not a login token.
* @throws NoSuchUserException if there is no user account with the given name.
* @throws NoSuchRoleException if one of the roles does not exist in the database.
* @throws InvalidTokenException if the token is invalid.
* @throws IllegalParameterException if a role is to be both removed and added.
*/ | Update a user's custom roles | updateCustomRoles | {
"repo_name": "kbase/auth2",
"path": "src/us/kbase/auth2/lib/Authentication.java",
"license": "mit",
"size": 135654
} | [
"java.util.HashSet",
"java.util.Set",
"us.kbase.auth2.lib.Utils",
"us.kbase.auth2.lib.exceptions.IllegalParameterException",
"us.kbase.auth2.lib.exceptions.InvalidTokenException",
"us.kbase.auth2.lib.exceptions.NoSuchRoleException",
"us.kbase.auth2.lib.exceptions.NoSuchUserException",
"us.kbase.auth2.lib.exceptions.UnauthorizedException",
"us.kbase.auth2.lib.storage.exceptions.AuthStorageException",
"us.kbase.auth2.lib.token.IncomingToken",
"us.kbase.auth2.lib.token.TokenType",
"us.kbase.auth2.lib.user.AuthUser"
] | import java.util.HashSet; import java.util.Set; import us.kbase.auth2.lib.Utils; import us.kbase.auth2.lib.exceptions.IllegalParameterException; import us.kbase.auth2.lib.exceptions.InvalidTokenException; import us.kbase.auth2.lib.exceptions.NoSuchRoleException; import us.kbase.auth2.lib.exceptions.NoSuchUserException; import us.kbase.auth2.lib.exceptions.UnauthorizedException; import us.kbase.auth2.lib.storage.exceptions.AuthStorageException; import us.kbase.auth2.lib.token.IncomingToken; import us.kbase.auth2.lib.token.TokenType; import us.kbase.auth2.lib.user.AuthUser; | import java.util.*; import us.kbase.auth2.lib.*; import us.kbase.auth2.lib.exceptions.*; import us.kbase.auth2.lib.storage.exceptions.*; import us.kbase.auth2.lib.token.*; import us.kbase.auth2.lib.user.*; | [
"java.util",
"us.kbase.auth2"
] | java.util; us.kbase.auth2; | 995,020 |