Columns (name / type / observed length or value range):
  method                stringlengths  13 to 441k
  clean_method          stringlengths  7 to 313k
  doc                   stringlengths  17 to 17.3k
  comment               stringlengths  3 to 1.42k
  method_name           stringlengths  1 to 273
  extra                 dict
  imports               sequence
  imports_info          stringlengths  19 to 34.8k
  cluster_imports_info  stringlengths  15 to 3.66k
  libraries             sequence
  libraries_info        stringlengths  6 to 661
  id                    int64          0 to 2.92M
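Each row in this preview is one mined Java method together with its documentation and import metadata. As a rough illustration of how the columns fit together, the following is a minimal sketch, assuming the rows are exposed as Python dicts (for example through the Hugging Face datasets library); the dataset path is a placeholder, and the example field values are taken from the first row shown below, not from any official loader.

# Minimal sketch (assumption): rows exposed as Python dicts keyed by the columns
# above, e.g. via the Hugging Face `datasets` library. The dataset path is a
# placeholder, not the real identifier of this corpus.
from datasets import load_dataset

ds = load_dataset("path/to/this-java-method-corpus", split="train")  # placeholder path

row = ds[0]
print(row["method_name"])         # e.g. "split"
print(row["extra"]["repo_name"])  # source repository, e.g. "SunLabsAST/AURA"
print(row["libraries"])           # top-level package prefixes, e.g. ["com.sun.labs"]
print(row["doc"])                 # original Javadoc attached to the method

# As the rows below show, `clean_method` abstracts `method`: access modifiers are
# dropped, the method name is replaced with `function`, and string literals are
# replaced with the STR token.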
public void split() throws Exception { // // Open the database that has the initial hash string BerkeleyDataWrapper bdbRoot = new BerkeleyDataWrapper( fsNamePrefix + initialHashBits + "/" + initialHashBits + "/db", logger); // // Open the target database environments, putting them into a // trie by hash code value BinaryTrie<BerkeleyDataWrapper> trie = new BinaryTrie<BerkeleyDataWrapper>(); String[] prefixes = getNewPrefixes(numSegments, initialHashBits); for (String prefix : prefixes) { // // Open a db and put it in the trie BerkeleyDataWrapper curr = new BerkeleyDataWrapper( fsNamePrefix + prefix + "/" + prefix + "/db", logger); DSBitSet currPrefix = DSBitSet.parse(prefix); trie.add(curr, currPrefix); } // // Iterate over the entire database writing the data out to each // of the children. Start with the items, then do the attention StopWatch sw = new StopWatch(); sw.start(); DBIterator<Item> itemIt = bdbRoot.getAllIterator(null); System.out.print("About to transfer " + bdbRoot.getItemCount(null) + " items........"); while(itemIt.hasNext()) { ItemImpl item = (ItemImpl)itemIt.next(); BerkeleyDataWrapper target = trie.get(DSBitSet.parse(item.hashCode())); target.putItem(item); } itemIt.close(); System.out.println("Done"); DBIterator<Attention> attnIt = bdbRoot.getAttentionAddedSince(0); System.out.print("About to transfer " + bdbRoot.getAttentionCount(null) + " attentions....."); while (attnIt.hasNext()) { Attention attn = attnIt.next(); BerkeleyDataWrapper target = trie.get(DSBitSet.parse(attn.hashCode())); target.putAttention((PersistentAttention)attn); } attnIt.close(); System.out.println("Done"); sw.stop(); System.out.println("Transfer finished in " + sw.getTime() / 1000 + " seconds"); bdbRoot.close(); long numItems = 0; long numAttns = 0; for (BerkeleyDataWrapper bdb : trie.getAll()) { numItems += bdb.getItemCount(null); numAttns += bdb.getAttentionCount(null); bdb.close(); } System.out.println("Accounted for " + numItems + " items and " + numAttns + " attentions in new splits."); }
void function() throws Exception { BerkeleyDataWrapper bdbRoot = new BerkeleyDataWrapper( fsNamePrefix + initialHashBits + "/" + initialHashBits + "/db", logger); BinaryTrie<BerkeleyDataWrapper> trie = new BinaryTrie<BerkeleyDataWrapper>(); String[] prefixes = getNewPrefixes(numSegments, initialHashBits); for (String prefix : prefixes) { BerkeleyDataWrapper curr = new BerkeleyDataWrapper( fsNamePrefix + prefix + "/" + prefix + "/db", logger); DSBitSet currPrefix = DSBitSet.parse(prefix); trie.add(curr, currPrefix); } StopWatch sw = new StopWatch(); sw.start(); DBIterator<Item> itemIt = bdbRoot.getAllIterator(null); System.out.print(STR + bdbRoot.getItemCount(null) + STR); while(itemIt.hasNext()) { ItemImpl item = (ItemImpl)itemIt.next(); BerkeleyDataWrapper target = trie.get(DSBitSet.parse(item.hashCode())); target.putItem(item); } itemIt.close(); System.out.println("Done"); DBIterator<Attention> attnIt = bdbRoot.getAttentionAddedSince(0); System.out.print(STR + bdbRoot.getAttentionCount(null) + STR); while (attnIt.hasNext()) { Attention attn = attnIt.next(); BerkeleyDataWrapper target = trie.get(DSBitSet.parse(attn.hashCode())); target.putAttention((PersistentAttention)attn); } attnIt.close(); System.out.println("Done"); sw.stop(); System.out.println(STR + sw.getTime() / 1000 + STR); bdbRoot.close(); long numItems = 0; long numAttns = 0; for (BerkeleyDataWrapper bdb : trie.getAll()) { numItems += bdb.getItemCount(null); numAttns += bdb.getAttentionCount(null); bdb.close(); } System.out.println(STR + numItems + STR + numAttns + STR); }
/** * Splits the database into numSegments parts. */
Splits the database into numSegments parts
split
{ "repo_name": "SunLabsAST/AURA", "path": "aura/src/com/sun/labs/aura/util/DBSplitter.java", "license": "gpl-2.0", "size": 7246 }
[ "com.sun.labs.aura.datastore.Attention", "com.sun.labs.aura.datastore.DBIterator", "com.sun.labs.aura.datastore.Item", "com.sun.labs.aura.datastore.impl.BinaryTrie", "com.sun.labs.aura.datastore.impl.DSBitSet", "com.sun.labs.aura.datastore.impl.store.BerkeleyDataWrapper", "com.sun.labs.aura.datastore.impl.store.persist.ItemImpl", "com.sun.labs.aura.datastore.impl.store.persist.PersistentAttention", "com.sun.labs.minion.util.StopWatch" ]
import com.sun.labs.aura.datastore.Attention; import com.sun.labs.aura.datastore.DBIterator; import com.sun.labs.aura.datastore.Item; import com.sun.labs.aura.datastore.impl.BinaryTrie; import com.sun.labs.aura.datastore.impl.DSBitSet; import com.sun.labs.aura.datastore.impl.store.BerkeleyDataWrapper; import com.sun.labs.aura.datastore.impl.store.persist.ItemImpl; import com.sun.labs.aura.datastore.impl.store.persist.PersistentAttention; import com.sun.labs.minion.util.StopWatch;
import com.sun.labs.aura.datastore.*; import com.sun.labs.aura.datastore.impl.*; import com.sun.labs.aura.datastore.impl.store.*; import com.sun.labs.aura.datastore.impl.store.persist.*; import com.sun.labs.minion.util.*;
[ "com.sun.labs" ]
com.sun.labs;
1,679,610
public final MetaProperty<String> emailAddress() { return _emailAddress; }
final MetaProperty<String> function() { return _emailAddress; }
/** * The meta-property for the {@code emailAddress} property. * @return the meta-property, not null */
The meta-property for the emailAddress property
emailAddress
{ "repo_name": "DevStreet/FinanceAnalytics", "path": "projects/OG-Master/src/main/java/com/opengamma/master/user/ManageableUser.java", "license": "apache-2.0", "size": 25314 }
[ "org.joda.beans.MetaProperty" ]
import org.joda.beans.MetaProperty;
import org.joda.beans.*;
[ "org.joda.beans" ]
org.joda.beans;
2,581,696
File directoryToZip = new File(sourceDirectory); List<File> fileList = new ArrayList<File>(); getAllFiles(directoryToZip, fileList); writeArchiveFile(directoryToZip, fileList); }
File directoryToZip = new File(sourceDirectory); List<File> fileList = new ArrayList<File>(); getAllFiles(directoryToZip, fileList); writeArchiveFile(directoryToZip, fileList); }
/** * Archive a provided source directory to a zipped file * * @param sourceDirectory Source directory */
Archive a provided source directory to a zipped file
archiveDirectory
{ "repo_name": "ajanthan/wso2apim-soap-api-importer", "path": "src/main/java/org/wso2/apim/tools/ZipUtil.java", "license": "apache-2.0", "size": 3597 }
[ "java.io.File", "java.util.ArrayList", "java.util.List" ]
import java.io.File; import java.util.ArrayList; import java.util.List;
import java.io.*; import java.util.*;
[ "java.io", "java.util" ]
java.io; java.util;
184,405
public void show(String title, String detail, Boolean withTitle) { progressIndicator = new ProgressDialog(cordova.getActivity()); if(withTitle) progressIndicator.setTitle(title); progressIndicator.setMessage(detail); progressIndicator.show(); }
void function(String title, String detail, Boolean withTitle) { progressIndicator = new ProgressDialog(cordova.getActivity()); if(withTitle) progressIndicator.setTitle(title); progressIndicator.setMessage(detail); progressIndicator.show(); }
/** * This show the ProgressDialog * * @param text - Message to display in the Progress Dialog */
This show the ProgressDialog
show
{ "repo_name": "pbernasconi/cordova-progressIndicator", "path": "src/android/ProgressIndicator.java", "license": "apache-2.0", "size": 2954 }
[ "android.app.ProgressDialog" ]
import android.app.ProgressDialog;
import android.app.*;
[ "android.app" ]
android.app;
1,036,669
void stopRecording() { if (DEBUG) Log.v(TAG, "stopRecording"); synchronized (mSync) { if (!mIsCapturing || mRequestStop) { return; } mRequestStop = true; // for rejecting newer frame mSync.notifyAll(); // We can not know when the encoding and writing finish. // so we return immediately after request to avoid delay of caller thread } }
void stopRecording() { if (DEBUG) Log.v(TAG, STR); synchronized (mSync) { if (!mIsCapturing mRequestStop) { return; } mRequestStop = true; mSync.notifyAll(); } }
/** * the method to request stop encoding */
the method to request stop encoding
stopRecording
{ "repo_name": "itsnothingg/EasyScreenRecorder", "path": "library/src/main/java/com/choiintack/easyscreenrecorder/encoder/MediaEncoder.java", "license": "apache-2.0", "size": 15278 }
[ "android.util.Log" ]
import android.util.Log;
import android.util.*;
[ "android.util" ]
android.util;
282,658
RegistrationRequest getOriginalRegistrationRequest();
RegistrationRequest getOriginalRegistrationRequest();
/** * Returns the request sent from the node to the hub to register the proxy. * * @return the original node registration request. */
Returns the request sent from the node to the hub to register the proxy
getOriginalRegistrationRequest
{ "repo_name": "jmt4/Selenium2", "path": "java/server/src/org/openqa/grid/internal/RemoteProxy.java", "license": "apache-2.0", "size": 6990 }
[ "org.openqa.grid.common.RegistrationRequest" ]
import org.openqa.grid.common.RegistrationRequest;
import org.openqa.grid.common.*;
[ "org.openqa.grid" ]
org.openqa.grid;
1,426,592
void replaceAndDelete(String toReplace, String replaceWith) { // delete all productions not containing toReplace for (Iterator i = rules.iterator(); i.hasNext();) { boolean matched = false; List ruleBody = (List) i.next(); for (int j = 0; j < ruleBody.size(); j++) { String token = (String) ruleBody.get(j); if (token.equals(toReplace)) { matched = true; break; } } if (!matched) i.remove(); } // now do the replaces for those that match replaceToken(toReplace, replaceWith); }
void replaceAndDelete(String toReplace, String replaceWith) { for (Iterator i = rules.iterator(); i.hasNext();) { boolean matched = false; List ruleBody = (List) i.next(); for (int j = 0; j < ruleBody.size(); j++) { String token = (String) ruleBody.get(j); if (token.equals(toReplace)) { matched = true; break; } } if (!matched) i.remove(); } replaceToken(toReplace, replaceWith); }
/** * Executes all replaces in the list of productions and deletes all those that * do not match * * NOTE: a destructive operation, generally run only on copies... */
Executes all replaces in the list of productions and deletes all those that do not match
replaceAndDelete
{ "repo_name": "FreeSchoolHackers/RiTa", "path": "java/rita/support/Definition.java", "license": "gpl-3.0", "size": 8454 }
[ "java.util.Iterator", "java.util.List" ]
import java.util.Iterator; import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
1,379,851
private JSONWriter end(char m, char c) throws JSONException { if (this.mode != m) { throw new JSONException(m == 'a' ? "Misplaced endArray." : "Misplaced endObject."); } this.pop(m); try { this.writer.append(c); } catch (IOException e) { throw new JSONException(e); } this.comma = true; return this; }
JSONWriter function(char m, char c) throws JSONException { if (this.mode != m) { throw new JSONException(m == 'a' ? STR : STR); } this.pop(m); try { this.writer.append(c); } catch (IOException e) { throw new JSONException(e); } this.comma = true; return this; }
/** * End something. * @param m Mode * @param c Closing character * @return this * @throws JSONException If unbalanced. */
End something
end
{ "repo_name": "videogreg93/AnglicismeWordEntryTool", "path": "src/org/json/JSONWriter.java", "license": "mit", "size": 10654 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
383,169
@DesignerProperty(editorType = PropertyTypeConstants.PROPERTY_TYPE_LEGO_EV3_GYRO_SENSOR_MODE, defaultValue = "angle") @SimpleProperty public void Mode(@Options(GyroSensorMode.class) String modeName) { // Make sure modeName is a valid GyroSensorMode. GyroSensorMode gyroMode = GyroSensorMode.fromUnderlyingValue(modeName); if (gyroMode == null) { form.dispatchErrorOccurredEvent( this, "Mode", ErrorMessages.ERROR_EV3_ILLEGAL_ARGUMENT, modeName); return; } setMode(gyroMode); }
@DesignerProperty(editorType = PropertyTypeConstants.PROPERTY_TYPE_LEGO_EV3_GYRO_SENSOR_MODE, defaultValue = "angle") void function(@Options(GyroSensorMode.class) String modeName) { GyroSensorMode gyroMode = GyroSensorMode.fromUnderlyingValue(modeName); if (gyroMode == null) { form.dispatchErrorOccurredEvent( this, "Mode", ErrorMessages.ERROR_EV3_ILLEGAL_ARGUMENT, modeName); return; } setMode(gyroMode); }
/** * Specifies the mode of the sensor. */
Specifies the mode of the sensor
Mode
{ "repo_name": "jisqyv/appinventor-sources", "path": "appinventor/components/src/com/google/appinventor/components/runtime/Ev3GyroSensor.java", "license": "apache-2.0", "size": 7074 }
[ "com.google.appinventor.components.annotations.DesignerProperty", "com.google.appinventor.components.annotations.Options", "com.google.appinventor.components.common.GyroSensorMode", "com.google.appinventor.components.common.PropertyTypeConstants", "com.google.appinventor.components.runtime.util.ErrorMessages" ]
import com.google.appinventor.components.annotations.DesignerProperty; import com.google.appinventor.components.annotations.Options; import com.google.appinventor.components.common.GyroSensorMode; import com.google.appinventor.components.common.PropertyTypeConstants; import com.google.appinventor.components.runtime.util.ErrorMessages;
import com.google.appinventor.components.annotations.*; import com.google.appinventor.components.common.*; import com.google.appinventor.components.runtime.util.*;
[ "com.google.appinventor" ]
com.google.appinventor;
2,646,990
private JsonPath getPath() { List<JsonPath> tokens = new ArrayList<>(); tokens.add(RootToken.instance()); for (int i = 0, size = stackSize; i < size; i++) { if (stack[i] == JsonScope.NONEMPTY_OBJECT || stack[i] == JsonScope.NONEMPTY_ARRAY) { tokens.add(paths[i]); } } return JsonPath.from(tokens); }
JsonPath function() { List<JsonPath> tokens = new ArrayList<>(); tokens.add(RootToken.instance()); for (int i = 0, size = stackSize; i < size; i++) { if (stack[i] == JsonScope.NONEMPTY_OBJECT stack[i] == JsonScope.NONEMPTY_ARRAY) { tokens.add(paths[i]); } } return JsonPath.from(tokens); }
/** * Returns a <a href="http://goessner.net/articles/JsonPath/">JsonPath</a> to * the current location in the JSON value. */
Returns a JsonPath to the current location in the JSON value
getPath
{ "repo_name": "Trunkplatform/rxjava-json", "path": "rxjava-json-core/src/main/java/com/trunk/rx/json/impl/JsonParser.java", "license": "apache-2.0", "size": 26319 }
[ "com.trunk.rx.json.path.JsonPath", "com.trunk.rx.json.path.RootToken", "java.util.ArrayList", "java.util.List" ]
import com.trunk.rx.json.path.JsonPath; import com.trunk.rx.json.path.RootToken; import java.util.ArrayList; import java.util.List;
import com.trunk.rx.json.path.*; import java.util.*;
[ "com.trunk.rx", "java.util" ]
com.trunk.rx; java.util;
509,885
@ScreenshotCheck @Test(timeout = 300000) public void defaultConstructorTest() throws InterruptedException { testCommon(Pages.DefaultConstructor.name()); }
@Test(timeout = 300000) void function() throws InterruptedException { testCommon(Pages.DefaultConstructor.name()); }
/** * Test for Slider default constructor */
Test for Slider default constructor
defaultConstructorTest
{ "repo_name": "teamfx/openjfx-8u-dev-tests", "path": "functional/ControlsTests/test/javafx/scene/control/test/mix/SliderTest.java", "license": "gpl-2.0", "size": 3726 }
[ "org.junit.Test" ]
import org.junit.Test;
import org.junit.*;
[ "org.junit" ]
org.junit;
1,854,549
@Test public void testAggNoCombine() throws Exception { for (String[] aggGroup : aggs) { String[] aggFinalTypes = null; // will contains AVGFinal, DoubleAvgFinal etc String[] aggInitialTypes = null; // will contains AVGInitial, DoubleAvgInitial etc for (String stage: stages) { String[] aggTypesArray = null; if(stage.equals("Initial")) { aggInitialTypes = new String[aggGroup.length]; aggTypesArray = aggInitialTypes; } else if(stage.equals("Final")) { aggFinalTypes = new String[aggGroup.length]; aggTypesArray = aggFinalTypes; } else { // Intermediate continue; } for (int i = 0; i < aggTypesArray.length; i++) { aggTypesArray[i] = aggGroup[i] + stage; } } for(int k = 0; k < aggFinalTypes.length; k++) { EvalFunc<?> avgInitial = evalFuncMap.get(aggInitialTypes[k]); Tuple tup = inputMap.get(getInputType(aggInitialTypes[k])); // To test this case, first AVGInitial is called for each input // value and output of it is put into a bag. The bag containing // all AVGInitial output is provided as input to AVGFinal // The tuple we got above has a bag with input // values. Lets call AVGInitial with each value: DataBag bg = (DataBag) tup.get(0); DataBag finalInputBg = bagFactory.newDefaultBag(); for (Tuple tuple : bg) { DataBag initialInputBg = bagFactory.newDefaultBag(); initialInputBg.add(tuple); Tuple initialInputTuple = tupleFactory.newTuple(initialInputBg); finalInputBg.add((Tuple)avgInitial.exec(initialInputTuple)); } Tuple finalInputTuple = tupleFactory.newTuple(finalInputBg); EvalFunc<?> aggFinal = evalFuncMap.get(aggFinalTypes[k]); String msg = "[Testing " + aggGroup[k] + " on input type: " + getInputType(aggFinalTypes[k]); System.err.println(msg + " for no combiner case]"); Object output = aggFinal.exec(finalInputTuple); msg += " ( (output) " + output + " == " + getExpected(aggFinalTypes[k]) + " (expected) )]"; // for doubles, precisions can be a problem - so check // if the type is double for expected result and check // within some precision if(getExpected(aggFinalTypes[k]) instanceof Double) { assertEquals(msg, (Double)getExpected(aggFinalTypes[k]), (Double)output, 0.00001); } else { assertEquals(msg, getExpected(aggFinalTypes[k]), output); } } } }
void function() throws Exception { for (String[] aggGroup : aggs) { String[] aggFinalTypes = null; String[] aggInitialTypes = null; for (String stage: stages) { String[] aggTypesArray = null; if(stage.equals(STR)) { aggInitialTypes = new String[aggGroup.length]; aggTypesArray = aggInitialTypes; } else if(stage.equals("Final")) { aggFinalTypes = new String[aggGroup.length]; aggTypesArray = aggFinalTypes; } else { continue; } for (int i = 0; i < aggTypesArray.length; i++) { aggTypesArray[i] = aggGroup[i] + stage; } } for(int k = 0; k < aggFinalTypes.length; k++) { EvalFunc<?> avgInitial = evalFuncMap.get(aggInitialTypes[k]); Tuple tup = inputMap.get(getInputType(aggInitialTypes[k])); DataBag bg = (DataBag) tup.get(0); DataBag finalInputBg = bagFactory.newDefaultBag(); for (Tuple tuple : bg) { DataBag initialInputBg = bagFactory.newDefaultBag(); initialInputBg.add(tuple); Tuple initialInputTuple = tupleFactory.newTuple(initialInputBg); finalInputBg.add((Tuple)avgInitial.exec(initialInputTuple)); } Tuple finalInputTuple = tupleFactory.newTuple(finalInputBg); EvalFunc<?> aggFinal = evalFuncMap.get(aggFinalTypes[k]); String msg = STR + aggGroup[k] + STR + getInputType(aggFinalTypes[k]); System.err.println(msg + STR); Object output = aggFinal.exec(finalInputTuple); msg += STR + output + STR + getExpected(aggFinalTypes[k]) + STR; if(getExpected(aggFinalTypes[k]) instanceof Double) { assertEquals(msg, (Double)getExpected(aggFinalTypes[k]), (Double)output, 0.00001); } else { assertEquals(msg, getExpected(aggFinalTypes[k]), output); } } } }
/** * Test the case where the combiner is not called - so initial is called * and then final is called * @throws Exception */
Test the case where the combiner is not called - so initial is called and then final is called
testAggNoCombine
{ "repo_name": "d601/pig", "path": "test/org/apache/pig/test/TestBuiltin.java", "license": "apache-2.0", "size": 105887 }
[ "org.apache.pig.EvalFunc", "org.apache.pig.data.DataBag", "org.apache.pig.data.Tuple", "org.junit.Assert" ]
import org.apache.pig.EvalFunc; import org.apache.pig.data.DataBag; import org.apache.pig.data.Tuple; import org.junit.Assert;
import org.apache.pig.*; import org.apache.pig.data.*; import org.junit.*;
[ "org.apache.pig", "org.junit" ]
org.apache.pig; org.junit;
1,923,123
EReference getDependencyModel_RootArtifacts();
EReference getDependencyModel_RootArtifacts();
/** * Returns the meta object for the reference list ' * {@link org.sourcepit.maven.dependency.model.DependencyModel#getRootArtifacts <em>Root Artifacts</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * * @return the meta object for the reference list '<em>Root Artifacts</em>'. * @see org.sourcepit.maven.dependency.model.DependencyModel#getRootArtifacts() * @see #getDependencyModel() * @generated */
Returns the meta object for the reference list ' <code>org.sourcepit.maven.dependency.model.DependencyModel#getRootArtifacts Root Artifacts</code>'.
getDependencyModel_RootArtifacts
{ "repo_name": "sourcepit/maven-dependency-model", "path": "gen/main/emf/org/sourcepit/maven/dependency/model/DependencyModelPackage.java", "license": "apache-2.0", "size": 25206 }
[ "org.eclipse.emf.ecore.EReference" ]
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
2,769,465
private static String[] getServiceDetails(String uri, String tenantDomain) throws ODataServiceFault { String odataServices; String odataServiceName; String odataServiceUri; String configID; if (tenantDomain == null) { odataServices = "odata/"; } else { odataServices = "odata/t/" + tenantDomain + "/"; } int index = uri.indexOf(odataServices); if (-1 != index) { int serviceStart = index + odataServices.length(); if (uri.length() > serviceStart + 1) { odataServiceUri = uri.substring(serviceStart); if (-1 != odataServiceUri.indexOf('/')) { String[] params = odataServiceUri.split("/"); odataServiceName = params[0]; configID = params[1]; return new String[] { odataServiceName, configID }; } } } throw new ODataServiceFault("Bad OData request."); }
static String[] function(String uri, String tenantDomain) throws ODataServiceFault { String odataServices; String odataServiceName; String odataServiceUri; String configID; if (tenantDomain == null) { odataServices = STR; } else { odataServices = STR + tenantDomain + "/"; } int index = uri.indexOf(odataServices); if (-1 != index) { int serviceStart = index + odataServices.length(); if (uri.length() > serviceStart + 1) { odataServiceUri = uri.substring(serviceStart); if (-1 != odataServiceUri.indexOf('/')) { String[] params = odataServiceUri.split("/"); odataServiceName = params[0]; configID = params[1]; return new String[] { odataServiceName, configID }; } } } throw new ODataServiceFault(STR); }
/** * This method retrieve the service name and config id from the request uri. * * @param uri Request uri * @param tenantDomain Tenant domain * @return String Array String[0] ServiceName, String[1] ConfigID */
This method retrieve the service name and config id from the request uri
getServiceDetails
{ "repo_name": "wso2/carbon-data", "path": "components/data-services/org.wso2.carbon.dataservices.odata.endpoint/src/main/java/org/wso2/carbon/dataservices/odata/endpoint/ODataEndpoint.java", "license": "apache-2.0", "size": 5762 }
[ "org.wso2.carbon.dataservices.core.odata.ODataServiceFault" ]
import org.wso2.carbon.dataservices.core.odata.ODataServiceFault;
import org.wso2.carbon.dataservices.core.odata.*;
[ "org.wso2.carbon" ]
org.wso2.carbon;
2,848,612
public void updateTask() { EntityLivingBase entitylivingbase = this.attacker.getAttackTarget(); this.attacker.getLookHelper().setLookPositionWithEntity(entitylivingbase, 30.0F, 30.0F); double d0 = this.attacker.getDistanceSq(entitylivingbase.posX, entitylivingbase.getEntityBoundingBox().minY, entitylivingbase.posZ); --this.delayCounter; if ((this.longMemory || this.attacker.getEntitySenses().canSee(entitylivingbase)) && this.delayCounter <= 0 && (this.targetX == 0.0D && this.targetY == 0.0D && this.targetZ == 0.0D || entitylivingbase.getDistanceSq(this.targetX, this.targetY, this.targetZ) >= 1.0D || this.attacker.getRNG().nextFloat() < 0.05F)) { this.targetX = entitylivingbase.posX; this.targetY = entitylivingbase.getEntityBoundingBox().minY; this.targetZ = entitylivingbase.posZ; this.delayCounter = 4 + this.attacker.getRNG().nextInt(7); if (this.canPenalize) { this.delayCounter += failedPathFindingPenalty; if (this.attacker.getNavigator().getPath() != null) { net.minecraft.pathfinding.PathPoint finalPathPoint = this.attacker.getNavigator().getPath() .getFinalPathPoint(); if (finalPathPoint != null && entitylivingbase.getDistanceSq(finalPathPoint.xCoord, finalPathPoint.yCoord, finalPathPoint.zCoord) < 1) failedPathFindingPenalty = 0; else failedPathFindingPenalty += 10; } else { failedPathFindingPenalty += 10; } } if (d0 > 1024.0D) { this.delayCounter += 10; } else if (d0 > 256.0D) { this.delayCounter += 5; } if (!this.attacker.getNavigator().tryMoveToEntityLiving(entitylivingbase, this.speedTowardsTarget)) { this.delayCounter += 15; } } this.attackTick = Math.max(this.attackTick - 1, 0); this.checkAndPerformAttack(entitylivingbase, d0); }
void function() { EntityLivingBase entitylivingbase = this.attacker.getAttackTarget(); this.attacker.getLookHelper().setLookPositionWithEntity(entitylivingbase, 30.0F, 30.0F); double d0 = this.attacker.getDistanceSq(entitylivingbase.posX, entitylivingbase.getEntityBoundingBox().minY, entitylivingbase.posZ); --this.delayCounter; if ((this.longMemory this.attacker.getEntitySenses().canSee(entitylivingbase)) && this.delayCounter <= 0 && (this.targetX == 0.0D && this.targetY == 0.0D && this.targetZ == 0.0D entitylivingbase.getDistanceSq(this.targetX, this.targetY, this.targetZ) >= 1.0D this.attacker.getRNG().nextFloat() < 0.05F)) { this.targetX = entitylivingbase.posX; this.targetY = entitylivingbase.getEntityBoundingBox().minY; this.targetZ = entitylivingbase.posZ; this.delayCounter = 4 + this.attacker.getRNG().nextInt(7); if (this.canPenalize) { this.delayCounter += failedPathFindingPenalty; if (this.attacker.getNavigator().getPath() != null) { net.minecraft.pathfinding.PathPoint finalPathPoint = this.attacker.getNavigator().getPath() .getFinalPathPoint(); if (finalPathPoint != null && entitylivingbase.getDistanceSq(finalPathPoint.xCoord, finalPathPoint.yCoord, finalPathPoint.zCoord) < 1) failedPathFindingPenalty = 0; else failedPathFindingPenalty += 10; } else { failedPathFindingPenalty += 10; } } if (d0 > 1024.0D) { this.delayCounter += 10; } else if (d0 > 256.0D) { this.delayCounter += 5; } if (!this.attacker.getNavigator().tryMoveToEntityLiving(entitylivingbase, this.speedTowardsTarget)) { this.delayCounter += 15; } } this.attackTick = Math.max(this.attackTick - 1, 0); this.checkAndPerformAttack(entitylivingbase, d0); }
/** * Updates the task */
Updates the task
updateTask
{ "repo_name": "EPIICTHUNDERCAT/TameableMobs", "path": "src/main/java/com/github/epiicthundercat/tameablemobs/mobs/TameableSnowman.java", "license": "mit", "size": 35195 }
[ "net.minecraft.entity.EntityLivingBase" ]
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.*;
[ "net.minecraft.entity" ]
net.minecraft.entity;
2,756,376
@Override public Annotations getAnnotationsV2(String entityId) throws SynapseException { String url = ENTITY_URI_PATH + "/" + entityId + ANNOTATIONS_V2; return getJSONEntity(getRepoEndpoint(), url, Annotations.class); }
Annotations function(String entityId) throws SynapseException { String url = ENTITY_URI_PATH + "/" + entityId + ANNOTATIONS_V2; return getJSONEntity(getRepoEndpoint(), url, Annotations.class); }
/** * Get the annotations for an entity. * * @param entityId * @return * @throws SynapseException */
Get the annotations for an entity
getAnnotationsV2
{ "repo_name": "zimingd/Synapse-Repository-Services", "path": "client/synapseJavaClient/src/main/java/org/sagebionetworks/client/SynapseClientImpl.java", "license": "apache-2.0", "size": 229293 }
[ "org.sagebionetworks.client.exceptions.SynapseException", "org.sagebionetworks.repo.model.annotation.v2.Annotations" ]
import org.sagebionetworks.client.exceptions.SynapseException; import org.sagebionetworks.repo.model.annotation.v2.Annotations;
import org.sagebionetworks.client.exceptions.*; import org.sagebionetworks.repo.model.annotation.v2.*;
[ "org.sagebionetworks.client", "org.sagebionetworks.repo" ]
org.sagebionetworks.client; org.sagebionetworks.repo;
698,355
public static void invalidate( ThinNodeQueryableGraph graph, Iterable<SkyKey> diff, EvaluationProgressReceiver invalidationReceiver, InvalidationState state, DirtyKeyTracker dirtyKeyTracker, ForkJoinPool forkJoinPool) throws InterruptedException { DirtyingNodeVisitor visitor = createInvalidatingVisitorIfNeeded( graph, diff, invalidationReceiver, state, dirtyKeyTracker, forkJoinPool); if (visitor != null) { visitor.run(); } }
static void function( ThinNodeQueryableGraph graph, Iterable<SkyKey> diff, EvaluationProgressReceiver invalidationReceiver, InvalidationState state, DirtyKeyTracker dirtyKeyTracker, ForkJoinPool forkJoinPool) throws InterruptedException { DirtyingNodeVisitor visitor = createInvalidatingVisitorIfNeeded( graph, diff, invalidationReceiver, state, dirtyKeyTracker, forkJoinPool); if (visitor != null) { visitor.run(); } }
/** * Invalidates given values and their upward transitive closure in the graph if necessary, using * the provided {@link ForkJoinPool}. */
Invalidates given values and their upward transitive closure in the graph if necessary, using the provided <code>ForkJoinPool</code>
invalidate
{ "repo_name": "kamalmarhubi/bazel", "path": "src/main/java/com/google/devtools/build/skyframe/EagerInvalidator.java", "license": "apache-2.0", "size": 5803 }
[ "com.google.devtools.build.skyframe.InvalidatingNodeVisitor", "java.util.concurrent.ForkJoinPool" ]
import com.google.devtools.build.skyframe.InvalidatingNodeVisitor; import java.util.concurrent.ForkJoinPool;
import com.google.devtools.build.skyframe.*; import java.util.concurrent.*;
[ "com.google.devtools", "java.util" ]
com.google.devtools; java.util;
386,580
private void cleanUpStackTrace(Throwable e, final String callerid) { // if e is no runtimeexception, we certainly expect and handle this // exception type anyway. no reason for transmitting stacktraces if (e instanceof RuntimeException && !(e instanceof BadRequestException)) { final StackTraceElement[] stack = e.getStackTrace(); StackTraceElement[] newStack = new StackTraceElement[] {}; for (final StackTraceElement el : stack) { if (el.getClassName().startsWith(java.lang.reflect.Method.class.getName())) { final StackTraceElement[] n = new StackTraceElement[newStack.length - 2]; System.arraycopy(newStack, 0, n, 0, newStack.length - 2); n[n.length - 1] = new StackTraceElement("RemotecallServer from ", callerid, null, -1); e.setStackTrace(n); break; } else { final StackTraceElement[] n = new StackTraceElement[newStack.length + 1]; System.arraycopy(newStack, 0, n, 0, newStack.length); n[n.length - 1] = el; newStack = n; } } e = e.getCause(); } while (e != null) { // do not send cause stacktraces e.setStackTrace(e.getStackTrace().length > 0 ? new StackTraceElement[] { e.getStackTrace()[0] } : new StackTraceElement[] {}); e = e.getCause(); } }
void function(Throwable e, final String callerid) { if (e instanceof RuntimeException && !(e instanceof BadRequestException)) { final StackTraceElement[] stack = e.getStackTrace(); StackTraceElement[] newStack = new StackTraceElement[] {}; for (final StackTraceElement el : stack) { if (el.getClassName().startsWith(java.lang.reflect.Method.class.getName())) { final StackTraceElement[] n = new StackTraceElement[newStack.length - 2]; System.arraycopy(newStack, 0, n, 0, newStack.length - 2); n[n.length - 1] = new StackTraceElement(STR, callerid, null, -1); e.setStackTrace(n); break; } else { final StackTraceElement[] n = new StackTraceElement[newStack.length + 1]; System.arraycopy(newStack, 0, n, 0, newStack.length); n[n.length - 1] = el; newStack = n; } } e = e.getCause(); } while (e != null) { e.setStackTrace(e.getStackTrace().length > 0 ? new StackTraceElement[] { e.getStackTrace()[0] } : new StackTraceElement[] {}); e = e.getCause(); } }
/** * breaks the stacktrace. the invoke process is not important * * @param e * @param callerid */
breaks the stacktrace. the invoke process is not important
cleanUpStackTrace
{ "repo_name": "Horstman/AppWorkUtils", "path": "src/org/appwork/remotecall/server/RemoteCallServer.java", "license": "artistic-2.0", "size": 6575 }
[ "java.lang.reflect.Method" ]
import java.lang.reflect.Method;
import java.lang.reflect.*;
[ "java.lang" ]
java.lang;
1,673,608
@Override public int toByteArray(byte buffer[]) { if (_hasChecksum) return toByteArrayWithSavedChecksum(buffer); if (VERIFY_TEST && _log.shouldLog(Log.INFO)) _log.info("Generating new c/s " + getClass().getSimpleName()); return super.toByteArray(buffer); }
int function(byte buffer[]) { if (_hasChecksum) return toByteArrayWithSavedChecksum(buffer); if (VERIFY_TEST && _log.shouldLog(Log.INFO)) _log.info(STR + getClass().getSimpleName()); return super.toByteArray(buffer); }
/** * If available, use the previously-computed or previously-read checksum for speed */
If available, use the previously-computed or previously-read checksum for speed
toByteArray
{ "repo_name": "oakes/Nightweb", "path": "common/java/router/net/i2p/data/i2np/FastI2NPMessageImpl.java", "license": "unlicense", "size": 7495 }
[ "net.i2p.util.Log" ]
import net.i2p.util.Log;
import net.i2p.util.*;
[ "net.i2p.util" ]
net.i2p.util;
1,453,184
public void onPartitionMigrate(Address thisAddress, MigrationInfo migrationInfo) { if (!thisAddress.equals(migrationInfo.getSource())) { return; } int partitionId = migrationInfo.getPartitionId(); for (Queue<ParkedOperation> parkQueue : parkQueueMap.values()) { Iterator<ParkedOperation> it = parkQueue.iterator(); while (it.hasNext()) { if (Thread.interrupted()) { return; } ParkedOperation parkedOperation = it.next(); if (!parkedOperation.isValid()) { continue; } Operation op = parkedOperation.getOperation(); if (partitionId == op.getPartitionId()) { parkedOperation.setValid(false); PartitionMigratingException pme = new PartitionMigratingException(thisAddress, partitionId, op.getClass().getName(), op.getServiceName()); OperationResponseHandler responseHandler = op.getOperationResponseHandler(); responseHandler.sendResponse(op, pme); it.remove(); } } } }
void function(Address thisAddress, MigrationInfo migrationInfo) { if (!thisAddress.equals(migrationInfo.getSource())) { return; } int partitionId = migrationInfo.getPartitionId(); for (Queue<ParkedOperation> parkQueue : parkQueueMap.values()) { Iterator<ParkedOperation> it = parkQueue.iterator(); while (it.hasNext()) { if (Thread.interrupted()) { return; } ParkedOperation parkedOperation = it.next(); if (!parkedOperation.isValid()) { continue; } Operation op = parkedOperation.getOperation(); if (partitionId == op.getPartitionId()) { parkedOperation.setValid(false); PartitionMigratingException pme = new PartitionMigratingException(thisAddress, partitionId, op.getClass().getName(), op.getServiceName()); OperationResponseHandler responseHandler = op.getOperationResponseHandler(); responseHandler.sendResponse(op, pme); it.remove(); } } } }
/** * Invalidates all parked operations for the migrated partition and sends a {@link PartitionMigratingException} as a * response. * Invoked on the migration destination. This is executed under partition migration lock! */
Invalidates all parked operations for the migrated partition and sends a <code>PartitionMigratingException</code> as a response. Invoked on the migration destination. This is executed under partition migration lock
onPartitionMigrate
{ "repo_name": "emrahkocaman/hazelcast", "path": "hazelcast/src/main/java/com/hazelcast/spi/impl/operationparker/impl/OperationParkerImpl.java", "license": "apache-2.0", "size": 14187 }
[ "com.hazelcast.internal.partition.MigrationInfo", "com.hazelcast.nio.Address", "com.hazelcast.spi.Operation", "com.hazelcast.spi.OperationResponseHandler", "com.hazelcast.spi.exception.PartitionMigratingException", "java.util.Iterator", "java.util.Queue" ]
import com.hazelcast.internal.partition.MigrationInfo; import com.hazelcast.nio.Address; import com.hazelcast.spi.Operation; import com.hazelcast.spi.OperationResponseHandler; import com.hazelcast.spi.exception.PartitionMigratingException; import java.util.Iterator; import java.util.Queue;
import com.hazelcast.internal.partition.*; import com.hazelcast.nio.*; import com.hazelcast.spi.*; import com.hazelcast.spi.exception.*; import java.util.*;
[ "com.hazelcast.internal", "com.hazelcast.nio", "com.hazelcast.spi", "java.util" ]
com.hazelcast.internal; com.hazelcast.nio; com.hazelcast.spi; java.util;
1,313,937
static public @Nonnull VMFilterOptions getInstance( boolean matchesAny ) { return new VMFilterOptions(matchesAny); }
static @Nonnull VMFilterOptions function( boolean matchesAny ) { return new VMFilterOptions(matchesAny); }
/** * Constructs filter options that will match either any criteria or all criteria, but has no actual criteria * associated with it. * * @param matchesAny <code>true</code> if it is sufficient that just one of the criteria are matched, false if all are needed to be matched * @return a newly constructed set of VM filtering options */
Constructs filter options that will match either any criteria or all criteria, but has no actual criteria associated with it
getInstance
{ "repo_name": "maksimov/dasein-cloud-core", "path": "src/main/java/org/dasein/cloud/compute/VMFilterOptions.java", "license": "apache-2.0", "size": 10622 }
[ "javax.annotation.Nonnull" ]
import javax.annotation.Nonnull;
import javax.annotation.*;
[ "javax.annotation" ]
javax.annotation;
1,587,029
public void testRoleInheritance() throws Throwable { CmsObject cms = getCmsObject(); echo("Testing the inheritance of role memberships"); cms.getRequestContext().setSiteRoot(""); CmsRoleManager roleMan = OpenCms.getRoleManager(); CmsOrgUnitManager ouMan = OpenCms.getOrgUnitManager(); // check the root ou CmsOrganizationalUnit rootOu = ouMan.readOrganizationalUnit(cms, ""); CmsResource rootRes = (ouMan.getResourcesForOrganizationalUnit(cms, rootOu.getName()).get(0)); // / CmsOrganizationalUnit ou = ouMan.readOrganizationalUnit(cms, "test"); CmsResource ouRes = (ouMan.getResourcesForOrganizationalUnit(cms, ou.getName()).get(0)); // /sites/default/ CmsOrganizationalUnit ou2 = ouMan.readOrganizationalUnit(cms, ou.getName() + "test2"); CmsResource ou2Res = (ouMan.getResourcesForOrganizationalUnit(cms, ou2.getName()).get(0)); // /sites/default/folder2/ CmsUser user = cms.readUser("test/test1"); // check preconditions assertFalse( roleMan.hasRoleForResource( cms, user.getName(), CmsRole.ACCOUNT_MANAGER, cms.getRequestContext().getSitePath(rootRes))); assertFalse( roleMan.hasRoleForResource( cms, user.getName(), CmsRole.ACCOUNT_MANAGER, cms.getRequestContext().getSitePath(ouRes))); assertFalse( roleMan.hasRoleForResource( cms, user.getName(), CmsRole.ACCOUNT_MANAGER, cms.getRequestContext().getSitePath(ou2Res))); assertTrue( roleMan.hasRoleForResource(cms, CmsRole.ACCOUNT_MANAGER, cms.getRequestContext().getSitePath(rootRes))); assertTrue( roleMan.hasRoleForResource(cms, CmsRole.ACCOUNT_MANAGER, cms.getRequestContext().getSitePath(ouRes))); assertTrue( roleMan.hasRoleForResource(cms, CmsRole.ACCOUNT_MANAGER, cms.getRequestContext().getSitePath(ou2Res))); assertFalse(roleMan.hasRole(cms, user.getName(), CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()))); assertTrue(roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), true, false, false).isEmpty()); assertEquals( 1, roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()), true, false).size()); assertTrue( roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()), true, false).contains( cms.readUser(OpenCms.getDefaultUsers().getUserAdmin()))); // add user to role roleMan.addUserToRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()), user.getName()); // check role in the given ou assertTrue(roleMan.hasRole(cms, user.getName(), CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()))); assertEquals(6, roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), false, false, false).size()); assertTrue( roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), false, false, false).contains( CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()))); assertTrue( roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), false, false, false).contains( CmsRole.WORKPLACE_USER.forOrgUnit(ou.getName()))); assertEquals(1, roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), false, true, false).size()); assertTrue( roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), false, true, false).contains( CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()))); assertFalse( roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), false, true, false).contains( CmsRole.WORKPLACE_USER.forOrgUnit(ou.getName()))); assertEquals(12, roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), true, false, false).size()); assertTrue( roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), true, false, false).contains( CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()))); assertTrue( roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), 
true, false, false).contains( CmsRole.WORKPLACE_USER.forOrgUnit(ou.getName()))); assertEquals(1, roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), true, true, false).size()); assertTrue( roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), true, true, false).contains( CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()))); assertFalse( roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), true, true, false).contains( CmsRole.WORKPLACE_USER.forOrgUnit(ou.getName()))); assertEquals( 1, roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()), false, false).size()); assertTrue( roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()), false, false).contains(user)); assertEquals( 1, roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()), false, true).size()); assertTrue( roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()), false, true).contains(user)); assertEquals( 2, roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()), true, false).size()); assertTrue( roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()), true, false).contains(user)); assertEquals( 1, roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()), true, true).size()); assertTrue( roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()), true, false).contains(user)); // check role in deeper ou assertTrue(roleMan.hasRole(cms, user.getName(), CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou2.getName()))); assertTrue(roleMan.getRolesOfUser(cms, user.getName(), ou2.getName(), false, false, false).isEmpty()); assertTrue(roleMan.getRolesOfUser(cms, user.getName(), ou2.getName(), false, true, false).isEmpty()); assertEquals(11, roleMan.getRolesOfUser(cms, user.getName(), ou2.getName(), true, false, false).size()); assertTrue(roleMan.getRolesOfUser(cms, user.getName(), ou2.getName(), true, true, false).isEmpty()); assertEquals( 0, roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou2.getName()), false, false).size()); assertTrue( roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou2.getName()), false, true).isEmpty()); assertEquals( 2, roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou2.getName()), true, false).size()); assertTrue( roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou2.getName()), true, false).contains( cms.readUser(OpenCms.getDefaultUsers().getUserAdmin()))); assertTrue( roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou2.getName()), true, true).isEmpty()); // check role in higher ou assertFalse(roleMan.hasRole(cms, user.getName(), CmsRole.ACCOUNT_MANAGER.forOrgUnit(rootOu.getName()))); assertEquals(12, roleMan.getRolesOfUser(cms, user.getName(), rootOu.getName(), true, false, false).size()); assertTrue( roleMan.getRolesOfUser(cms, user.getName(), rootOu.getName(), true, false, false).contains( CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()))); assertTrue(roleMan.getRolesOfUser(cms, user.getName(), rootOu.getName(), false, false, false).isEmpty()); assertEquals( 1, roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(rootOu.getName()), true, false).size()); assertTrue( roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(rootOu.getName()), true, false).contains( cms.readUser(OpenCms.getDefaultUsers().getUserAdmin()))); // check resources assertFalse( roleMan.hasRoleForResource( cms, user.getName(), CmsRole.ACCOUNT_MANAGER, 
cms.getRequestContext().getSitePath(rootRes))); assertTrue( roleMan.hasRoleForResource( cms, user.getName(), CmsRole.ACCOUNT_MANAGER, cms.getRequestContext().getSitePath(ouRes))); assertTrue( roleMan.hasRoleForResource( cms, user.getName(), CmsRole.ACCOUNT_MANAGER, cms.getRequestContext().getSitePath(ou2Res))); }
void function() throws Throwable { CmsObject cms = getCmsObject(); echo(STR); cms.getRequestContext().setSiteRoot(STRSTRtestSTRtest2STRtest/test1"); assertFalse( roleMan.hasRoleForResource( cms, user.getName(), CmsRole.ACCOUNT_MANAGER, cms.getRequestContext().getSitePath(rootRes))); assertFalse( roleMan.hasRoleForResource( cms, user.getName(), CmsRole.ACCOUNT_MANAGER, cms.getRequestContext().getSitePath(ouRes))); assertFalse( roleMan.hasRoleForResource( cms, user.getName(), CmsRole.ACCOUNT_MANAGER, cms.getRequestContext().getSitePath(ou2Res))); assertTrue( roleMan.hasRoleForResource(cms, CmsRole.ACCOUNT_MANAGER, cms.getRequestContext().getSitePath(rootRes))); assertTrue( roleMan.hasRoleForResource(cms, CmsRole.ACCOUNT_MANAGER, cms.getRequestContext().getSitePath(ouRes))); assertTrue( roleMan.hasRoleForResource(cms, CmsRole.ACCOUNT_MANAGER, cms.getRequestContext().getSitePath(ou2Res))); assertFalse(roleMan.hasRole(cms, user.getName(), CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()))); assertTrue(roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), true, false, false).isEmpty()); assertEquals( 1, roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()), true, false).size()); assertTrue( roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()), true, false).contains( cms.readUser(OpenCms.getDefaultUsers().getUserAdmin()))); roleMan.addUserToRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()), user.getName()); assertTrue(roleMan.hasRole(cms, user.getName(), CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()))); assertEquals(6, roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), false, false, false).size()); assertTrue( roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), false, false, false).contains( CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()))); assertTrue( roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), false, false, false).contains( CmsRole.WORKPLACE_USER.forOrgUnit(ou.getName()))); assertEquals(1, roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), false, true, false).size()); assertTrue( roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), false, true, false).contains( CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()))); assertFalse( roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), false, true, false).contains( CmsRole.WORKPLACE_USER.forOrgUnit(ou.getName()))); assertEquals(12, roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), true, false, false).size()); assertTrue( roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), true, false, false).contains( CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()))); assertTrue( roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), true, false, false).contains( CmsRole.WORKPLACE_USER.forOrgUnit(ou.getName()))); assertEquals(1, roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), true, true, false).size()); assertTrue( roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), true, true, false).contains( CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()))); assertFalse( roleMan.getRolesOfUser(cms, user.getName(), ou.getName(), true, true, false).contains( CmsRole.WORKPLACE_USER.forOrgUnit(ou.getName()))); assertEquals( 1, roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()), false, false).size()); assertTrue( roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()), false, false).contains(user)); assertEquals( 1, roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()), false, 
true).size()); assertTrue( roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()), false, true).contains(user)); assertEquals( 2, roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()), true, false).size()); assertTrue( roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()), true, false).contains(user)); assertEquals( 1, roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()), true, true).size()); assertTrue( roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()), true, false).contains(user)); assertTrue(roleMan.hasRole(cms, user.getName(), CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou2.getName()))); assertTrue(roleMan.getRolesOfUser(cms, user.getName(), ou2.getName(), false, false, false).isEmpty()); assertTrue(roleMan.getRolesOfUser(cms, user.getName(), ou2.getName(), false, true, false).isEmpty()); assertEquals(11, roleMan.getRolesOfUser(cms, user.getName(), ou2.getName(), true, false, false).size()); assertTrue(roleMan.getRolesOfUser(cms, user.getName(), ou2.getName(), true, true, false).isEmpty()); assertEquals( 0, roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou2.getName()), false, false).size()); assertTrue( roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou2.getName()), false, true).isEmpty()); assertEquals( 2, roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou2.getName()), true, false).size()); assertTrue( roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou2.getName()), true, false).contains( cms.readUser(OpenCms.getDefaultUsers().getUserAdmin()))); assertTrue( roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou2.getName()), true, true).isEmpty()); assertFalse(roleMan.hasRole(cms, user.getName(), CmsRole.ACCOUNT_MANAGER.forOrgUnit(rootOu.getName()))); assertEquals(12, roleMan.getRolesOfUser(cms, user.getName(), rootOu.getName(), true, false, false).size()); assertTrue( roleMan.getRolesOfUser(cms, user.getName(), rootOu.getName(), true, false, false).contains( CmsRole.ACCOUNT_MANAGER.forOrgUnit(ou.getName()))); assertTrue(roleMan.getRolesOfUser(cms, user.getName(), rootOu.getName(), false, false, false).isEmpty()); assertEquals( 1, roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(rootOu.getName()), true, false).size()); assertTrue( roleMan.getUsersOfRole(cms, CmsRole.ACCOUNT_MANAGER.forOrgUnit(rootOu.getName()), true, false).contains( cms.readUser(OpenCms.getDefaultUsers().getUserAdmin()))); assertFalse( roleMan.hasRoleForResource( cms, user.getName(), CmsRole.ACCOUNT_MANAGER, cms.getRequestContext().getSitePath(rootRes))); assertTrue( roleMan.hasRoleForResource( cms, user.getName(), CmsRole.ACCOUNT_MANAGER, cms.getRequestContext().getSitePath(ouRes))); assertTrue( roleMan.hasRoleForResource( cms, user.getName(), CmsRole.ACCOUNT_MANAGER, cms.getRequestContext().getSitePath(ou2Res))); }
/** * Tests the inheritance of role memberships.<p> * * @throws Throwable if something goes wrong */
Tests the inheritance of role memberships
testRoleInheritance
{ "repo_name": "victos/opencms-core", "path": "test/org/opencms/security/TestOrganizationalUnits.java", "license": "lgpl-2.1", "size": 80097 }
[ "org.opencms.file.CmsObject", "org.opencms.main.OpenCms" ]
import org.opencms.file.CmsObject; import org.opencms.main.OpenCms;
import org.opencms.file.*; import org.opencms.main.*;
[ "org.opencms.file", "org.opencms.main" ]
org.opencms.file; org.opencms.main;
1,329,234
public ItemStack createItem() { return new ItemStack(mat, amount, data); }
ItemStack function() { return new ItemStack(mat, amount, data); }
/** * Creates an item from the blueprint. * * @return item stack */
Creates an item from the blueprint
createItem
{ "repo_name": "andfRa/Saga", "path": "src/org/saga/utility/items/ItemBlueprint.java", "license": "gpl-3.0", "size": 1318 }
[ "org.bukkit.inventory.ItemStack" ]
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.*;
[ "org.bukkit.inventory" ]
org.bukkit.inventory;
1,067,916
public ClassNode getClassNode() { if (classNode == null && GroovyObject.class.isAssignableFrom(theClass)) { // let's try load it from the classpath String groovyFile = theClass.getName(); int idx = groovyFile.indexOf('$'); if (idx > 0) { groovyFile = groovyFile.substring(0, idx); } groovyFile = groovyFile.replace('.', '/') + ".groovy"; //System.out.println("Attempting to load: " + groovyFile); URL url = theClass.getClassLoader().getResource(groovyFile); if (url == null) { url = Thread.currentThread().getContextClassLoader().getResource(groovyFile); } if (url != null) { try { CompilationUnit.ClassgenCallback search = (writer, node) -> { if (node.getName().equals(theClass.getName())) { MetaClassImpl.this.classNode = node; } }; CompilationUnit unit = new CompilationUnit(); unit.setClassgenCallback(search); unit.addSource(url); unit.compile(Phases.CLASS_GENERATION); } catch (Exception e) { throw new GroovyRuntimeException("Exception thrown parsing: " + groovyFile + ". Reason: " + e, e); } } } return classNode; }
ClassNode function() { if (classNode == null && GroovyObject.class.isAssignableFrom(theClass)) { String groovyFile = theClass.getName(); int idx = groovyFile.indexOf('$'); if (idx > 0) { groovyFile = groovyFile.substring(0, idx); } groovyFile = groovyFile.replace('.', '/') + STR; URL url = theClass.getClassLoader().getResource(groovyFile); if (url == null) { url = Thread.currentThread().getContextClassLoader().getResource(groovyFile); } if (url != null) { try { CompilationUnit.ClassgenCallback search = (writer, node) -> { if (node.getName().equals(theClass.getName())) { MetaClassImpl.this.classNode = node; } }; CompilationUnit unit = new CompilationUnit(); unit.setClassgenCallback(search); unit.addSource(url); unit.compile(Phases.CLASS_GENERATION); } catch (Exception e) { throw new GroovyRuntimeException(STR + groovyFile + STR + e, e); } } } return classNode; }
/** * Obtains a reference to the original AST for the MetaClass if it is available at runtime * * @return The original AST or null if it cannot be returned */
Obtains a reference to the original AST for the MetaClass if it is available at runtime
getClassNode
{ "repo_name": "paulk-asert/groovy", "path": "src/main/java/groovy/lang/MetaClassImpl.java", "license": "apache-2.0", "size": 174312 }
[ "org.codehaus.groovy.ast.ClassNode", "org.codehaus.groovy.control.CompilationUnit", "org.codehaus.groovy.control.Phases", "org.codehaus.groovy.reflection.ReflectionCache" ]
import org.codehaus.groovy.ast.ClassNode; import org.codehaus.groovy.control.CompilationUnit; import org.codehaus.groovy.control.Phases; import org.codehaus.groovy.reflection.ReflectionCache;
import org.codehaus.groovy.ast.*; import org.codehaus.groovy.control.*; import org.codehaus.groovy.reflection.*;
[ "org.codehaus.groovy" ]
org.codehaus.groovy;
1,707,462
@NotNull default TimeZone getPreferredTimeZoneNullSafe(@Nullable Authentication authentication) { return getPreferredTimeZone(authentication) .orElseGet(this::getDefaultTimeZone); }
default TimeZone getPreferredTimeZoneNullSafe(@Nullable Authentication authentication) { return getPreferredTimeZone(authentication) .orElseGet(this::getDefaultTimeZone); }
/** * Gets preferred time zone null safe. * * @param authentication the authentication * @return the preferred time zone null safe */
Gets preferred time zone null safe
getPreferredTimeZoneNullSafe
{ "repo_name": "bremersee/common", "path": "common-base-security/src/main/java/org/bremersee/security/authentication/AuthenticationDetails.java", "license": "apache-2.0", "size": 2544 }
[ "java.util.TimeZone", "org.springframework.lang.Nullable", "org.springframework.security.core.Authentication" ]
import java.util.TimeZone; import org.springframework.lang.Nullable; import org.springframework.security.core.Authentication;
import java.util.*; import org.springframework.lang.*; import org.springframework.security.core.*;
[ "java.util", "org.springframework.lang", "org.springframework.security" ]
java.util; org.springframework.lang; org.springframework.security;
1,851,523
public Dataset cloneWithNoRows(String clonedName) { if (clonedName == null) { clonedName = String.format("Data_%s", UUID.randomUUID().toString()); } Dataset dataset = new Dataset(clonedName, className); attributes.stream().map((attr) -> { Attribute newAttr = new Attribute(attr.getName()); attr.getTerms().forEach((term) -> { newAttr.addTerm(term.getName(), term.getFuzzySet()); }); return newAttr; }).forEachOrdered((newAttr) -> { dataset.addAttribute(newAttr); }); return dataset; }
Dataset function(String clonedName) { if (clonedName == null) { clonedName = String.format(STR, UUID.randomUUID().toString()); } Dataset dataset = new Dataset(clonedName, className); attributes.stream().map((attr) -> { Attribute newAttr = new Attribute(attr.getName()); attr.getTerms().forEach((term) -> { newAttr.addTerm(term.getName(), term.getFuzzySet()); }); return newAttr; }).forEachOrdered((newAttr) -> { dataset.addAttribute(newAttr); }); return dataset; }
/** * Creates an empty dataset that has the same attribute structure as this one. * @param clonedName the name of the cloned dataset, or null to generate a random name * @return the empty clone */
Creates an empty dataset that has the same attribute structure as this one
cloneWithNoRows
{ "repo_name": "mhjabreel/FDTKit", "path": "src/fdt/data/Dataset.java", "license": "mit", "size": 10870 }
[ "java.util.UUID" ]
import java.util.UUID;
import java.util.*;
[ "java.util" ]
java.util;
2,251,721
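The cloneWithNoRows record above falls back to a generated name when clonedName is null. Below is a small JDK-only sketch of that naming rule, reusing the "Data_%s" format from the original; the class name is hypothetical.

import java.util.UUID;

// Default-name rule: when no name is given, generate one from "Data_" plus a random UUID.
public class DefaultNameSketch {
    static String defaultName(String clonedName) {
        if (clonedName == null) {
            clonedName = String.format("Data_%s", UUID.randomUUID());
        }
        return clonedName;
    }

    public static void main(String[] args) {
        System.out.println(defaultName(null));      // e.g. Data_3f1c9a2e-...
        System.out.println(defaultName("Weather")); // Weather
    }
}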
public ApiResponse<List<Integer>> getDogmaEffectsWithHttpInfo(String datasource, String userAgent, String xUserAgent) throws ApiException { com.squareup.okhttp.Call call = getDogmaEffectsValidateBeforeCall(datasource, userAgent, xUserAgent, null, null); Type localVarReturnType = new TypeToken<List<Integer>>(){}.getType(); return apiClient.execute(call, localVarReturnType); }
ApiResponse<List<Integer>> function(String datasource, String userAgent, String xUserAgent) throws ApiException { com.squareup.okhttp.Call call = getDogmaEffectsValidateBeforeCall(datasource, userAgent, xUserAgent, null, null); Type localVarReturnType = new TypeToken<List<Integer>>(){}.getType(); return apiClient.execute(call, localVarReturnType); }
/** * Get effects * Get a list of dogma effect ids --- Alternate route: &#x60;/v1/dogma/effects/&#x60; Alternate route: &#x60;/legacy/dogma/effects/&#x60; Alternate route: &#x60;/dev/dogma/effects/&#x60; --- This route is cached for up to 3600 seconds * @param datasource The server name you would like data from (optional, default to tranquility) * @param userAgent Client identifier, takes precedence over headers (optional) * @param xUserAgent Client identifier, takes precedence over User-Agent (optional) * @return ApiResponse&lt;List&lt;Integer&gt;&gt; * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body */
Get effects Get a list of dogma effect ids --- Alternate route: `/v1/dogma/effects/` Alternate route: `/legacy/dogma/effects/` Alternate route: `/dev/dogma/effects/` --- This route is cached for up to 3600 seconds
getDogmaEffectsWithHttpInfo
{ "repo_name": "Tmin10/EVE-Security-Service", "path": "server-api/src/main/java/ru/tmin10/EVESecurityService/serverApi/api/DogmaApi.java", "license": "gpl-3.0", "size": 33114 }
[ "com.google.gson.reflect.TypeToken", "java.lang.reflect.Type", "java.util.List", "ru.tmin10.EVESecurityService" ]
import com.google.gson.reflect.TypeToken; import java.lang.reflect.Type; import java.util.List; import ru.tmin10.EVESecurityService;
import com.google.gson.reflect.*; import java.lang.reflect.*; import java.util.*; import ru.tmin10.*;
[ "com.google.gson", "java.lang", "java.util", "ru.tmin10" ]
com.google.gson; java.lang; java.util; ru.tmin10;
2,383,150
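getDogmaEffectsWithHttpInfo above relies on Gson's TypeToken to carry the generic List<Integer> type into deserialization. The sketch below shows the same trick against a literal JSON string instead of the repo-specific ApiClient; it assumes Gson is on the classpath, and the class name is made up.

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.lang.reflect.Type;
import java.util.List;

// Keep the generic element type at runtime via an anonymous TypeToken subclass.
public class TypeTokenSketch {
    public static void main(String[] args) {
        Type listOfInt = new TypeToken<List<Integer>>() {}.getType();
        List<Integer> ids = new Gson().fromJson("[11, 12, 13]", listOfInt);
        System.out.println(ids); // [11, 12, 13]
    }
}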
@Nonnull public static Set<OWLAxiom> getAxiomsOfTypes(@Nonnull Set<OWLAxiom> sourceAxioms, @Nonnull AxiomType<?>... axiomTypes) { Set<OWLAxiom> result = new HashSet<>(); Set<AxiomType<?>> allowed = Sets.newHashSet(axiomTypes); for (OWLAxiom ax : sourceAxioms) { if (allowed.contains(ax.getAxiomType())) { result.add(ax); } } return result; }
static Set<OWLAxiom> function(@Nonnull Set<OWLAxiom> sourceAxioms, @Nonnull AxiomType<?>... axiomTypes) { Set<OWLAxiom> result = new HashSet<>(); Set<AxiomType<?>> allowed = Sets.newHashSet(axiomTypes); for (OWLAxiom ax : sourceAxioms) { if (allowed.contains(ax.getAxiomType())) { result.add(ax); } } return result; }
/** * Gets the set of axioms from a source set of axioms that have a specified * type * * @param sourceAxioms * The source set of axioms * @param axiomTypes * The types of axioms that will be returned * @return A set of axioms that represents the sourceAxioms that have the * specified types. Note that sourceAxioms will not be modified. The * returned set is a copy. */
Gets the set of axioms from a source set of axioms that have a specified type
getAxiomsOfTypes
{ "repo_name": "matthewhorridge/owlapi-gwt", "path": "owlapi-gwt-client-side-emul/src/main/java/org/semanticweb/owlapi/model/AxiomType.java", "license": "lgpl-3.0", "size": 22801 }
[ "com.google.common.collect.Sets", "java.util.HashSet", "java.util.Set", "javax.annotation.Nonnull" ]
import com.google.common.collect.Sets; import java.util.HashSet; import java.util.Set; import javax.annotation.Nonnull;
import com.google.common.collect.*; import java.util.*; import javax.annotation.*;
[ "com.google.common", "java.util", "javax.annotation" ]
com.google.common; java.util; javax.annotation;
241,332
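getAxiomsOfTypes above is a membership filter: keep only the elements whose type is in an allowed set. The sketch below restates that pattern with JDK collections and a hypothetical AxiomKind enum instead of the OWL API types, so it stays self-contained.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

// Filter entries by whether their "axiom type" is in the allowed set.
public class FilterByTypeSketch {
    enum AxiomKind { SUBCLASS_OF, EQUIVALENT_CLASSES, ANNOTATION }

    public static void main(String[] args) {
        Map<String, AxiomKind> source = Map.of(
                "ax1", AxiomKind.SUBCLASS_OF,
                "ax2", AxiomKind.ANNOTATION,
                "ax3", AxiomKind.EQUIVALENT_CLASSES);
        Set<AxiomKind> allowed = new HashSet<>(Arrays.asList(AxiomKind.SUBCLASS_OF, AxiomKind.EQUIVALENT_CLASSES));

        Set<String> result = source.entrySet().stream()
                .filter(e -> allowed.contains(e.getValue()))   // same membership test as above
                .map(Map.Entry::getKey)
                .collect(Collectors.toSet());
        System.out.println(result); // [ax1, ax3] (order not guaranteed)
    }
}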
@Override public void info(@NonNull String msgFormat, Object... args) { if (mLevel.mLevel > Level.INFO.mLevel) { return; } String msg = String.format(msgFormat, args); printMessage(msg, System.out); }
void function(@NonNull String msgFormat, Object... args) { if (mLevel.mLevel > Level.INFO.mLevel) { return; } String msg = String.format(msgFormat, args); printMessage(msg, System.out); }
/** * Prints an info message. * <p/> * The output is done on {@link System#out}. * <p/> * This is displayed only if the logging {@link com.android.common.utils.StdLogger.Level} is {@link com.android.common.utils.StdLogger.Level#INFO} or higher. * * @param msgFormat is a string format to be used with a {@link java.util.Formatter}. Cannot be null. * @param args provides the arguments for msgFormat. */
Prints an info message. The output is done on <code>System#out</code>. This is displayed only if the logging <code>com.android.common.utils.StdLogger.Level</code> is <code>com.android.common.utils.StdLogger.Level#INFO</code> or higher
info
{ "repo_name": "mread/buck", "path": "third-party/java/aosp/src/com/android/common/utils/StdLogger.java", "license": "apache-2.0", "size": 5658 }
[ "com.android.common.annotations.NonNull" ]
import com.android.common.annotations.NonNull;
import com.android.common.annotations.*;
[ "com.android.common" ]
com.android.common;
2,100,659
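The info record above gates formatting on a log level before calling String.format. Below is a self-contained sketch of that gate; the Level enum, its ordering, and the sample messages are stand-ins for the StdLogger ones, not the real API.

// Skip formatting work entirely when the message level is below the logger's threshold.
public class LevelGateSketch {
    enum Level { VERBOSE, INFO, WARNING, ERROR }

    private final Level threshold;

    LevelGateSketch(Level threshold) {
        this.threshold = threshold;
    }

    void info(String msgFormat, Object... args) {
        if (threshold.ordinal() > Level.INFO.ordinal()) {
            return;                                   // below threshold: no formatting, no output
        }
        System.out.println(String.format(msgFormat, args));
    }

    public static void main(String[] args) {
        new LevelGateSketch(Level.VERBOSE).info("built %d targets in %.1fs", 12, 3.5); // printed
        new LevelGateSketch(Level.ERROR).info("this is suppressed");                   // silent
    }
}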
private RegistryAuth authWithCredentialHelper(final String credsStore, final String registry) throws IOException { final DockerCredentialHelperAuth dockerCredentialHelperAuth = DockerCredentialHelper.get(credsStore, registry); return dockerCredentialHelperAuth == null ? null : dockerCredentialHelperAuth.toRegistryAuth(); }
RegistryAuth function(final String credsStore, final String registry) throws IOException { final DockerCredentialHelperAuth dockerCredentialHelperAuth = DockerCredentialHelper.get(credsStore, registry); return dockerCredentialHelperAuth == null ? null : dockerCredentialHelperAuth.toRegistryAuth(); }
/** * Obtain auth using a credential helper. * @param credsStore The name of the credential helper * @param registry The registry for which we need to obtain auth * @return A RegistryAuth object with a username, password, and server. * @throws IOException This method attempts to execute * "docker-credential-" + credsStore + " get". If you don't have the * proper credential helper installed and on your path, this * will fail. */
Obtain auth using a credential helper
authWithCredentialHelper
{ "repo_name": "spotify/docker-client", "path": "src/main/java/com/spotify/docker/client/DockerConfigReader.java", "license": "apache-2.0", "size": 12102 }
[ "com.spotify.docker.client.messages.DockerCredentialHelperAuth", "com.spotify.docker.client.messages.RegistryAuth", "java.io.IOException" ]
import com.spotify.docker.client.messages.DockerCredentialHelperAuth; import com.spotify.docker.client.messages.RegistryAuth; import java.io.IOException;
import com.spotify.docker.client.messages.*; import java.io.*;
[ "com.spotify.docker", "java.io" ]
com.spotify.docker; java.io;
1,588,348
// This is also used by LookAndFeelCommand public static LookAndFeelInfo[] getAllLAFs() { UIManager.LookAndFeelInfo lafs[] = UIManager.getInstalledLookAndFeels(); int i = lafs.length; UIManager.LookAndFeelInfo lafsAll[] = new UIManager.LookAndFeelInfo[i+2]; System.arraycopy(lafs, 0, lafsAll, 0, i); lafsAll[i++]=new UIManager.LookAndFeelInfo(CROSS_PLATFORM_LAF,UIManager.getCrossPlatformLookAndFeelClassName()); lafsAll[i++]=new UIManager.LookAndFeelInfo(SYSTEM_LAF,UIManager.getSystemLookAndFeelClassName()); return lafsAll; }
static LookAndFeelInfo[] function() { UIManager.LookAndFeelInfo lafs[] = UIManager.getInstalledLookAndFeels(); int i = lafs.length; UIManager.LookAndFeelInfo lafsAll[] = new UIManager.LookAndFeelInfo[i+2]; System.arraycopy(lafs, 0, lafsAll, 0, i); lafsAll[i++]=new UIManager.LookAndFeelInfo(CROSS_PLATFORM_LAF,UIManager.getCrossPlatformLookAndFeelClassName()); lafsAll[i++]=new UIManager.LookAndFeelInfo(SYSTEM_LAF,UIManager.getSystemLookAndFeelClassName()); return lafsAll; }
/** * Get a list of all installed LAFs plus CrossPlatform and System. */
Get a list of all installed LAFs plus CrossPlatform and System
getAllLAFs
{ "repo_name": "liuqingtao/jmeter", "path": "src/core/org/apache/jmeter/gui/util/JMeterMenuBar.java", "license": "apache-2.0", "size": 31828 }
[ "javax.swing.UIManager" ]
import javax.swing.UIManager;
import javax.swing.*;
[ "javax.swing" ]
javax.swing;
2,908,965
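A runnable sketch of the same Swing lookup used in getAllLAFs above: enumerate the installed look-and-feels and append the cross-platform and system entries. Only standard javax.swing calls are used; the printed labels are illustrative.

import javax.swing.UIManager;

// List installed look-and-feels, then the two synthetic entries the method above appends.
public class LafListingSketch {
    public static void main(String[] args) {
        for (UIManager.LookAndFeelInfo laf : UIManager.getInstalledLookAndFeels()) {
            System.out.println(laf.getName() + " -> " + laf.getClassName());
        }
        System.out.println("CrossPlatform -> " + UIManager.getCrossPlatformLookAndFeelClassName());
        System.out.println("System -> " + UIManager.getSystemLookAndFeelClassName());
    }
}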
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { try { processRequest(request, response); } catch (Exception ex) { Logger.getLogger(ReportGenerator.class.getName()).log(Level.SEVERE, null, ex); } }
void function(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { try { processRequest(request, response); } catch (Exception ex) { Logger.getLogger(ReportGenerator.class.getName()).log(Level.SEVERE, null, ex); } }
/** * Handles the HTTP <code>GET</code> method. * * @param request servlet request * @param response servlet response * @throws javax.servlet.ServletException if a servlet-specific error occurs * @throws java.io.IOException if an I/O error occurs */
Handles the HTTP <code>GET</code> method
doGet
{ "repo_name": "madhawa-gunasekara/carbon-commons", "path": "components/reporting/org.wso2.carbon.reporting.template.ui/src/main/java/org/wso2/carbon/reporting/template/ui/servlet/CompositeReportProcessor.java", "license": "apache-2.0", "size": 4934 }
[ "java.io.IOException", "java.util.logging.Level", "java.util.logging.Logger", "javax.servlet.ServletException", "javax.servlet.http.HttpServletRequest", "javax.servlet.http.HttpServletResponse" ]
import java.io.IOException; import java.util.logging.Level; import java.util.logging.Logger; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse;
import java.io.*; import java.util.logging.*; import javax.servlet.*; import javax.servlet.http.*;
[ "java.io", "java.util", "javax.servlet" ]
java.io; java.util; javax.servlet;
1,941,808
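The doGet record above delegates to a shared processRequest and logs failures with java.util.logging. The sketch below shows that shape as a minimal standalone servlet; it assumes the servlet-api dependency is available, and the servlet name and response body are invented for illustration.

import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

// GET handler delegating to a shared request processor, logging any failure at SEVERE.
public class ReportServletSketch extends HttpServlet {
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        try {
            processRequest(request, response);
        } catch (Exception ex) {
            Logger.getLogger(ReportServletSketch.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    private void processRequest(HttpServletRequest request, HttpServletResponse response) throws IOException {
        response.setContentType("text/plain");
        response.getWriter().println("report generated");
    }
}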
@Path("admin-events") @GET @NoCache @Produces(MediaType.APPLICATION_JSON) public List<AdminEvent> getEvents(@QueryParam("authRealm") String authRealm, @QueryParam("authClient") String authClient, @QueryParam("authUser") String authUser, @QueryParam("authIpAddress") String authIpAddress, @QueryParam("resourcePath") String resourcePath, @QueryParam("dateFrom") String dateFrom, @QueryParam("dateTo") String dateTo, @QueryParam("first") Integer firstResult, @QueryParam("max") Integer maxResults) { auth.init(RealmAuth.Resource.EVENTS).requireView(); EventStoreProvider eventStore = session.getProvider(EventStoreProvider.class); AdminEventQuery query = eventStore.createAdminQuery().realm(realm.getId());; if (authRealm != null) { query.authRealm(authRealm); } if (authClient != null) { query.authClient(authClient); } if (authUser != null) { query.authUser(authUser); } if (authIpAddress != null) { query.authIpAddress(authIpAddress); } if (resourcePath != null) { query.resourcePath(resourcePath); } List<String> operationTypes = uriInfo.getQueryParameters().get("operationTypes"); if (operationTypes != null) { OperationType[] t = new OperationType[operationTypes.size()]; for (int i = 0; i < t.length; i++) { t[i] = OperationType.valueOf(operationTypes.get(i)); } query.operation(t); } if(dateFrom != null) { SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd"); Date from = null; try { from = df.parse(dateFrom); } catch (ParseException e) { throw new BadRequestException("Invalid value for 'Date(From)', expected format is yyyy-MM-dd"); } query.fromTime(from); } if(dateTo != null) { SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd"); Date to = null; try { to = df.parse(dateTo); } catch (ParseException e) { throw new BadRequestException("Invalid value for 'Date(To)', expected format is yyyy-MM-dd"); } query.toTime(to); } if (firstResult != null) { query.firstResult(firstResult); } if (maxResults != null) { query.maxResults(maxResults); } return query.getResultList(); }
@Path(STR) @Produces(MediaType.APPLICATION_JSON) List<AdminEvent> function(@QueryParam(STR) String authRealm, @QueryParam(STR) String authClient, @QueryParam(STR) String authUser, @QueryParam(STR) String authIpAddress, @QueryParam(STR) String resourcePath, @QueryParam(STR) String dateFrom, @QueryParam(STR) String dateTo, @QueryParam("first") Integer firstResult, @QueryParam("max") Integer maxResults) { auth.init(RealmAuth.Resource.EVENTS).requireView(); EventStoreProvider eventStore = session.getProvider(EventStoreProvider.class); AdminEventQuery query = eventStore.createAdminQuery().realm(realm.getId());; if (authRealm != null) { query.authRealm(authRealm); } if (authClient != null) { query.authClient(authClient); } if (authUser != null) { query.authUser(authUser); } if (authIpAddress != null) { query.authIpAddress(authIpAddress); } if (resourcePath != null) { query.resourcePath(resourcePath); } List<String> operationTypes = uriInfo.getQueryParameters().get(STR); if (operationTypes != null) { OperationType[] t = new OperationType[operationTypes.size()]; for (int i = 0; i < t.length; i++) { t[i] = OperationType.valueOf(operationTypes.get(i)); } query.operation(t); } if(dateFrom != null) { SimpleDateFormat df = new SimpleDateFormat(STR); Date from = null; try { from = df.parse(dateFrom); } catch (ParseException e) { throw new BadRequestException(STR); } query.fromTime(from); } if(dateTo != null) { SimpleDateFormat df = new SimpleDateFormat(STR); Date to = null; try { to = df.parse(dateTo); } catch (ParseException e) { throw new BadRequestException(STR); } query.toTime(to); } if (firstResult != null) { query.firstResult(firstResult); } if (maxResults != null) { query.maxResults(maxResults); } return query.getResultList(); }
/** * Query admin events. Returns all admin events, or will query based on the URL query parameters listed here * * @param authRealm * @param authClient * @param authUser user id * @param authIpAddress * @param resourcePath * @param dateFrom * @param dateTo * @param firstResult * @param maxResults * @return */
Query admin events. Returns all admin events, or will query based on URL query parameters listed here
getEvents
{ "repo_name": "matzew/keycloak", "path": "services/src/main/java/org/keycloak/services/resources/admin/RealmAdminResource.java", "license": "apache-2.0", "size": 19746 }
[ "java.text.ParseException", "java.text.SimpleDateFormat", "java.util.Date", "java.util.List", "javax.ws.rs.BadRequestException", "javax.ws.rs.Path", "javax.ws.rs.Produces", "javax.ws.rs.QueryParam", "javax.ws.rs.core.MediaType", "org.keycloak.events.EventStoreProvider", "org.keycloak.events.admin.AdminEvent", "org.keycloak.events.admin.AdminEventQuery", "org.keycloak.events.admin.OperationType" ]
import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Date; import java.util.List; import javax.ws.rs.BadRequestException; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; import org.keycloak.events.EventStoreProvider; import org.keycloak.events.admin.AdminEvent; import org.keycloak.events.admin.AdminEventQuery; import org.keycloak.events.admin.OperationType;
import java.text.*; import java.util.*; import javax.ws.rs.*; import javax.ws.rs.core.*; import org.keycloak.events.*; import org.keycloak.events.admin.*;
[ "java.text", "java.util", "javax.ws", "org.keycloak.events" ]
java.text; java.util; javax.ws; org.keycloak.events;
604,373
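The getEvents record above parses the dateFrom/dateTo query parameters with the yyyy-MM-dd pattern and rejects bad input. The sketch below keeps that parsing but throws a plain IllegalArgumentException instead of the JAX-RS BadRequestException, so it runs without a JAX-RS runtime; the class and parameter names are illustrative.

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

// Parse a yyyy-MM-dd query parameter, turning a ParseException into a descriptive error.
public class DateParamSketch {
    static Date parseDateParam(String value, String paramName) {
        try {
            return new SimpleDateFormat("yyyy-MM-dd").parse(value);
        } catch (ParseException e) {
            throw new IllegalArgumentException(
                    "Invalid value for '" + paramName + "', expected format is yyyy-MM-dd", e);
        }
    }

    public static void main(String[] args) {
        System.out.println(parseDateParam("2024-01-31", "dateFrom")); // parsed Date
        try {
            parseDateParam("31/01/2024", "dateTo");
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}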
@Test public void presentValueExplicit() { final MultipleCurrencyAmount pv = METHOD_HW.presentValue(SWAPTION_LONG_PAYER, HW_MULTICURVES); final double timeToExpiry = SWAPTION_LONG_PAYER.getTimeToExpiry(); final AnnuityPaymentFixed cfe = CFEC.visitSwap(SWAPTION_LONG_PAYER.getUnderlyingSwap(), MULTICURVES); final int numberOfPayments = cfe.getNumberOfPayments(); final double[] alpha = new double[numberOfPayments]; final double[] disccf = new double[numberOfPayments]; for (int loopcf = 0; loopcf < numberOfPayments; loopcf++) { alpha[loopcf] = MODEL.alpha(HW_PARAMETERS, 0.0, timeToExpiry, timeToExpiry, cfe.getNthPayment(loopcf).getPaymentTime()); disccf[loopcf] = MULTICURVES.getDiscountFactor(EUR, cfe.getNthPayment(loopcf).getPaymentTime()) * cfe.getNthPayment(loopcf).getAmount(); } final double kappa = MODEL.kappa(disccf, alpha); double pvExpected = 0.0; for (int loopcf = 0; loopcf < numberOfPayments; loopcf++) { pvExpected += disccf[loopcf] * NORMAL.getCDF(-kappa - alpha[loopcf]); } assertEquals("Swaption physical - Hull-White - present value", pvExpected, pv.getAmount(EUR), 1E-2); final MultipleCurrencyAmount pv2 = METHOD_HW.presentValue(SWAPTION_LONG_PAYER, cfe, HW_MULTICURVES); assertEquals("Swaption physical - Hull-White - present value", pv, pv2); }
void function() { final MultipleCurrencyAmount pv = METHOD_HW.presentValue(SWAPTION_LONG_PAYER, HW_MULTICURVES); final double timeToExpiry = SWAPTION_LONG_PAYER.getTimeToExpiry(); final AnnuityPaymentFixed cfe = CFEC.visitSwap(SWAPTION_LONG_PAYER.getUnderlyingSwap(), MULTICURVES); final int numberOfPayments = cfe.getNumberOfPayments(); final double[] alpha = new double[numberOfPayments]; final double[] disccf = new double[numberOfPayments]; for (int loopcf = 0; loopcf < numberOfPayments; loopcf++) { alpha[loopcf] = MODEL.alpha(HW_PARAMETERS, 0.0, timeToExpiry, timeToExpiry, cfe.getNthPayment(loopcf).getPaymentTime()); disccf[loopcf] = MULTICURVES.getDiscountFactor(EUR, cfe.getNthPayment(loopcf).getPaymentTime()) * cfe.getNthPayment(loopcf).getAmount(); } final double kappa = MODEL.kappa(disccf, alpha); double pvExpected = 0.0; for (int loopcf = 0; loopcf < numberOfPayments; loopcf++) { pvExpected += disccf[loopcf] * NORMAL.getCDF(-kappa - alpha[loopcf]); } assertEquals(STR, pvExpected, pv.getAmount(EUR), 1E-2); final MultipleCurrencyAmount pv2 = METHOD_HW.presentValue(SWAPTION_LONG_PAYER, cfe, HW_MULTICURVES); assertEquals(STR, pv, pv2); }
/** * Test the present value. */
Test the present value
presentValueExplicit
{ "repo_name": "McLeodMoores/starling", "path": "projects/analytics/src/test/java/com/opengamma/analytics/financial/interestrate/swaption/provider/SwaptionPhysicalFixedIborHullWhiteMethodTest.java", "license": "apache-2.0", "size": 37667 }
[ "com.opengamma.analytics.financial.interestrate.annuity.derivative.AnnuityPaymentFixed", "com.opengamma.util.money.MultipleCurrencyAmount", "org.testng.AssertJUnit" ]
import com.opengamma.analytics.financial.interestrate.annuity.derivative.AnnuityPaymentFixed; import com.opengamma.util.money.MultipleCurrencyAmount; import org.testng.AssertJUnit;
import com.opengamma.analytics.financial.interestrate.annuity.derivative.*; import com.opengamma.util.money.*; import org.testng.*;
[ "com.opengamma.analytics", "com.opengamma.util", "org.testng" ]
com.opengamma.analytics; com.opengamma.util; org.testng;
2,485,649
protected void validateName(GivenName givenName) throws ValidationException { if (DatatypeHelper.isEmpty(givenName.getName())) { throw new ValidationException("Name required"); } }
void function(GivenName givenName) throws ValidationException { if (DatatypeHelper.isEmpty(givenName.getName())) { throw new ValidationException(STR); } }
/** * Checks that Name is present. * * @param givenName * @throws ValidationException */
Checks that Name is present
validateName
{ "repo_name": "Safewhere/kombit-web-java", "path": "kombit-opensaml-2.5.1/src/org/opensaml/saml2/metadata/validator/GivenNameSchemaValidator.java", "license": "mit", "size": 1809 }
[ "org.opensaml.saml2.metadata.GivenName", "org.opensaml.xml.util.DatatypeHelper", "org.opensaml.xml.validation.ValidationException" ]
import org.opensaml.saml2.metadata.GivenName; import org.opensaml.xml.util.DatatypeHelper; import org.opensaml.xml.validation.ValidationException;
import org.opensaml.saml2.metadata.*; import org.opensaml.xml.util.*; import org.opensaml.xml.validation.*;
[ "org.opensaml.saml2", "org.opensaml.xml" ]
org.opensaml.saml2; org.opensaml.xml;
1,258,426
public static List<Class<?>> loadSPCClassesFromDirectory(File directory, String parent) { if(directory.toString().indexOf("sijobe") == -1) { System.out.println("Nope: " + directory); return new Vector<Class<?>>(); } Vector<Class<?>> classes = new Vector<Class<?>>(); try { File files[] = directory.listFiles(); for (File file : files) { try { if (file.isFile()) { classes.add(loadClass(file.getName(),parent)); } else { classes.addAll(loadSPCClassesFromDirectory(file,parent + file.getName() + "/")); } } catch (Exception e) { e.printStackTrace(); } } } catch (Exception e) { e.printStackTrace(); } return classes; }
static List<Class<?>> function(File directory, String parent) { if(directory.toString().indexOf(STR) == -1) { System.out.println(STR + directory); return new Vector<Class<?>>(); } Vector<Class<?>> classes = new Vector<Class<?>>(); try { File files[] = directory.listFiles(); for (File file : files) { try { if (file.isFile()) { classes.add(loadClass(file.getName(),parent)); } else { classes.addAll(loadSPCClassesFromDirectory(file,parent + file.getName() + "/")); } } catch (Exception e) { e.printStackTrace(); } } } catch (Exception e) { e.printStackTrace(); } return classes; }
/** * Loads all of SPC's classes within the specified directory * * @param directory - The directory to load all of the classes from * @param parent - The path of the parent directory(s). This parent is used * as the package name of the classes that are loaded. * @return A Vector containing all of the loaded classes is returned */
Loads all of SPC's classes within the specified directory
loadSPCClassesFromDirectory
{ "repo_name": "simo415/spc", "path": "src/com/sijobe/spc/util/DynamicClassLoader.java", "license": "lgpl-3.0", "size": 10763 }
[ "java.io.File", "java.util.List", "java.util.Vector" ]
import java.io.File; import java.util.List; import java.util.Vector;
import java.io.*; import java.util.*;
[ "java.io", "java.util" ]
java.io; java.util;
1,869,508
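loadSPCClassesFromDirectory above walks a directory tree recursively. The sketch below keeps the same recursion over File.listFiles but collects relative paths instead of loading classes, and adds a null check, since listFiles() returns null for unreadable paths, a case the original only covers through its broad catch block.

import java.io.File;
import java.util.ArrayList;
import java.util.List;

// Recursive directory walk collecting relative file paths.
public class DirectoryScanSketch {
    static List<String> scan(File directory, String parent) {
        List<String> found = new ArrayList<>();
        File[] files = directory.listFiles();
        if (files == null) {
            return found;                                // not a directory, or not readable
        }
        for (File file : files) {
            if (file.isFile()) {
                found.add(parent + file.getName());
            } else {
                found.addAll(scan(file, parent + file.getName() + "/"));
            }
        }
        return found;
    }

    public static void main(String[] args) {
        scan(new File("."), "").forEach(System.out::println);
    }
}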
public final List<Bookmark> getBookmarks() { return Collections.unmodifiableList(bookmarks); }
final List<Bookmark> function() { return Collections.unmodifiableList(bookmarks); }
/** * Gets the bookmarks. * * @return The bookmarks. * @see ChatRoomBookmark * @see WebPageBookmark */
Gets the bookmarks
getBookmarks
{ "repo_name": "jeozey/XmppServerTester", "path": "xmpp-extensions/src/main/java/rocks/xmpp/extensions/bookmarks/model/BookmarkStorage.java", "license": "mit", "size": 3318 }
[ "java.util.Collections", "java.util.List" ]
import java.util.Collections; import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,742,688
private static void removeBoxFileAndParentFiles(final BoxData boxData, final String baseBoxPath, final String pathSuffix, final List<String> removedPaths, final int maxLevel) { final File boxFile = boxData.getAbsoluteLevelFile(baseBoxPath, pathSuffix); if (boxFile.delete()) { removedPaths.add("\n" + boxFile); // try to remove all related parents since they are potentially obsolete BoxData parentBox = boxData.getParentBoxData(); while (parentBox.getLevel() <= maxLevel) { final File parentBoxFile = parentBox.getAbsoluteLevelFile(baseBoxPath, pathSuffix); boolean foundAndDeletedParentFile = false; if (parentBoxFile.exists()) { foundAndDeletedParentFile = parentBoxFile.delete(); } if (foundAndDeletedParentFile) { removedPaths.add("\n" + parentBoxFile); parentBox = parentBox.getParentBoxData(); } else { break; } } } else { LOG.warn("prior run box {} already removed (hopefully by a parallel process)", boxFile); } } private static final Logger LOG = LoggerFactory.getLogger(BoxClient.class);
static void function(final BoxData boxData, final String baseBoxPath, final String pathSuffix, final List<String> removedPaths, final int maxLevel) { final File boxFile = boxData.getAbsoluteLevelFile(baseBoxPath, pathSuffix); if (boxFile.delete()) { removedPaths.add("\n" + boxFile); BoxData parentBox = boxData.getParentBoxData(); while (parentBox.getLevel() <= maxLevel) { final File parentBoxFile = parentBox.getAbsoluteLevelFile(baseBoxPath, pathSuffix); boolean foundAndDeletedParentFile = false; if (parentBoxFile.exists()) { foundAndDeletedParentFile = parentBoxFile.delete(); } if (foundAndDeletedParentFile) { removedPaths.add("\n" + parentBoxFile); parentBox = parentBox.getParentBoxData(); } else { break; } } } else { LOG.warn(STR, boxFile); } } private static final Logger LOG = LoggerFactory.getLogger(BoxClient.class);
/** * Removes the image file for the specified box from disk * as well as any existing image files for the box's parents. * * @param boxData box to remove. * * @param baseBoxPath the base path for all boxes being rendered * (e.g. /nrs/spc/rendered_boxes/spc/aibs_mm2_data/1024x1024). * * @param pathSuffix the suffix (format extension including '.') to append to each box path (e.g. '.jpg'). * * @param removedPaths list of already removed paths. * Any paths removed by this method will be added to the list. * * @param maxLevel The maximum level parent to remove. */
Removes the image file for the specified box from disk as well as any existing image files for the box's parents
removeBoxFileAndParentFiles
{ "repo_name": "fcollman/render", "path": "render-ws-spark-client/src/main/java/org/janelia/render/client/spark/betterbox/BoxClient.java", "license": "gpl-2.0", "size": 35641 }
[ "java.io.File", "java.util.List", "org.janelia.alignment.betterbox.BoxData", "org.slf4j.Logger", "org.slf4j.LoggerFactory" ]
import java.io.File; import java.util.List; import org.janelia.alignment.betterbox.BoxData; import org.slf4j.Logger; import org.slf4j.LoggerFactory;
import java.io.*; import java.util.*; import org.janelia.alignment.betterbox.*; import org.slf4j.*;
[ "java.io", "java.util", "org.janelia.alignment", "org.slf4j" ]
java.io; java.util; org.janelia.alignment; org.slf4j;
1,812,182
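removeBoxFileAndParentFiles above deletes a leaf-level image and then climbs a chain of parent-level files until one is already gone. The sketch below is a simplified, repository-free analogue of that climb; the file names and the use of temp files are purely illustrative.

import java.io.File;
import java.util.ArrayList;
import java.util.List;

// Delete the leaf file, then remove each existing parent-level file in order,
// stopping as soon as one is missing or cannot be deleted.
public class LevelCleanupSketch {
    static List<File> removeWithParents(File leaf, List<File> parentLevels) {
        List<File> removed = new ArrayList<>();
        if (leaf.delete()) {
            removed.add(leaf);
            for (File parent : parentLevels) {           // ordered from closest parent upward
                if (parent.exists() && parent.delete()) {
                    removed.add(parent);
                } else {
                    break;                               // parent already gone (or locked): stop climbing
                }
            }
        }
        return removed;
    }

    public static void main(String[] args) throws Exception {
        File leaf = File.createTempFile("box-level3", ".jpg");
        File parent = File.createTempFile("box-level2", ".jpg");
        System.out.println(removeWithParents(leaf, List.of(parent)));
    }
}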
public static CodeCoverageMetrics convertCobertura(CoverageResult result) { if (result == null) { return null; } float packagesCoverage = getCoveragePercentage(result, CoverageMetric.PACKAGES); float filesCoverage = getCoveragePercentage(result, CoverageMetric.FILES); float classesCoverage = getCoveragePercentage(result, CoverageMetric.CLASSES); float methodCoverage = getCoveragePercentage(result, CoverageMetric.METHOD); float lineCoverage = getCoveragePercentage(result, CoverageMetric.LINE); float conditionalCoverage = getCoveragePercentage(result, CoverageMetric.CONDITIONAL); return new CodeCoverageMetrics( packagesCoverage, filesCoverage, classesCoverage, methodCoverage, lineCoverage, conditionalCoverage ); }
static CodeCoverageMetrics function(CoverageResult result) { if (result == null) { return null; } float packagesCoverage = getCoveragePercentage(result, CoverageMetric.PACKAGES); float filesCoverage = getCoveragePercentage(result, CoverageMetric.FILES); float classesCoverage = getCoveragePercentage(result, CoverageMetric.CLASSES); float methodCoverage = getCoveragePercentage(result, CoverageMetric.METHOD); float lineCoverage = getCoveragePercentage(result, CoverageMetric.LINE); float conditionalCoverage = getCoveragePercentage(result, CoverageMetric.CONDITIONAL); return new CodeCoverageMetrics( packagesCoverage, filesCoverage, classesCoverage, methodCoverage, lineCoverage, conditionalCoverage ); }
/** * Convert Cobertura results to an internal CodeCoverageMetrics representation * * @param result The cobertura report * @return The internal representation of coverage */
Convert Cobertura results to an internal CodeCoverageMetrics representation
convertCobertura
{ "repo_name": "jenkinsci/phabricator-plugin", "path": "src/main/java/com/uber/jenkins/phabricator/coverage/CoberturaCoverageProvider.java", "license": "mit", "size": 8180 }
[ "hudson.plugins.cobertura.targets.CoverageMetric", "hudson.plugins.cobertura.targets.CoverageResult" ]
import hudson.plugins.cobertura.targets.CoverageMetric; import hudson.plugins.cobertura.targets.CoverageResult;
import hudson.plugins.cobertura.targets.*;
[ "hudson.plugins.cobertura" ]
hudson.plugins.cobertura;
1,610,835
private static void destroyGameTimer(EventData eventData) { try { if(((Event) eventData.getClz().newInstance()).isGameTimerEnabled() && eventData.getGameTimer() != null) { for(Player player : EventFunctions.getAllPlayers(eventData)) { eventData.getGameTimerTextdraw().hide(player); PlayerData playerData = EventSystem.getInstance().getPlayerLifecycleHolder().getObject(player, PlayerData.class); if(playerData.getColor() != null) player.setColor(playerData.getColor()); } eventData.getGameTimer().stop(); eventData.getGameTimer().destroy(); eventData.setGameTimer(null); eventData.setGameTime(-1); } } catch (InstantiationException | IllegalAccessException e) { System.out.println(e); e.printStackTrace(); } }
static void function(EventData eventData) { try { if(((Event) eventData.getClz().newInstance()).isGameTimerEnabled() && eventData.getGameTimer() != null) { for(Player player : EventFunctions.getAllPlayers(eventData)) { eventData.getGameTimerTextdraw().hide(player); PlayerData playerData = EventSystem.getInstance().getPlayerLifecycleHolder().getObject(player, PlayerData.class); if(playerData.getColor() != null) player.setColor(playerData.getColor()); } eventData.getGameTimer().stop(); eventData.getGameTimer().destroy(); eventData.setGameTimer(null); eventData.setGameTime(-1); } } catch (InstantiationException | IllegalAccessException e) { System.out.println(e); e.printStackTrace(); } }
/** * stop the game timer * @param eventData the event */
stop the game timer
destroyGameTimer
{ "repo_name": "Alf21/event-system", "path": "src/main/java/me/alf21/eventsystem/EventBase.java", "license": "gpl-3.0", "size": 88807 }
[ "net.gtaun.shoebill.object.Player" ]
import net.gtaun.shoebill.object.Player;
import net.gtaun.shoebill.object.*;
[ "net.gtaun.shoebill" ]
net.gtaun.shoebill;
791,681
public ServiceFuture<List<MetricDefinitionInner>> listMetricDefinitionsAsync(String resourceGroupName, String accountName, String databaseRid, String collectionRid, final ServiceCallback<List<MetricDefinitionInner>> serviceCallback) { return ServiceFuture.fromResponse(listMetricDefinitionsWithServiceResponseAsync(resourceGroupName, accountName, databaseRid, collectionRid), serviceCallback); }
ServiceFuture<List<MetricDefinitionInner>> function(String resourceGroupName, String accountName, String databaseRid, String collectionRid, final ServiceCallback<List<MetricDefinitionInner>> serviceCallback) { return ServiceFuture.fromResponse(listMetricDefinitionsWithServiceResponseAsync(resourceGroupName, accountName, databaseRid, collectionRid), serviceCallback); }
/** * Retrieves metric definitions for the given collection. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param accountName Cosmos DB database account name. * @param databaseRid Cosmos DB database rid. * @param collectionRid Cosmos DB collection rid. * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */
Retrieves metric definitions for the given collection
listMetricDefinitionsAsync
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/cosmos/mgmt-v2020_06_01_preview/src/main/java/com/microsoft/azure/management/cosmosdb/v2020_06_01_preview/implementation/CollectionsInner.java", "license": "mit", "size": 30736 }
[ "com.microsoft.rest.ServiceCallback", "com.microsoft.rest.ServiceFuture", "java.util.List" ]
import com.microsoft.rest.ServiceCallback; import com.microsoft.rest.ServiceFuture; import java.util.List;
import com.microsoft.rest.*; import java.util.*;
[ "com.microsoft.rest", "java.util" ]
com.microsoft.rest; java.util;
1,617,864
public Double visit(MetalFuture derivative, SimpleFutureDataBundle data) { ArgumentChecker.notNull(derivative, "derivative"); ArgumentChecker.notNull(data, "data"); return SIMPLE_FUTURE_CONVERTER.visitMetalFuture(derivative).accept(this, data); }
Double function(MetalFuture derivative, SimpleFutureDataBundle data) { ArgumentChecker.notNull(derivative, STR); ArgumentChecker.notNull(data, "data"); return SIMPLE_FUTURE_CONVERTER.visitMetalFuture(derivative).accept(this, data); }
/** * Main visitor entry point, takes a derivative and market data and returns the current value * * @param derivative the derivative to price * @param data market data * @return the current value */
Main visitor entry point, takes a derivative and market data and returns the current value
visit
{ "repo_name": "jeorme/OG-Platform", "path": "projects/OG-Analytics/src/main/java/com/opengamma/analytics/financial/commodity/calculator/CommodityFuturePresentValueCalculator.java", "license": "apache-2.0", "size": 4213 }
[ "com.opengamma.analytics.financial.commodity.derivative.MetalFuture", "com.opengamma.analytics.financial.simpleinstruments.pricing.SimpleFutureDataBundle", "com.opengamma.util.ArgumentChecker" ]
import com.opengamma.analytics.financial.commodity.derivative.MetalFuture; import com.opengamma.analytics.financial.simpleinstruments.pricing.SimpleFutureDataBundle; import com.opengamma.util.ArgumentChecker;
import com.opengamma.analytics.financial.commodity.derivative.*; import com.opengamma.analytics.financial.simpleinstruments.pricing.*; import com.opengamma.util.*;
[ "com.opengamma.analytics", "com.opengamma.util" ]
com.opengamma.analytics; com.opengamma.util;
1,598,914
public boolean unSubscribeCalendar(com.idega.user.data.User user, String calendarPath); /** * <p>Searches Bedework system for calendars, where given {@link User} is creator.</p> * @param userid {@link User#getPrimaryKey()}; * @return {@link Collection} of {@link BwCalendar}s or {@link Collections#EMPTY_SET}
boolean function(com.idega.user.data.User user, String calendarPath); /** * <p>Searches Bedework system for calendars, where given {@link User} is creator.</p> * @param userid {@link User#getPrimaryKey()}; * @return {@link Collection} of {@link BwCalendar}s or {@link Collections#EMPTY_SET}
/** * Unsubscribes the user from the given calendar so that the user no longer receives data from it. * @param user user that will no longer get data from the calendar * @param calendarPath path to the calendar that will no longer send data to the user */
Unsubscribes the user from the given calendar so that the user no longer receives data from it
unSubscribeCalendar
{ "repo_name": "idega/com.idega.block.cal", "path": "src/java/com/idega/block/cal/business/CalendarManagementService.java", "license": "gpl-3.0", "size": 12430 }
[ "com.idega.core.user.data.User", "java.util.Collection", "java.util.Collections" ]
import com.idega.core.user.data.User; import java.util.Collection; import java.util.Collections;
import com.idega.core.user.data.*; import java.util.*;
[ "com.idega.core", "java.util" ]
com.idega.core; java.util;
872,270
@Secured({ "IS_AUTHENTICATED_ANONYMOUSLY", "AFTER_ACL_VALUE_OBJECT_COLLECTION_READ" }) Collection<? extends DatabaseBackedGeneSetValueObject> loadValueObjects( Collection<Long> ids ); /** * Security filtering done at DAO level see {@link ubic.gemma.persistence.service.genome.gene.GeneSetDao}
@Secured({ STR, STR }) Collection<? extends DatabaseBackedGeneSetValueObject> loadValueObjects( Collection<Long> ids ); /** * Security filtering done at DAO level see {@link ubic.gemma.persistence.service.genome.gene.GeneSetDao}
/** * Ids of member genes will be filled in * * @param ids ids * @return gene set value object */
Ids of member genes will be filled in
loadValueObjects
{ "repo_name": "ppavlidis/Gemma", "path": "gemma-core/src/main/java/ubic/gemma/core/genome/gene/service/GeneSetService.java", "license": "apache-2.0", "size": 13094 }
[ "java.util.Collection", "org.springframework.security.access.annotation.Secured" ]
import java.util.Collection; import org.springframework.security.access.annotation.Secured;
import java.util.*; import org.springframework.security.access.annotation.*;
[ "java.util", "org.springframework.security" ]
java.util; org.springframework.security;
1,708,870
public Installer readConfig(File fileRoot, String fileName) throws IOException, ConfigurationException { installer.getResultContainer().setInstallRoot(fileRoot); File config = new File(fileRoot, fileName); if(!config.exists()){ // passed in incorrectly on the command line or bad installer throw new IOException(); } InputSource xmlInp = new InputSource(new FileInputStream(config)); readConfig(xmlInp); return installer; }
Installer function(File fileRoot, String fileName) throws IOException, ConfigurationException { installer.getResultContainer().setInstallRoot(fileRoot); File config = new File(fileRoot, fileName); if(!config.exists()){ throw new IOException(); } InputSource xmlInp = new InputSource(new FileInputStream(config)); readConfig(xmlInp); return installer; }
/** * Currently reads the config using any available XML parser. * This method reads the config from the file system. * @param fileRoot The directory where the config file is stored * @param fileName the name of the configuration file (usually antinstall-config.xml) * @return Installer */
Currently reads the config using any available XML parser. This method reads the config from the file system
readConfig
{ "repo_name": "neoautus/lucidj", "path": "extras/AntInstaller/AntInstaller-beta0.8/src/org/tp23/antinstaller/runtime/exe/LoadConfigFilter.java", "license": "apache-2.0", "size": 17145 }
[ "java.io.File", "java.io.FileInputStream", "java.io.IOException", "org.tp23.antinstaller.Installer", "org.tp23.antinstaller.runtime.ConfigurationException", "org.xml.sax.InputSource" ]
import java.io.File; import java.io.FileInputStream; import java.io.IOException; import org.tp23.antinstaller.Installer; import org.tp23.antinstaller.runtime.ConfigurationException; import org.xml.sax.InputSource;
import java.io.*; import org.tp23.antinstaller.*; import org.tp23.antinstaller.runtime.*; import org.xml.sax.*;
[ "java.io", "org.tp23.antinstaller", "org.xml.sax" ]
java.io; org.tp23.antinstaller; org.xml.sax;
2,460,438
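The readConfig record above wraps the config file in a SAX InputSource but throws a bare IOException when the file is missing. The sketch below keeps the same wiring with JDK-only classes and throws a FileNotFoundException carrying the offending path, which is an assumption about friendlier behavior rather than the original API.

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import org.xml.sax.InputSource;

// Resolve the config file under a root directory and wrap it in a SAX InputSource,
// failing fast with the full path when the file does not exist.
public class ConfigSourceSketch {
    static InputSource openConfig(File fileRoot, String fileName) throws IOException {
        File config = new File(fileRoot, fileName);
        if (!config.exists()) {
            throw new FileNotFoundException("Missing configuration file: " + config.getAbsolutePath());
        }
        return new InputSource(new FileInputStream(config));
    }

    public static void main(String[] args) {
        try {
            InputSource source = openConfig(new File("."), "antinstall-config.xml");
            System.out.println("opened: " + (source.getByteStream() != null));
        } catch (IOException e) {
            System.out.println(e.getMessage());
        }
    }
}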
public void testWifiConnected() { Intent intent = new Intent(ConnectivityManager.CONNECTIVITY_ACTION); mMockContext.setMockConnectivityManager(ConnectivityManager.TYPE_WIFI, true); when(mMockPreferences.getBoolean(ReplicationService.class.getName() + ".wasOnWifi", false)).thenReturn(false); when(mMockPreferences.getBoolean(ReplicationService.class.getName() + "" + ".replicationsPending", true)).thenReturn(true); mMockPreferencesEditor = mock(SharedPreferences.Editor.class); when(mMockPreferences.edit()).thenReturn(mMockPreferencesEditor); mReceiver.onReceive(mMockContext, intent); verify(mMockPreferencesEditor, times(1)).putBoolean(ReplicationService.class.getName() + ".wasOnWifi", true); assertEquals(1, mMockContext.getIntentsReceived().size()); Intent receivedIntent = mMockContext.getIntentsReceived().get(0); assertEquals(ReplicationService.class.getName(), receivedIntent.getComponent().getClassName()); assertNull(receivedIntent.getAction()); assertEquals(PeriodicReplicationService.COMMAND_START_REPLICATION, receivedIntent.getIntExtra(ReplicationService.EXTRA_COMMAND, ReplicationService.COMMAND_NONE)); }
void function() { Intent intent = new Intent(ConnectivityManager.CONNECTIVITY_ACTION); mMockContext.setMockConnectivityManager(ConnectivityManager.TYPE_WIFI, true); when(mMockPreferences.getBoolean(ReplicationService.class.getName() + STR, false)).thenReturn(false); when(mMockPreferences.getBoolean(ReplicationService.class.getName() + "" + STR, true)).thenReturn(true); mMockPreferencesEditor = mock(SharedPreferences.Editor.class); when(mMockPreferences.edit()).thenReturn(mMockPreferencesEditor); mReceiver.onReceive(mMockContext, intent); verify(mMockPreferencesEditor, times(1)).putBoolean(ReplicationService.class.getName() + STR, true); assertEquals(1, mMockContext.getIntentsReceived().size()); Intent receivedIntent = mMockContext.getIntentsReceived().get(0); assertEquals(ReplicationService.class.getName(), receivedIntent.getComponent().getClassName()); assertNull(receivedIntent.getAction()); assertEquals(PeriodicReplicationService.COMMAND_START_REPLICATION, receivedIntent.getIntExtra(ReplicationService.EXTRA_COMMAND, ReplicationService.COMMAND_NONE)); }
/** * Check that when {@link WifiPeriodicReplicationReceiver} receives * {@link ConnectivityManager#CONNECTIVITY_ACTION} and WiFi is connected and there is a pending * replication an {@link Intent} is sent out to start the Service * {@link ReplicationService} associated with * {@link WifiPeriodicReplicationReceiver} containing the extra * {@link ReplicationService#EXTRA_COMMAND} with the value * {@link PeriodicReplicationService#COMMAND_START_REPLICATION}. */
Check that when <code>WifiPeriodicReplicationReceiver</code> receives <code>ConnectivityManager#CONNECTIVITY_ACTION</code> and WiFi is connected and there is a pending replication an <code>Intent</code> is sent out to start the Service <code>ReplicationService</code> associated with <code>WifiPeriodicReplicationReceiver</code> containing the extra <code>ReplicationService#EXTRA_COMMAND</code> with the value <code>PeriodicReplicationService#COMMAND_START_REPLICATION</code>
testWifiConnected
{ "repo_name": "cloudant/sync-android", "path": "cloudant-sync-datastore-android/src/test/java/com/cloudant/sync/replication/WifiPeriodicReplicationReceiverTest.java", "license": "apache-2.0", "size": 13717 }
[ "android.content.Intent", "android.content.SharedPreferences", "android.net.ConnectivityManager", "org.mockito.Mockito" ]
import android.content.Intent; import android.content.SharedPreferences; import android.net.ConnectivityManager; import org.mockito.Mockito;
import android.content.*; import android.net.*; import org.mockito.*;
[ "android.content", "android.net", "org.mockito" ]
android.content; android.net; org.mockito;
1,812,996
public EList<GmlMark> getGmlMarks() { if (gmlMarks == null) { gmlMarks = new BasicInternalEList<GmlMark>(GmlMark.class); } return gmlMarks; }
EList<GmlMark> function() { if (gmlMarks == null) { gmlMarks = new BasicInternalEList<GmlMark>(GmlMark.class); } return gmlMarks; }
/** * Returns the value of the '<em><b>Gml Marks</b></em>' reference list. * The list contents are of type {@link CIM15.IEC61970.Informative.InfGMLSupport.GmlMark}. * It is bidirectional and its opposite is '{@link CIM15.IEC61970.Informative.InfGMLSupport.GmlMark#getGmlStrokes <em>Gml Strokes</em>}'. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Gml Marks</em>' reference list isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Gml Marks</em>' reference list. * @see CIM15.IEC61970.Informative.InfGMLSupport.GmlMark#getGmlStrokes * @generated */
Returns the value of the 'Gml Marks' reference list. The list contents are of type <code>CIM15.IEC61970.Informative.InfGMLSupport.GmlMark</code>. It is bidirectional and its opposite is '<code>CIM15.IEC61970.Informative.InfGMLSupport.GmlMark#getGmlStrokes Gml Strokes</code>'. If the meaning of the 'Gml Marks' reference list isn't clear, there really should be more of a description here...
getGmlMarks
{ "repo_name": "SES-fortiss/SmartGridCoSimulation", "path": "core/cim15/src/CIM15/IEC61970/Informative/InfGMLSupport/GmlStroke.java", "license": "apache-2.0", "size": 35301 }
[ "org.eclipse.emf.common.util.EList", "org.eclipse.emf.ecore.util.BasicInternalEList" ]
import org.eclipse.emf.common.util.EList; import org.eclipse.emf.ecore.util.BasicInternalEList;
import org.eclipse.emf.common.util.*; import org.eclipse.emf.ecore.util.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
1,463,537
private void format(StorageDirectory bpSdir, NamespaceInfo nsInfo) throws IOException { LOG.info("Formatting block pool " + blockpoolID + " directory " + bpSdir.getCurrentDir()); bpSdir.clearDirectory(); // create directory this.layoutVersion = HdfsConstants.DATANODE_LAYOUT_VERSION; this.cTime = nsInfo.getCTime(); this.namespaceID = nsInfo.getNamespaceID(); this.blockpoolID = nsInfo.getBlockPoolID(); writeProperties(bpSdir); }
void function(StorageDirectory bpSdir, NamespaceInfo nsInfo) throws IOException { LOG.info(STR + blockpoolID + STR + bpSdir.getCurrentDir()); bpSdir.clearDirectory(); this.layoutVersion = HdfsConstants.DATANODE_LAYOUT_VERSION; this.cTime = nsInfo.getCTime(); this.namespaceID = nsInfo.getNamespaceID(); this.blockpoolID = nsInfo.getBlockPoolID(); writeProperties(bpSdir); }
/** * Format a block pool slice storage. * @param bpSdir the block pool storage * @param nsInfo the name space info * @throws IOException Signals that an I/O exception has occurred. */
Format a block pool slice storage
format
{ "repo_name": "fyqls/hadoop-2.4.0", "path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockPoolSliceStorage.java", "license": "apache-2.0", "size": 23550 }
[ "java.io.IOException", "org.apache.hadoop.hdfs.protocol.HdfsConstants", "org.apache.hadoop.hdfs.server.protocol.NamespaceInfo" ]
import java.io.IOException; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
import java.io.*; import org.apache.hadoop.hdfs.protocol.*; import org.apache.hadoop.hdfs.server.protocol.*;
[ "java.io", "org.apache.hadoop" ]
java.io; org.apache.hadoop;
2,298,532
public void destroy() { context.getManagementStrategy().removeEventNotifier(eventNotifier); try { ServiceHelper.stopService(eventNotifier); } catch (Exception e) { throw RuntimeCamelException.wrapRuntimeCamelException(e); } created = false; }
void function() { context.getManagementStrategy().removeEventNotifier(eventNotifier); try { ServiceHelper.stopService(eventNotifier); } catch (Exception e) { throw RuntimeCamelException.wrapRuntimeCamelException(e); } created = false; }
/** * De-registers this builder from its {@link CamelContext}. * <p/> * Once destroyed, this instance will not function again. */
De-registers this builder from its <code>CamelContext</code>. Once destroyed, this instance will not function again
destroy
{ "repo_name": "kevinearls/camel", "path": "camel-core/src/main/java/org/apache/camel/builder/NotifyBuilder.java", "license": "apache-2.0", "size": 54739 }
[ "org.apache.camel.RuntimeCamelException", "org.apache.camel.support.ServiceHelper" ]
import org.apache.camel.RuntimeCamelException; import org.apache.camel.support.ServiceHelper;
import org.apache.camel.*; import org.apache.camel.support.*;
[ "org.apache.camel" ]
org.apache.camel;
2,839,005
void deleteSnapshot(SnapshotId snapshotId);
void deleteSnapshot(SnapshotId snapshotId);
/** * Deletes snapshot * * @param snapshotId snapshot id */
Deletes snapshot
deleteSnapshot
{ "repo_name": "strahanjen/strahanjen.github.io", "path": "elasticsearch-master/core/src/main/java/org/elasticsearch/repositories/Repository.java", "license": "bsd-3-clause", "size": 8629 }
[ "org.elasticsearch.snapshots.SnapshotId" ]
import org.elasticsearch.snapshots.SnapshotId;
import org.elasticsearch.snapshots.*;
[ "org.elasticsearch.snapshots" ]
org.elasticsearch.snapshots;
1,380,586
void notifyProcessor(String rowLogId, String shardId) throws InterruptedException, KeeperException;
void notifyProcessor(String rowLogId, String shardId) throws InterruptedException, KeeperException;
/** * Notify the processor that a new message has been put on the rowlog. * <p>If the processor was in a wait mode, it will wake up and check the rowlog for new messages. */
Notify the processor that a new message has been put on the rowlog. If the processor was in a wait mode, it will wake up and check the rowlog for new messages
notifyProcessor
{ "repo_name": "ekoontz/Lily", "path": "global/rowlog/api/src/main/java/org/lilyproject/rowlog/api/RowLogConfigurationManager.java", "license": "apache-2.0", "size": 7767 }
[ "org.apache.zookeeper.KeeperException" ]
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.*;
[ "org.apache.zookeeper" ]
org.apache.zookeeper;
58,267
private String getPrettyDate(Date date) { DateFormat df = DateFormat.getDateTimeInstance(DateFormat.FULL, DateFormat.FULL); return df.format(date); }
String function(Date date) { DateFormat df = DateFormat.getDateTimeInstance(DateFormat.FULL, DateFormat.FULL); return df.format(date); }
/** * Returns a displayable date. * * @param date the date to format * @return the formatted date string */
Returns a displayable date
getPrettyDate
{ "repo_name": "jeking3/scheduling-server", "path": "Bookings/src/au/edu/uts/eng/remotelabs/schedserver/bookings/impl/BookingNotification.java", "license": "bsd-3-clause", "size": 7830 }
[ "java.text.DateFormat", "java.util.Date" ]
import java.text.DateFormat; import java.util.Date;
import java.text.*; import java.util.*;
[ "java.text", "java.util" ]
java.text; java.util;
630,093
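A short sketch of the locale-sensitive formatting behind getPrettyDate above: DateFormat.getDateTimeInstance with FULL styles, shown for the default locale and for an explicit one. JDK-only; the class name is made up.

import java.text.DateFormat;
import java.util.Date;
import java.util.Locale;

// Format the current instant with FULL date and time styles, default locale then German.
public class PrettyDateSketch {
    public static void main(String[] args) {
        Date now = new Date();
        DateFormat df = DateFormat.getDateTimeInstance(DateFormat.FULL, DateFormat.FULL);
        System.out.println(df.format(now));
        DateFormat de = DateFormat.getDateTimeInstance(DateFormat.FULL, DateFormat.FULL, Locale.GERMANY);
        System.out.println(de.format(now));
    }
}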
List<I_C_ReferenceNo_Doc> retrieveDocAssignments(Properties ctx, int referenceNoTypeId, int tableId, int recordId, String trxName);
List<I_C_ReferenceNo_Doc> retrieveDocAssignments(Properties ctx, int referenceNoTypeId, int tableId, int recordId, String trxName);
/** * Retrieve all document assignments for given tableId/recordId * * @param ctx * @param referenceNoTypeId optional; if not specified, assignments for all types will be returned * @param tableId * @param recordId * @param trxName * @return assignments */
Retrieve all document assignments for given tableId/recordId
retrieveDocAssignments
{ "repo_name": "klst-com/metasfresh", "path": "de.metas.document.refid/src/main/java/de/metas/document/refid/api/IReferenceNoDAO.java", "license": "gpl-2.0", "size": 4830 }
[ "java.util.List", "java.util.Properties" ]
import java.util.List; import java.util.Properties;
import java.util.*;
[ "java.util" ]
java.util;
1,091,154
@Deprecated public static File getRapidMinerHome() throws IOException { return FileSystemService.getRapidMinerHome(); }
static File function() throws IOException { return FileSystemService.getRapidMinerHome(); }
/** * Deprecated method. Remains only for compatibility. Please use * {@link FileSystemService#getRapidMinerHome()} instead. */
Deprecated method. Remains only for compatibility. Please use <code>FileSystemService#getRapidMinerHome()</code> instead
getRapidMinerHome
{ "repo_name": "boob-sbcm/3838438", "path": "src/main/java/com/rapidminer/tools/ParameterService.java", "license": "agpl-3.0", "size": 22466 }
[ "java.io.File", "java.io.IOException" ]
import java.io.File; import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
1,225,942
public static DirectLaunchConfiguration parse( ClassLoader classLoader, String... arguments) throws LaunchConfigurationException { return parse(classLoader, Arrays.asList(arguments)); }
static DirectLaunchConfiguration function( ClassLoader classLoader, String... arguments) throws LaunchConfigurationException { return parse(classLoader, Arrays.asList(arguments)); }
/** * Analyzes launch arguments and returns {@link DirectLaunchConfiguration} from them. * @param classLoader the current class loader * @param arguments the launch arguments * @return the analyzed configuration * @throws LaunchConfigurationException if arguments are wrong */
Analyzes launch arguments and returns <code>DirectLaunchConfiguration</code> from them
parse
{ "repo_name": "asakusafw/asakusafw-compiler", "path": "dag/runtime/iterative/src/main/java/com/asakusafw/dag/iterative/DirectLaunchConfiguration.java", "license": "apache-2.0", "size": 6048 }
[ "com.asakusafw.bridge.launch.LaunchConfigurationException", "java.util.Arrays" ]
import com.asakusafw.bridge.launch.LaunchConfigurationException; import java.util.Arrays;
import com.asakusafw.bridge.launch.*; import java.util.*;
[ "com.asakusafw.bridge", "java.util" ]
com.asakusafw.bridge; java.util;
2,439,443
public boolean readBoundary() throws MalformedStreamException { byte[] marker = new byte[2]; boolean nextChunk = false; head += boundaryLength; try { marker[0] = readByte(); if (marker[0] == LF) { // Work around IE5 Mac bug with input type=image. // Because the boundary delimiter, not including the trailing // CRLF, must not appear within any file (RFC 2046, section // 5.1.1), we know the missing CR is due to a buggy browser // rather than a file containing something similar to a // boundary. return true; } marker[1] = readByte(); if (arrayequals(marker, STREAM_TERMINATOR, 2)) { nextChunk = false; } else if (arrayequals(marker, FIELD_SEPARATOR, 2)) { nextChunk = true; } else { throw new MalformedStreamException("Unexpected characters follow a boundary"); } } catch (IOException e) { throw new MalformedStreamException("Stream ended unexpectedly"); } return nextChunk; }
boolean function() throws MalformedStreamException { byte[] marker = new byte[2]; boolean nextChunk = false; head += boundaryLength; try { marker[0] = readByte(); if (marker[0] == LF) { return true; } marker[1] = readByte(); if (arrayequals(marker, STREAM_TERMINATOR, 2)) { nextChunk = false; } else if (arrayequals(marker, FIELD_SEPARATOR, 2)) { nextChunk = true; } else { throw new MalformedStreamException(STR); } } catch (IOException e) { throw new MalformedStreamException(STR); } return nextChunk; }
/** * Skips a <code>boundary</code> token, and checks whether more <code>encapsulations</code> * are contained in the stream. * * @return <code>true</code> if there are more encapsulations in this stream; * <code>false</code> otherwise. * * @exception MalformedStreamException * if the stream ends unexpectedly or fails to follow required syntax. */
Skips a <code>boundary</code> token, and checks whether more <code>encapsulations</code> are contained in the stream
readBoundary
{ "repo_name": "astubbs/wicket.get-portals2", "path": "wicket/src/main/java/org/apache/wicket/util/upload/MultipartFormInputStream.java", "license": "apache-2.0", "size": 22780 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
1,806,603
public void toPNML(FileChannel fc){ item.toPNML(fc); }
void function(FileChannel fc){ item.toPNML(fc); }
/** * Writes the PNML XML tree of this object into file channel. */
Writes the PNML XML tree of this object into file channel
toPNML
{ "repo_name": "lhillah/pnmlframework", "path": "pnmlFw-SNNet/src/fr/lip6/move/pnml/symmetricnet/hlcorestructure/hlapi/DeclarationHLAPI.java", "license": "epl-1.0", "size": 11245 }
[ "java.nio.channels.FileChannel" ]
import java.nio.channels.FileChannel;
import java.nio.channels.*;
[ "java.nio" ]
java.nio;
2,501,543
public void testUpdateLockInReadUncommitted() throws SQLException { getConnection().setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED); Statement s = createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE); ResultSet rs = s.executeQuery("select * from t1"); rs.next(); int firstKey = rs.getInt(1); println("T1: Read next Tuple:(" + rs.getInt(1) + "," + rs.getInt(2) + "," + rs.getInt(3) + ")"); Connection con2 = openDefaultConnection(); con2.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); PreparedStatement ps2 = con2.prepareStatement ("delete from t1 where id=?"); ps2.setInt(1, firstKey); try { ps2.executeUpdate(); fail("expected record with id=" + firstKey + " to be locked"); } catch (SQLException e) { assertSQLState(LOCK_TIMEOUT_SQL_STATE, e); } ps2.close(); con2.rollback(); con2.close(); s.close(); }
void function() throws SQLException { getConnection().setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED); Statement s = createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE); ResultSet rs = s.executeQuery(STR); rs.next(); int firstKey = rs.getInt(1); println(STR + rs.getInt(1) + "," + rs.getInt(2) + "," + rs.getInt(3) + ")"); Connection con2 = openDefaultConnection(); con2.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); PreparedStatement ps2 = con2.prepareStatement (STR); ps2.setInt(1, firstKey); try { ps2.executeUpdate(); fail(STR + firstKey + STR); } catch (SQLException e) { assertSQLState(LOCK_TIMEOUT_SQL_STATE, e); } ps2.close(); con2.rollback(); con2.close(); s.close(); }
/** * Test that Derby sets an update lock on the current row when using * read-uncommitted **/
Test that Derby sets an update lock on the current row when using read-uncommitted
testUpdateLockInReadUncommitted
{ "repo_name": "trejkaz/derby", "path": "java/testing/org/apache/derbyTesting/functionTests/tests/jdbcapi/ConcurrencyTest.java", "license": "apache-2.0", "size": 34448 }
[ "java.sql.Connection", "java.sql.PreparedStatement", "java.sql.ResultSet", "java.sql.SQLException", "java.sql.Statement" ]
import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement;
import java.sql.*;
[ "java.sql" ]
java.sql;
1,033,968
public static boolean isDone(CompletionStage<?> future) { return future.toCompletableFuture().isDone(); }
static boolean function(CompletionStage<?> future) { return future.toCompletableFuture().isDone(); }
/** * Indicates whether the future is done. * * <p>This utility is provided so consumers of futures need not even convert to {@link * CompletableFuture}, an interface that is only suitable for producers of futures. */
Indicates whether the future is done. This utility is provided so consumers of futures need not even convert to <code>CompletableFuture</code>, an interface that is only suitable for producers of futures
isDone
{ "repo_name": "RyanSkraba/beam", "path": "sdks/java/core/src/main/java/org/apache/beam/sdk/util/MoreFutures.java", "license": "apache-2.0", "size": 9243 }
[ "java.util.concurrent.CompletionStage" ]
import java.util.concurrent.CompletionStage;
import java.util.concurrent.*;
[ "java.util" ]
java.util;
2,616,060
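A short usage sketch of the utility above, using only the standard java.util.concurrent API; the local isDone helper simply mirrors the entry's delegation to CompletableFuture.isDone().

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;

public class IsDoneSketch {
    // Mirrors the utility: consumers keep working with CompletionStage only.
    static boolean isDone(CompletionStage<?> future) {
        return future.toCompletableFuture().isDone();
    }

    public static void main(String[] args) {
        CompletionStage<String> pending = new CompletableFuture<>();
        System.out.println(isDone(pending));                              // false: nothing completed it yet

        CompletionStage<String> done = CompletableFuture.completedFuture("ok");
        System.out.println(isDone(done));                                 // true: already completed
    }
}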
String getLastUpdatedTimeOfEndpoint(String endpointId) throws APIMgtDAOException;
String getLastUpdatedTimeOfEndpoint(String endpointId) throws APIMgtDAOException;
/** * Retrieves the last updated time of the endpoint given its endpointId * * @param endpointId Id of the endpoint * @return last updated time * @throws APIMgtDAOException if any DB-level error occurs */
Retrieves the last updated time of the endpoint given its endpointId
getLastUpdatedTimeOfEndpoint
{ "repo_name": "sambaheerathan/carbon-apimgt", "path": "components/apimgt/org.wso2.carbon.apimgt.core/src/main/java/org/wso2/carbon/apimgt/core/dao/ApiDAO.java", "license": "apache-2.0", "size": 28726 }
[ "org.wso2.carbon.apimgt.core.exception.APIMgtDAOException" ]
import org.wso2.carbon.apimgt.core.exception.APIMgtDAOException;
import org.wso2.carbon.apimgt.core.exception.*;
[ "org.wso2.carbon" ]
org.wso2.carbon;
1,131,579
private MultiWrapDynaBean createBean(boolean withDynaBean) { params = new BasicBuilderParameters(); wrapBean = new WrappedBeanTestImpl(); Collection<Object> beans = new ArrayList<Object>(); beans.add(params); beans.add(wrapBean); if (withDynaBean) { wrapDynaBean = new LazyDynaBean(); wrapDynaBean.set(MAPPED_PROPERTY, "someKey", "somValue"); beans.add(wrapDynaBean); } return new MultiWrapDynaBean(beans); }
MultiWrapDynaBean function(boolean withDynaBean) { params = new BasicBuilderParameters(); wrapBean = new WrappedBeanTestImpl(); Collection<Object> beans = new ArrayList<Object>(); beans.add(params); beans.add(wrapBean); if (withDynaBean) { wrapDynaBean = new LazyDynaBean(); wrapDynaBean.set(MAPPED_PROPERTY, STR, STR); beans.add(wrapDynaBean); } return new MultiWrapDynaBean(beans); }
/** * Creates a new test object with a list of wrapped beans. * * @param withDynaBean a flag indicating whether a DynaBean should also be added to the * wrapped beans * @return the test bean */
Creates a new test object with a list of wrapped beans
createBean
{ "repo_name": "mohanaraosv/commons-configuration", "path": "src/test/java/org/apache/commons/configuration2/builder/combined/TestMultiWrapDynaBean.java", "license": "apache-2.0", "size": 8976 }
[ "java.util.ArrayList", "java.util.Collection", "org.apache.commons.beanutils.LazyDynaBean", "org.apache.commons.configuration2.builder.BasicBuilderParameters" ]
import java.util.ArrayList; import java.util.Collection; import org.apache.commons.beanutils.LazyDynaBean; import org.apache.commons.configuration2.builder.BasicBuilderParameters;
import java.util.*; import org.apache.commons.beanutils.*; import org.apache.commons.configuration2.builder.*;
[ "java.util", "org.apache.commons" ]
java.util; org.apache.commons;
639,156
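The helper above relies on LazyDynaBean growing mapped properties on demand. A minimal sketch of that behaviour on its own, using only commons-beanutils; the property and key names are made up for illustration.

import org.apache.commons.beanutils.LazyDynaBean;

public class LazyDynaBeanSketch {
    public static void main(String[] args) {
        LazyDynaBean bean = new LazyDynaBean();
        // set(name, key, value) lazily creates the mapped property "settings".
        bean.set("settings", "someKey", "someValue");
        // get(name, key) reads the mapped entry back.
        System.out.println(bean.get("settings", "someKey")); // someValue
    }
}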
@Test void testVersions() throws IOException { // test default version try (PDDocument document = new PDDocument()) { // test default version assertEquals(1.4f, document.getVersion(), 0); assertEquals(1.4f, document.getDocument().getVersion(), 0); assertEquals("1.4", document.getDocumentCatalog().getVersion()); // force downgrading version (header) document.getDocument().setVersion(1.3f); document.getDocumentCatalog().setVersion(null); // test new version (header) assertEquals(1.3f, document.getVersion(), 0); assertEquals(1.3f, document.getDocument().getVersion(), 0); assertNull(document.getDocumentCatalog().getVersion()); } // check if version downgrade is denied try (PDDocument document = new PDDocument()) { document.setVersion(1.3f); // all versions shall have their default value assertEquals(1.4f, document.getVersion(), 0); assertEquals(1.4f, document.getDocument().getVersion(), 0); assertEquals("1.4", document.getDocumentCatalog().getVersion()); // check version upgrade document.setVersion(1.5f); // overall version has to be 1.5f assertEquals(1.5f, document.getVersion(), 0); // header version has to be unchanged assertEquals(1.4f, document.getDocument().getVersion(), 0); // catalog version version has to be 1.5 assertEquals("1.5", document.getDocumentCatalog().getVersion()); } }
void testVersions() throws IOException { try (PDDocument document = new PDDocument()) { assertEquals(1.4f, document.getVersion(), 0); assertEquals(1.4f, document.getDocument().getVersion(), 0); assertEquals("1.4", document.getDocumentCatalog().getVersion()); document.getDocument().setVersion(1.3f); document.getDocumentCatalog().setVersion(null); assertEquals(1.3f, document.getVersion(), 0); assertEquals(1.3f, document.getDocument().getVersion(), 0); assertNull(document.getDocumentCatalog().getVersion()); } try (PDDocument document = new PDDocument()) { document.setVersion(1.3f); assertEquals(1.4f, document.getVersion(), 0); assertEquals(1.4f, document.getDocument().getVersion(), 0); assertEquals("1.4", document.getDocumentCatalog().getVersion()); document.setVersion(1.5f); assertEquals(1.5f, document.getVersion(), 0); assertEquals(1.4f, document.getDocument().getVersion(), 0); assertEquals("1.5", document.getDocumentCatalog().getVersion()); } }
/** * Test get/setVersion. * @throws IOException if something went wrong */
Test get/setVersion
testVersions
{ "repo_name": "kalaspuffar/pdfbox", "path": "pdfbox/src/test/java/org/apache/pdfbox/pdmodel/TestPDDocument.java", "license": "apache-2.0", "size": 7830 }
[ "java.io.IOException", "org.junit.jupiter.api.Assertions" ]
import java.io.IOException; import org.junit.jupiter.api.Assertions;
import java.io.*; import org.junit.jupiter.api.*;
[ "java.io", "org.junit.jupiter" ]
java.io; org.junit.jupiter;
898,398
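A hedged sketch of the version-upgrade behaviour the test exercises, using only the PDDocument calls that already appear in the entry: an upgrade lands in the document catalog while the header keeps its default.

import java.io.IOException;
import org.apache.pdfbox.pdmodel.PDDocument;

public class VersionUpgradeSketch {
    public static void main(String[] args) throws IOException {
        try (PDDocument document = new PDDocument()) {
            document.setVersion(1.5f);
            System.out.println(document.getVersion());                      // 1.5  (overall version)
            System.out.println(document.getDocument().getVersion());        // 1.4  (header stays at its default)
            System.out.println(document.getDocumentCatalog().getVersion()); // 1.5  (upgrade recorded in the catalog)
        }
    }
}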
@Override @Before public void setUp() throws Exception { super.setUp(); entityManager = getEntityManager(); logger = Logger.getLogger(getClass()); createReportingPaymentData(); instance = new TotalPaymentSummaryReportService(); TestsHelper.setField(instance, "logger", logger); TestsHelper.setField(instance, "entityManager", entityManager); TestsHelper.setField(instance, "reportName", "SC-007-001B Summary of Total Payments"); TestsHelper.setField(instance, "chartWidth", 550); TestsHelper.setField(instance, "chartHeight", 300); }
void function() throws Exception { super.setUp(); entityManager = getEntityManager(); logger = Logger.getLogger(getClass()); createReportingPaymentData(); instance = new TotalPaymentSummaryReportService(); TestsHelper.setField(instance, STR, logger); TestsHelper.setField(instance, STR, entityManager); TestsHelper.setField(instance, STR, STR); TestsHelper.setField(instance, STR, 550); TestsHelper.setField(instance, STR, 300); }
/** * <p> * Sets up the unit tests. * </p> * * @throws Exception to JUnit. */
Sets up the unit tests.
setUp
{ "repo_name": "NASA-Tournament-Lab/CoECI-OPM-Service-Credit-Redeposit-Deposit-Application", "path": "Code/SCRD_BRE/src/java/tests/gov/opm/scrd/services/impl/reporting/TotalPaymentSummaryReportServiceTests.java", "license": "apache-2.0", "size": 4473 }
[ "gov.opm.scrd.TestsHelper", "gov.opm.scrd.services.impl.reporting.TotalPaymentSummaryReportService", "org.jboss.logging.Logger" ]
import gov.opm.scrd.TestsHelper; import gov.opm.scrd.services.impl.reporting.TotalPaymentSummaryReportService; import org.jboss.logging.Logger;
import gov.opm.scrd.*; import gov.opm.scrd.services.impl.reporting.*; import org.jboss.logging.*;
[ "gov.opm.scrd", "org.jboss.logging" ]
gov.opm.scrd; org.jboss.logging;
2,320,829
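TestsHelper.setField is not shown in the entry; it presumably injects private fields via reflection. Below is a minimal, self-contained sketch of that pattern with a hypothetical target class standing in for the report service.

import java.lang.reflect.Field;

public class SetFieldSketch {
    // Hypothetical service with a private field, standing in for TotalPaymentSummaryReportService.
    static class Service {
        private int chartWidth;
    }

    // Sets a private field by name, roughly what a TestsHelper.setField would do.
    static void setField(Object target, String name, Object value) throws Exception {
        Field field = target.getClass().getDeclaredField(name);
        field.setAccessible(true);
        field.set(target, value);
    }

    public static void main(String[] args) throws Exception {
        Service service = new Service();
        setField(service, "chartWidth", 550);
        System.out.println(service.chartWidth); // 550
    }
}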
@Test public void whenInsertEightThenExchangeEight() { Machine m = new Machine(); m.decompose(8); List<String> res = m.exchangeAll(); List<String> list = new ArrayList<>(); list.add(String.format("%6d%8d", 5, 111)); list.add(String.format("%6d%8d", 11111, 111)); assertThat(res, is(list)); }
void function() { Machine m = new Machine(); m.decompose(8); List<String> res = m.exchangeAll(); List<String> list = new ArrayList<>(); list.add(String.format(STR, 5, 111)); list.add(String.format(STR, 11111, 111)); assertThat(res, is(list)); }
/** * Checks whether a value of eight can be exchanged. */
Checks whether a value of eight can be exchanged
whenInsertEightThenExchangeEight
{ "repo_name": "Ravmouse/vvasilyev", "path": "chapter_002/src/test/java/ru/job4j/changemachine/MachineTest.java", "license": "apache-2.0", "size": 1990 }
[ "java.util.ArrayList", "java.util.List", "org.hamcrest.core.Is", "org.junit.Assert" ]
import java.util.ArrayList; import java.util.List; import org.hamcrest.core.Is; import org.junit.Assert;
import java.util.*; import org.hamcrest.core.*; import org.junit.*;
[ "java.util", "org.hamcrest.core", "org.junit" ]
java.util; org.hamcrest.core; org.junit;
1,436,822
@ServiceMethod(returns = ReturnType.SINGLE) private Mono<PagedResponse<KeyInner>> listByWorkspaceSinglePageAsync( String resourceGroupName, String workspaceName, Context context) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getSubscriptionId() is required and cannot be null.")); } if (resourceGroupName == null) { return Mono .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); } if (workspaceName == null) { return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null.")); } final String apiVersion = "2021-06-01"; final String accept = "application/json"; context = this.client.mergeContext(context); return service .listByWorkspace( this.client.getEndpoint(), apiVersion, this.client.getSubscriptionId(), resourceGroupName, workspaceName, accept, context) .map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)); }
@ServiceMethod(returns = ReturnType.SINGLE) Mono<PagedResponse<KeyInner>> function( String resourceGroupName, String workspaceName, Context context) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( STR)); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( STR)); } if (resourceGroupName == null) { return Mono .error(new IllegalArgumentException(STR)); } if (workspaceName == null) { return Mono.error(new IllegalArgumentException(STR)); } final String apiVersion = STR; final String accept = STR; context = this.client.mergeContext(context); return service .listByWorkspace( this.client.getEndpoint(), apiVersion, this.client.getSubscriptionId(), resourceGroupName, workspaceName, accept, context) .map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)); }
/** * Returns a list of keys in a workspace. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return list of keys along with {@link PagedResponse} on successful completion of {@link Mono}. */
Returns a list of keys in a workspace
listByWorkspaceSinglePageAsync
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/synapse/azure-resourcemanager-synapse/src/main/java/com/azure/resourcemanager/synapse/implementation/KeysClientImpl.java", "license": "mit", "size": 42981 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod", "com.azure.core.http.rest.PagedResponse", "com.azure.core.http.rest.PagedResponseBase", "com.azure.core.util.Context", "com.azure.resourcemanager.synapse.fluent.models.KeyInner" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.PagedResponse; import com.azure.core.http.rest.PagedResponseBase; import com.azure.core.util.Context; import com.azure.resourcemanager.synapse.fluent.models.KeyInner;
import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.core.util.*; import com.azure.resourcemanager.synapse.fluent.models.*;
[ "com.azure.core", "com.azure.resourcemanager" ]
com.azure.core; com.azure.resourcemanager;
1,065,150
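The method reports bad input by returning Mono.error rather than throwing, so validation failures surface through the reactive pipeline instead of at call time. A minimal sketch of that pattern with plain Project Reactor; the helper name lookup and its parameter are made up for illustration.

import reactor.core.publisher.Mono;

public class MonoValidationSketch {
    // Returns Mono.error for invalid input instead of throwing synchronously.
    static Mono<String> lookup(String workspaceName) {
        if (workspaceName == null) {
            return Mono.error(new IllegalArgumentException(
                    "Parameter workspaceName is required and cannot be null."));
        }
        return Mono.just("keys-of-" + workspaceName);
    }

    public static void main(String[] args) {
        lookup("my-workspace").subscribe(System.out::println);   // keys-of-my-workspace
        lookup(null).subscribe(
                System.out::println,
                error -> System.out.println("failed: " + error.getMessage()));
    }
}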
private void writeValueMetadata() throws XMLStreamException { start(WaterMLConstants.QN_METADATA); start(WaterMLConstants.QN_TVP_MEASUREMENT_METADATA); empty(WaterMLConstants.QN_NIL_REASON); addXlinkHrefAttr("missing"); endInline(WaterMLConstants.QN_TVP_MEASUREMENT_METADATA); endInline(WaterMLConstants.QN_METADATA); }
void function() throws XMLStreamException { start(WaterMLConstants.QN_METADATA); start(WaterMLConstants.QN_TVP_MEASUREMENT_METADATA); empty(WaterMLConstants.QN_NIL_REASON); addXlinkHrefAttr(STR); endInline(WaterMLConstants.QN_TVP_MEASUREMENT_METADATA); endInline(WaterMLConstants.QN_METADATA); }
/** * Write missing value metadata to stream * * @throws XMLStreamException * If an error occurs when writing to stream */
Write missing value metadata to stream
writeValueMetadata
{ "repo_name": "ahuarte47/SOS", "path": "coding/wml-v20/src/main/java/org/n52/sos/encode/streaming/WmlTVPEncoderv20XmlStreamWriter.java", "license": "gpl-2.0", "size": 12289 }
[ "javax.xml.stream.XMLStreamException", "org.n52.sos.ogc.wml.WaterMLConstants" ]
import javax.xml.stream.XMLStreamException; import org.n52.sos.ogc.wml.WaterMLConstants;
import javax.xml.stream.*; import org.n52.sos.ogc.wml.*;
[ "javax.xml", "org.n52.sos" ]
javax.xml; org.n52.sos;
468,696
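The writer above emits nested metadata elements with an empty nilReason element carrying an href of "missing". A rough equivalent with plain StAX, using hypothetical element and attribute names rather than the project's WaterMLConstants and xlink namespace handling.

import java.io.StringWriter;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;

public class MetadataWriterSketch {
    public static void main(String[] args) throws XMLStreamException {
        StringWriter out = new StringWriter();
        XMLStreamWriter writer = XMLOutputFactory.newInstance().createXMLStreamWriter(out);
        writer.writeStartElement("metadata");
        writer.writeStartElement("measurementMetadata");
        // Empty element with an attribute, mirroring empty(...) followed by addXlinkHrefAttr("missing").
        writer.writeEmptyElement("nilReason");
        writer.writeAttribute("href", "missing");
        writer.writeEndElement(); // measurementMetadata
        writer.writeEndElement(); // metadata
        writer.flush();
        System.out.println(out);
    }
}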
default StringMap supplyStringMap() { return new JdkMapAdapterStringMap(supplyContextData()); }
default StringMap supplyStringMap() { return new JdkMapAdapterStringMap(supplyContextData()); }
/** * Returns the context data as a StringMap. * @return the context data in a StringMap. */
Returns the context data as a StringMap
supplyStringMap
{ "repo_name": "apache/logging-log4j2", "path": "log4j-core/src/main/java/org/apache/logging/log4j/core/util/ContextDataProvider.java", "license": "apache-2.0", "size": 1553 }
[ "org.apache.logging.log4j.core.impl.JdkMapAdapterStringMap", "org.apache.logging.log4j.util.StringMap" ]
import org.apache.logging.log4j.core.impl.JdkMapAdapterStringMap; import org.apache.logging.log4j.util.StringMap;
import org.apache.logging.log4j.core.impl.*; import org.apache.logging.log4j.util.*;
[ "org.apache.logging" ]
org.apache.logging;
2,020,833
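A hedged sketch of a provider that uses the default method above. It assumes, as the entry's imports suggest, that supplyContextData() returns a plain Map<String, String> which JdkMapAdapterStringMap wraps without copying; the fixed context value is made up.

import java.util.Collections;
import java.util.Map;
import org.apache.logging.log4j.core.util.ContextDataProvider;
import org.apache.logging.log4j.util.StringMap;

public class StaticContextDataProvider implements ContextDataProvider {
    @Override
    public Map<String, String> supplyContextData() {
        // Hypothetical fixed context; a real provider would read thread or request state.
        return Collections.singletonMap("tenant", "acme");
    }

    public static void main(String[] args) {
        StringMap data = new StaticContextDataProvider().supplyStringMap();
        System.out.println(data.getValue("tenant")); // acme
    }
}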
public java.util.List<fr.lip6.move.pnml.hlpn.multisets.hlapi.ContainsHLAPI> getSubterm_multisets_ContainsHLAPI(){ java.util.List<fr.lip6.move.pnml.hlpn.multisets.hlapi.ContainsHLAPI> retour = new ArrayList<fr.lip6.move.pnml.hlpn.multisets.hlapi.ContainsHLAPI>(); for (Term elemnt : getSubterm()) { if(elemnt.getClass().equals(fr.lip6.move.pnml.hlpn.multisets.impl.ContainsImpl.class)){ retour.add(new fr.lip6.move.pnml.hlpn.multisets.hlapi.ContainsHLAPI( (fr.lip6.move.pnml.hlpn.multisets.Contains)elemnt )); } } return retour; }
java.util.List<fr.lip6.move.pnml.hlpn.multisets.hlapi.ContainsHLAPI> function(){ java.util.List<fr.lip6.move.pnml.hlpn.multisets.hlapi.ContainsHLAPI> retour = new ArrayList<fr.lip6.move.pnml.hlpn.multisets.hlapi.ContainsHLAPI>(); for (Term elemnt : getSubterm()) { if(elemnt.getClass().equals(fr.lip6.move.pnml.hlpn.multisets.impl.ContainsImpl.class)){ retour.add(new fr.lip6.move.pnml.hlpn.multisets.hlapi.ContainsHLAPI( (fr.lip6.move.pnml.hlpn.multisets.Contains)elemnt )); } } return retour; }
/** * This accessor returns a list of encapsulated subelements, only of the ContainsHLAPI kind. * WARNING: this method can create a lot of new objects in memory. */
This accessor returns a list of encapsulated subelements, only of the ContainsHLAPI kind. WARNING: this method can create a lot of new objects in memory
getSubterm_multisets_ContainsHLAPI
{ "repo_name": "lhillah/pnmlframework", "path": "pnmlFw-HLPN/src/fr/lip6/move/pnml/hlpn/integers/hlapi/DivisionHLAPI.java", "license": "epl-1.0", "size": 108424 }
[ "fr.lip6.move.pnml.hlpn.terms.Term", "java.util.ArrayList", "java.util.List" ]
import fr.lip6.move.pnml.hlpn.terms.Term; import java.util.ArrayList; import java.util.List;
import fr.lip6.move.pnml.hlpn.terms.*; import java.util.*;
[ "fr.lip6.move", "java.util" ]
fr.lip6.move; java.util;
1,132,502
protected void addAttributePropertyDescriptor(Object object) { itemPropertyDescriptors.add (createItemPropertyDescriptor (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(), getResourceLocator(), getString("_UI_FSM_attribute_feature"), getString("_UI_PropertyDescriptor_description", "_UI_FSM_attribute_feature", "_UI_FSM_type"), OCCIPackage.Literals.FSM__ATTRIBUTE, true, false, true, null, null, null)); }
void function(Object object) { itemPropertyDescriptors.add (createItemPropertyDescriptor (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(), getResourceLocator(), getString(STR), getString(STR, STR, STR), OCCIPackage.Literals.FSM__ATTRIBUTE, true, false, true, null, null, null)); }
/** * This adds a property descriptor for the Attribute feature. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */
This adds a property descriptor for the Attribute feature.
addAttributePropertyDescriptor
{ "repo_name": "occiware/OCCI-Studio", "path": "plugins/org.eclipse.cmf.occi.core.edit/src-gen/org/eclipse/cmf/occi/core/provider/FSMItemProvider.java", "license": "epl-1.0", "size": 10191 }
[ "org.eclipse.cmf.occi.core.OCCIPackage", "org.eclipse.emf.edit.provider.ComposeableAdapterFactory" ]
import org.eclipse.cmf.occi.core.OCCIPackage; import org.eclipse.emf.edit.provider.ComposeableAdapterFactory;
import org.eclipse.cmf.occi.core.*; import org.eclipse.emf.edit.provider.*;
[ "org.eclipse.cmf", "org.eclipse.emf" ]
org.eclipse.cmf; org.eclipse.emf;
748,660
@JRubyMethod(name = "put_cstring") public ByteBuf putCString(ThreadContext context, final IRubyObject value) throws UnsupportedEncodingException { if (value instanceof RubyFixnum) { RubyString str = ((RubyFixnum) value).to_s(); String string = str.asJavaString(); this.writePosition += writeCharacters(string); } else if (value instanceof RubyString || value instanceof RubySymbol) { RubyString string; if (value instanceof RubySymbol) { string = (RubyString) ((RubySymbol) value).to_s(context); } else { string = (RubyString) value; } string = convertToUtf8(context, string); String javaString = string.asJavaString(); verifyNoNulls(javaString); this.writePosition += writeCharacters(javaString); } else { throw getRuntime().newTypeError(format("Invalid type for put_cstring: %s", value)); } return this; }
@JRubyMethod(name = STR) ByteBuf function(ThreadContext context, final IRubyObject value) throws UnsupportedEncodingException { if (value instanceof RubyFixnum) { RubyString str = ((RubyFixnum) value).to_s(); String string = str.asJavaString(); this.writePosition += writeCharacters(string); } else if (value instanceof RubyString || value instanceof RubySymbol) { RubyString string; if (value instanceof RubySymbol) { string = (RubyString) ((RubySymbol) value).to_s(context); } else { string = (RubyString) value; } string = convertToUtf8(context, string); String javaString = string.asJavaString(); verifyNoNulls(javaString); this.writePosition += writeCharacters(javaString); } else { throw getRuntime().newTypeError(format(STR, value)); } return this; }
/** * Put a cstring onto the buffer. * * @param value The cstring to write. * * @author Durran Jordan * @since 2015.09.26 * @version 4.0.0 */
Put a cstring onto the buffer
putCString
{ "repo_name": "mongodb/bson-ruby", "path": "src/main/org/bson/ByteBuf.java", "license": "apache-2.0", "size": 21536 }
[ "java.io.UnsupportedEncodingException", "org.jruby.RubyFixnum", "org.jruby.RubyString", "org.jruby.RubySymbol", "org.jruby.anno.JRubyMethod", "org.jruby.runtime.ThreadContext", "org.jruby.runtime.builtin.IRubyObject" ]
import java.io.UnsupportedEncodingException; import org.jruby.RubyFixnum; import org.jruby.RubyString; import org.jruby.RubySymbol; import org.jruby.anno.JRubyMethod; import org.jruby.runtime.ThreadContext; import org.jruby.runtime.builtin.IRubyObject;
import java.io.*; import org.jruby.*; import org.jruby.anno.*; import org.jruby.runtime.*; import org.jruby.runtime.builtin.*;
[ "java.io", "org.jruby", "org.jruby.anno", "org.jruby.runtime" ]
java.io; org.jruby; org.jruby.anno; org.jruby.runtime;
2,910,394
// ! Creation date of the project. public Date getCreationDate() { return m_project.getConfiguration().getCreationDate(); }
Date function() { return m_project.getConfiguration().getCreationDate(); }
/** * Returns the creation date of the project. This is the date when the project was first written * to the database. * * @return The creation date of the project. */
Returns the creation date of the project. This is the date when the project was first written to the database
getCreationDate
{ "repo_name": "dgrif/binnavi", "path": "src/main/java/com/google/security/zynamics/binnavi/API/disassembly/Project.java", "license": "apache-2.0", "size": 26832 }
[ "java.util.Date" ]
import java.util.Date;
import java.util.*;
[ "java.util" ]
java.util;
894,903
public static String getSaltedHash(String password) throws Exception { /* signature reconstructed from the javadoc; saltLen and hash(...) are assumed to be defined elsewhere in the class */ byte[] salt = SecureRandom.getInstance("SHA1PRNG").generateSeed(saltLen); // store the salt with the password return Base64.encodeBase64String(salt) + "$" + hash(password, salt); }
byte[] salt = SecureRandom.getInstance(STR).generateSeed(saltLen); return Base64.encodeBase64String(salt) + "$" + hash(password, salt); }
/** * Computes a salted PBKDF2 hash of given plaintext password * suitable for storing in a database. * Empty passwords are not supported. * * @param password which will be hashed. * @return random salt combined with hashed password. These two parts are * divided by sign $ * @throws java.lang.Exception if an error occurs during password hashing. */
Computes a salted PBKDF2 hash of given plaintext password suitable for storing in a database. Empty passwords are not supported
getSaltedHash
{ "repo_name": "apelttom/ParqueoPublico", "path": "src/controller/PasswordHash.java", "license": "gpl-3.0", "size": 3091 }
[ "java.security.SecureRandom", "org.apache.commons.codec.binary.Base64" ]
import java.security.SecureRandom; import org.apache.commons.codec.binary.Base64;
import java.security.*; import org.apache.commons.codec.binary.*;
[ "java.security", "org.apache.commons" ]
java.security; org.apache.commons;
1,878,167
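The entry's hash(...) helper is not shown. Below is a self-contained PBKDF2 sketch with the standard javax.crypto API; the iteration count, key length and salt length are made-up values, and java.util.Base64 replaces commons-codec so the sketch stays JDK-only.

import java.security.SecureRandom;
import java.util.Base64;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;

public class Pbkdf2Sketch {
    public static void main(String[] args) throws Exception {
        byte[] salt = SecureRandom.getInstance("SHA1PRNG").generateSeed(32); // assumed salt length
        PBEKeySpec spec = new PBEKeySpec("s3cret".toCharArray(), salt, 65536, 256); // assumed iterations/key length
        byte[] hash = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1").generateSecret(spec).getEncoded();
        // Store salt and hash together, separated by '$', as in the entry.
        System.out.println(Base64.getEncoder().encodeToString(salt) + "$"
                + Base64.getEncoder().encodeToString(hash));
    }
}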
Observable<Sites> listAsync();
Observable<Sites> listAsync();
/** * Get all apps for a subscription. * * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable for the request */
Get all apps for a subscription
listAsync
{ "repo_name": "hovsepm/azure-sdk-for-java", "path": "appservice/resource-manager/v2018_02_01/src/main/java/com/microsoft/azure/management/appservice/v2018_02_01/WebApps.java", "license": "mit", "size": 204730 }
[ "com.microsoft.azure.management.appservice.v2018_02_01.Sites" ]
import com.microsoft.azure.management.appservice.v2018_02_01.Sites;
import com.microsoft.azure.management.appservice.v2018_02_01.*;
[ "com.microsoft.azure" ]
com.microsoft.azure;
1,026,911
// if stand is true, it means the player will stand the play // If stand is false, it means the player will fold. boolean stand = true; // An arrayList of player which will only takes people who continue to play or stand. ArrayList<Player> standOrFold = new ArrayList<Player>(); // playerWon is the player who win the game. // playerWon is equal to null if nobody win the game. Player playerWon = null; System.out.println( "== Dealing Cards\n" ); //give initial cards for (int j=0; j<group.size(); j++ ){ group.get(j).addCard( d.dealCard() ); group.get(j).addCard( d.dealCard() ); } for( Player player : group){ if(player.getID() == true){ System.out.println( "============== Human " + player.getName() + " Cards ========" ); player.printHand(); } } // ask all players if they want to stand or not for( Player player : group){ if(player.getID() == true){ stand = player.stand(); if(stand){ standOrFold.add(player); } }else{ stand = player.stand(); // If the computer is the last player of the game, and everyone else folded // The last computer will automatically is the winner. if(standOrFold.size() == 0 && player == group.get(group.size() - 1) ){ System.out.println( "Computer " + player.getName() + " is the only player left, so they stand automatically" ); standOrFold.add(player); } else if(stand){ System.out.println( "Computer " + player.getName() + " Stands" ); standOrFold.add(player); } else{ System.out.println( "Computer " + player.getName() + " Folds" ); } } } for( Player player : group){ if(player.getID()== false){ System.out.println( "============== Computer " + player.getName() + " Cards ========" ); player.printHand(); } } // Nobody is playing if(standOrFold.size() == 0){ playerWon = null; } // Only one player else if(standOrFold.size() == 1){ playerWon = standOrFold.get(0); } // More than one player standing in the game. else{ Collections.sort(standOrFold); // First player of the collection has the best hand. Collections.reverse(standOrFold); // First player and second player have same hand. if(standOrFold.get(0) == standOrFold.get(1)){ playerWon = null; } // First one win the game. else{ playerWon = standOrFold.get(0); } } // have everyone throw in their cards for(Player player : group){ player.newHand(); } // Re-order the turn Player first = group.remove(0); group.add(first); return playerWon; }
boolean stand = true; ArrayList<Player> standOrFold = new ArrayList<Player>(); Player playerWon = null; System.out.println( STR ); for (int j=0; j<group.size(); j++ ){ group.get(j).addCard( d.dealCard() ); group.get(j).addCard( d.dealCard() ); } for( Player player : group){ if(player.getID() == true){ System.out.println( STR + player.getName() + STR ); player.printHand(); } } for( Player player : group){ if(player.getID() == true){ stand = player.stand(); if(stand){ standOrFold.add(player); } }else{ stand = player.stand(); if(standOrFold.size() == 0 && player == group.get(group.size() - 1) ){ System.out.println( STR + player.getName() + STR ); standOrFold.add(player); } else if(stand){ System.out.println( STR + player.getName() + STR ); standOrFold.add(player); } else{ System.out.println( STR + player.getName() + STR ); } } } for( Player player : group){ if(player.getID()== false){ System.out.println( STR + player.getName() + STR ); player.printHand(); } } if(standOrFold.size() == 0){ playerWon = null; } else if(standOrFold.size() == 1){ playerWon = standOrFold.get(0); } else{ Collections.sort(standOrFold); Collections.reverse(standOrFold); if(standOrFold.get(0) == standOrFold.get(1)){ playerWon = null; } else{ playerWon = standOrFold.get(0); } } for(Player player : group){ player.newHand(); } Player first = group.remove(0); group.add(first); return playerWon; }
/** * Plays a single hand of poker * * @param group The group of human/computer players in the game. * @param d The deck * @return The winning Player, or null if there is no single winner */
Plays a single hand of poker
playHand
{ "repo_name": "quangvu1994/PokerGame", "path": "src/Poker.java", "license": "mit", "size": 6519 }
[ "java.util.ArrayList", "java.util.Collections" ]
import java.util.ArrayList; import java.util.Collections;
import java.util.*;
[ "java.util" ]
java.util;
740,013
private String getClassNameForClassId(final User user, final String domain, final int classId, final ConnectionContext connectionContext) throws RemoteException { final String className; if (!this.classKeyCache.isDomainCached(domain)) { LOG.info("class key cache does not contain class ids for domain '" + domain + "', need to fill the cache first!"); this.getClasses(user, domain, connectionContext); } className = this.classKeyCache.getClassNameForClassId(domain, classId); if (className == null) { final String message = "could not find class with id '" + classId + "' at domain '" + domain + "' for user '" + user.getName() + "', class key map does not contain id."; LOG.error(message); // return null; throw new RemoteException(message); } return className; } // </editor-fold>
String function(final User user, final String domain, final int classId, final ConnectionContext connectionContext) throws RemoteException { final String className; if (!this.classKeyCache.isDomainCached(domain)) { LOG.info(STR + domain + STR); this.getClasses(user, domain, connectionContext); } className = this.classKeyCache.getClassNameForClassId(domain, classId); if (className == null) { final String message = STR + classId + STR + domain + STR + user.getName() + STR; LOG.error(message); throw new RemoteException(message); } return className; }
/** * Looks up the class name for the given class id at the given domain, filling the class key cache first if necessary. * * @param user the user performing the lookup * @param domain the domain to search * @param classId the id of the class * @param connectionContext the connection context * * @return the class name for the given class id * * @throws RemoteException if no class with the given id is known at the domain */
Looks up the class name for the given class id at the given domain, filling the class key cache first if necessary
getClassNameForClassId
{ "repo_name": "cismet/cids-server", "path": "src/main/java/de/cismet/cidsx/client/connector/RESTfulInterfaceConnector.java", "license": "lgpl-3.0", "size": 121871 }
[ "de.cismet.connectioncontext.ConnectionContext", "java.rmi.RemoteException" ]
import de.cismet.connectioncontext.ConnectionContext; import java.rmi.RemoteException;
import de.cismet.connectioncontext.*; import java.rmi.*;
[ "de.cismet.connectioncontext", "java.rmi" ]
de.cismet.connectioncontext; java.rmi;
2,001,747
void updateCount(INodesInPath iip, long nsDelta, long ssDelta, short oldRep, short newRep, boolean checkQuota) throws QuotaExceededException { final INodeFile fileINode = iip.getLastINode().asFile(); EnumCounters<StorageType> typeSpaceDeltas = getStorageTypeDeltas(fileINode.getStoragePolicyID(), ssDelta, oldRep, newRep); updateCount(iip, iip.length() - 1, new QuotaCounts.Builder().nameSpace(nsDelta). storageSpace(ssDelta * (newRep - oldRep)). typeSpaces(typeSpaceDeltas).build(), checkQuota); }
void updateCount(INodesInPath iip, long nsDelta, long ssDelta, short oldRep, short newRep, boolean checkQuota) throws QuotaExceededException { final INodeFile fileINode = iip.getLastINode().asFile(); EnumCounters<StorageType> typeSpaceDeltas = getStorageTypeDeltas(fileINode.getStoragePolicyID(), ssDelta, oldRep, newRep); updateCount(iip, iip.length() - 1, new QuotaCounts.Builder().nameSpace(nsDelta). storageSpace(ssDelta * (newRep - oldRep)). typeSpaces(typeSpaceDeltas).build(), checkQuota); }
/** * Update usage count with replication factor change due to setReplication */
Update usage count with replication factor change due to setReplication
updateCount
{ "repo_name": "matrix-stone/hadoop", "path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java", "license": "apache-2.0", "size": 57399 }
[ "org.apache.hadoop.fs.StorageType", "org.apache.hadoop.hdfs.protocol.QuotaExceededException", "org.apache.hadoop.hdfs.util.EnumCounters" ]
import org.apache.hadoop.fs.StorageType; import org.apache.hadoop.hdfs.protocol.QuotaExceededException; import org.apache.hadoop.hdfs.util.EnumCounters;
import org.apache.hadoop.fs.*; import org.apache.hadoop.hdfs.protocol.*; import org.apache.hadoop.hdfs.util.*;
[ "org.apache.hadoop" ]
org.apache.hadoop;
448,337
@Test public void testFindGuildPermissionId() { System.out.println("findGuildPermissionId"); final String[] values = { "ActivatePlaceables", // NOI18N. "ActivateWorldEvent", // NOI18N. "Admin", // NOI18N. "ClaimableActivate", // NOI18N. "ClaimableClaim", // NOI18N. "ClaimableEditOptions", // NOI18N. "ClaimableSpend", // NOI18N. "DecorationAdmin", // NOI18N. "DepositCoinsStash", // NOI18N. "DepositCoinsTrove", // NOI18N. "DepositItemsStash", // NOI18N. "DepositItemsTrove", // NOI18N. "EditAnthem", // NOI18N. "EditAssemblyQueue", // NOI18N. "EditBGM", // NOI18N. "EditEmblem", // NOI18N. "EditMonument", // NOI18N. "EditMOTD", // NOI18N. "EditRoles", // NOI18N. "MissionControl", // NOI18N. "OpenPortal", // NOI18N. "PlaceArenaDecoration", // NOI18N. "PlaceDecoration", // NOI18N. "PurchaseUpgrades", // NOI18N. "SetGuildHall", // NOI18N. "SpendFuel", // NOI18N. "StartingRole", // NOI18N. "TeamAdmin", // NOI18N. "WithdrawCoinsStash", // NOI18N. "WithdrawCoinsTrove", // NOI18N. "WithdrawItemsStash", // NOI18N. "WithdrawItemsTrove", // NOI18N. null, "" // NOI18N. }; final GuildPermissionId[] expResults = { GuildPermissionId.ACTIVATE_PLACEABLES, GuildPermissionId.ACTIVATE_WORLD_EVENT, GuildPermissionId.ADMIN, GuildPermissionId.CLAIMABLE_ACTIVATE, GuildPermissionId.CLAIMABLE_CLAIM, GuildPermissionId.CLAIMABLE_EDIT_OPTIONS, GuildPermissionId.CLAIMABLE_SPEND, GuildPermissionId.DECORATION_ADMIN, GuildPermissionId.DEPOSIT_COINS_STASH, GuildPermissionId.DEPOSIT_COINS_TROVE, GuildPermissionId.DEPOSIT_ITEMS_STASH, GuildPermissionId.DEPOSIT_ITEMS_TROVE, GuildPermissionId.EDIT_ANTHEM, GuildPermissionId.EDIT_ASSEMBLY_QUEUE, GuildPermissionId.EDIT_BGM, GuildPermissionId.EDIT_EMBLEM, GuildPermissionId.EDIT_MONUMENT, GuildPermissionId.EDIT_MOTD, GuildPermissionId.EDIT_ROLES, GuildPermissionId.MISSION_CONTROL, GuildPermissionId.OPEN_PORTAL, GuildPermissionId.PLACE_ARENA_DECORATION, GuildPermissionId.PLACE_DECORATION, GuildPermissionId.PURCHASE_UPGRADES, GuildPermissionId.SET_GUILD_HALL, GuildPermissionId.SPEND_FUEL, GuildPermissionId.STARTING_ROLE, GuildPermissionId.TEAM_ADMIN, GuildPermissionId.WITHDRAW_COINS_STASH, GuildPermissionId.WITHDRAW_COINS_TROVE, GuildPermissionId.WITHDRAW_ITEMS_STASH, GuildPermissionId.WITHDRAW_ITEMS_TROVE, GuildPermissionId.UNKNOWN, GuildPermissionId.UNKNOWN }; assertEquals(values.length, expResults.length); IntStream.range(0, values.length). forEach(index -> { final String value = values[index]; final GuildPermissionId expResult = expResults[index]; final GuildPermissionId result = EnumValueFactory.INSTANCE.mapEnumValue(GuildPermissionId.class, value); assertEquals(expResult, result); }); }
void function() { System.out.println(STR); final String[] values = { STR, STR, "Admin", STR, STR, STR, STR, STR, STR, STR, STR, STR, STR, STR, STR, STR, STR, STR, STR, STR, STR, STR, STR, STR, STR, STR, STR, STR, STR, STR, STR, STR, null, "" }; final GuildPermissionId[] expResults = { GuildPermissionId.ACTIVATE_PLACEABLES, GuildPermissionId.ACTIVATE_WORLD_EVENT, GuildPermissionId.ADMIN, GuildPermissionId.CLAIMABLE_ACTIVATE, GuildPermissionId.CLAIMABLE_CLAIM, GuildPermissionId.CLAIMABLE_EDIT_OPTIONS, GuildPermissionId.CLAIMABLE_SPEND, GuildPermissionId.DECORATION_ADMIN, GuildPermissionId.DEPOSIT_COINS_STASH, GuildPermissionId.DEPOSIT_COINS_TROVE, GuildPermissionId.DEPOSIT_ITEMS_STASH, GuildPermissionId.DEPOSIT_ITEMS_TROVE, GuildPermissionId.EDIT_ANTHEM, GuildPermissionId.EDIT_ASSEMBLY_QUEUE, GuildPermissionId.EDIT_BGM, GuildPermissionId.EDIT_EMBLEM, GuildPermissionId.EDIT_MONUMENT, GuildPermissionId.EDIT_MOTD, GuildPermissionId.EDIT_ROLES, GuildPermissionId.MISSION_CONTROL, GuildPermissionId.OPEN_PORTAL, GuildPermissionId.PLACE_ARENA_DECORATION, GuildPermissionId.PLACE_DECORATION, GuildPermissionId.PURCHASE_UPGRADES, GuildPermissionId.SET_GUILD_HALL, GuildPermissionId.SPEND_FUEL, GuildPermissionId.STARTING_ROLE, GuildPermissionId.TEAM_ADMIN, GuildPermissionId.WITHDRAW_COINS_STASH, GuildPermissionId.WITHDRAW_COINS_TROVE, GuildPermissionId.WITHDRAW_ITEMS_STASH, GuildPermissionId.WITHDRAW_ITEMS_TROVE, GuildPermissionId.UNKNOWN, GuildPermissionId.UNKNOWN }; assertEquals(values.length, expResults.length); IntStream.range(0, values.length). forEach(index -> { final String value = values[index]; final GuildPermissionId expResult = expResults[index]; final GuildPermissionId result = EnumValueFactory.INSTANCE.mapEnumValue(GuildPermissionId.class, value); assertEquals(expResult, result); }); }
/** * Test of GuildPermissionId. */
Test of GuildPermissionId
testFindGuildPermissionId
{ "repo_name": "fabricebouye/gw2-web-api-mapping", "path": "test/api/web/gw2/mapping/v2/guild/permissions/GuildPermissionsUtilsTest.java", "license": "bsd-3-clause", "size": 4922 }
[ "java.util.stream.IntStream", "org.junit.jupiter.api.Assertions" ]
import java.util.stream.IntStream; import org.junit.jupiter.api.Assertions;
import java.util.stream.*; import org.junit.jupiter.api.*;
[ "java.util", "org.junit.jupiter" ]
java.util; org.junit.jupiter;
2,077,090
public static String getBaseDirectory() { return GenericTestUtils.getTestDir("dfs").getAbsolutePath() + File.separator; }
static String function() { return GenericTestUtils.getTestDir("dfs").getAbsolutePath() + File.separator; }
/** * Get the base directory for any DFS cluster whose configuration does * not explicitly set it. This is done via * {@link GenericTestUtils#getTestDir()}. * @return a directory for use as a miniDFS filesystem. */
Get the base directory for any DFS cluster whose configuration does not explicitly set it. This is done via <code>GenericTestUtils#getTestDir()</code>
getBaseDirectory
{ "repo_name": "szegedim/hadoop", "path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java", "license": "apache-2.0", "size": 117291 }
[ "java.io.File", "org.apache.hadoop.test.GenericTestUtils" ]
import java.io.File; import org.apache.hadoop.test.GenericTestUtils;
import java.io.*; import org.apache.hadoop.test.*;
[ "java.io", "org.apache.hadoop" ]
java.io; org.apache.hadoop;
1,070,626
//----------------------------------------------------------------------- @FromString public static LocalDateTime parse(String str) { return parse(str, ISODateTimeFormat.localDateOptionalTimeParser()); }
static LocalDateTime function(String str) { return parse(str, ISODateTimeFormat.localDateOptionalTimeParser()); }
/** * Parses a {@code LocalDateTime} from the specified string. * <p> * This uses {@link ISODateTimeFormat#localDateOptionalTimeParser()}. * * @param str the string to parse, not null * @since 2.0 */
Parses a LocalDateTime from the specified string. This uses <code>ISODateTimeFormat#localDateOptionalTimeParser()</code>
parse
{ "repo_name": "Guardiola31337/joda-time", "path": "src/main/java/org/joda/time/LocalDateTime.java", "license": "apache-2.0", "size": 92293 }
[ "org.joda.time.format.ISODateTimeFormat" ]
import org.joda.time.format.ISODateTimeFormat;
import org.joda.time.format.*;
[ "org.joda.time" ]
org.joda.time;
1,934,722
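A short usage sketch of the parser above; because it uses localDateOptionalTimeParser(), the time part may be omitted. The input values are arbitrary.

import org.joda.time.LocalDateTime;

public class LocalDateTimeParseSketch {
    public static void main(String[] args) {
        // Full date and time.
        System.out.println(LocalDateTime.parse("2014-06-15T10:30:45")); // 2014-06-15T10:30:45.000
        // The time part is optional with the ISO parser used above; it defaults to midnight.
        System.out.println(LocalDateTime.parse("2014-06-15"));          // 2014-06-15T00:00:00.000
    }
}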
public ArrayList<Player> getPlayers() { return m_players; }
ArrayList<Player> function() { return m_players; }
/** * Returns the arraylist of players on the current map * @return the list of players on the current map */
Returns the arraylist of players on the current map
getPlayers
{ "repo_name": "Nushio/ArenaClient", "path": "src/net/k3rnel/arena/client/backend/ClientMapMatrix.java", "license": "gpl-3.0", "size": 10600 }
[ "java.util.ArrayList", "net.k3rnel.arena.client.backend.entity.Player" ]
import java.util.ArrayList; import net.k3rnel.arena.client.backend.entity.Player;
import java.util.*; import net.k3rnel.arena.client.backend.entity.*;
[ "java.util", "net.k3rnel.arena" ]
java.util; net.k3rnel.arena;
1,808,255
GenericPK getRelatedDummyPK(String relationName, Map<String, ? extends Object> byAndFields, GenericValue value) throws GenericEntityException;
GenericPK getRelatedDummyPK(String relationName, Map<String, ? extends Object> byAndFields, GenericValue value) throws GenericEntityException;
/** * Get a dummy primary key for the named Related Entity for the GenericValue. * * @param relationName * String containing the relation name which is the combination * of relation.title and relation.rel-entity-name as specified in * the entity XML definition file * @param byAndFields * the fields that must equal in order to keep; may be null * @param value * GenericValue instance containing the entity * @return GenericPK containing a possibly incomplete PrimaryKey object * representing the related entity or entities */
Get a dummy primary key for the named Related Entity for the GenericValue
getRelatedDummyPK
{ "repo_name": "yuri0x7c1/ofbiz-explorer", "path": "src/test/resources/apache-ofbiz-17.12.04/framework/entity/src/main/java/org/apache/ofbiz/entity/Delegator.java", "license": "apache-2.0", "size": 38666 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
2,086,714
void close(boolean closeFromWebUI) throws IOException, InterruptedException { close(closeFromWebUI, false); }
void close(boolean closeFromWebUI) throws IOException, InterruptedException { close(closeFromWebUI, false); }
/** * Cleanup after CoronaJobTracker operation. * If a remote CJT error occurred, use the overloaded version. * @param closeFromWebUI Indicates whether called from web UI. * @throws IOException * @throws InterruptedException */
Cleanup after CoronaJobTracker operation. If a remote CJT error occurred, use the overloaded version
close
{ "repo_name": "nvoron23/hadoop-20", "path": "src/contrib/corona/src/java/org/apache/hadoop/mapred/CoronaJobTracker.java", "license": "apache-2.0", "size": 121458 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
2,009,281