method
stringlengths
13
441k
clean_method
stringlengths
7
313k
doc
stringlengths
17
17.3k
comment
stringlengths
3
1.42k
method_name
stringlengths
1
273
extra
dict
imports
sequence
imports_info
stringlengths
19
34.8k
cluster_imports_info
stringlengths
15
3.66k
libraries
sequence
libraries_info
stringlengths
6
661
id
int64
0
2.92M
public double computeHyperplanePoint(double x) { return (w[0] + w[1] * x) / -w[2]; } } private static final int POINT_SIZE = 10; private static final int VIDEO_HEIGHT = 240; private static final int VIDEO_WIDTH = 320; private static final int GRAPH_WIDTH = 600; private static final int GRAPH_HEIGHT = 600; private static final int AXIS_WIDTH = 500; private static final int AXIS_HEIGHT = 500; private static final int AXIS_OFFSET_X = 50; private static final int AXIS_OFFSET_Y = 50; private static final int AXIS_EXTENSION = 5; private static final String[] CLASSES = { "RED", "BLUE" }; private static final Float[][] COLOURS = { RGBColour.RED, RGBColour.BLUE }; private VideoCaptureComponent vc; private ColourSpace colourSpace = ColourSpace.HS; private JTextField featureField; private MBFImage image; private ImageComponent imageComp; private BufferedImage bimg; private JComboBox<String> classType; private double[] lastMean; private JTextField guess; private volatile List<double[]> points; private volatile List<Integer> classes; private volatile SimplePerceptron classifier;
double function(double x) { return (w[0] + w[1] * x) / -w[2]; } } private static final int POINT_SIZE = 10; private static final int VIDEO_HEIGHT = 240; private static final int VIDEO_WIDTH = 320; private static final int GRAPH_WIDTH = 600; private static final int GRAPH_HEIGHT = 600; private static final int AXIS_WIDTH = 500; private static final int AXIS_HEIGHT = 500; private static final int AXIS_OFFSET_X = 50; private static final int AXIS_OFFSET_Y = 50; private static final int AXIS_EXTENSION = 5; private static final String[] CLASSES = { "RED", "BLUE" }; private static final Float[][] COLOURS = { RGBColour.RED, RGBColour.BLUE }; private VideoCaptureComponent vc; private ColourSpace colourSpace = ColourSpace.HS; private JTextField featureField; private MBFImage image; private ImageComponent imageComp; private BufferedImage bimg; private JComboBox<String> classType; private double[] lastMean; private JTextField guess; private volatile List<double[]> points; private volatile List<Integer> classes; private volatile SimplePerceptron classifier;
/** * Compute y-ordinate of a point on the hyperplane given the x-ordinate * * @param x * the x-ordinate * @return the y-ordinate */
Compute y-ordinate of a point on the hyperplane given the x-ordinate
computeHyperplanePoint
{ "repo_name": "jonhare/WAIS-ML101", "path": "slides-app/src/main/java/uk/ac/soton/ecs/jsh2/ml101/LinearClassifierDemo.java", "license": "apache-2.0", "size": 12840 }
[ "java.awt.image.BufferedImage", "java.util.List", "javax.swing.JComboBox", "javax.swing.JTextField", "org.openimaj.image.DisplayUtilities", "org.openimaj.image.MBFImage", "org.openimaj.image.colour.ColourSpace", "org.openimaj.image.colour.RGBColour", "uk.ac.soton.ecs.jsh2.ml101.utils.VideoCaptureComponent" ]
import java.awt.image.BufferedImage; import java.util.List; import javax.swing.JComboBox; import javax.swing.JTextField; import org.openimaj.image.DisplayUtilities; import org.openimaj.image.MBFImage; import org.openimaj.image.colour.ColourSpace; import org.openimaj.image.colour.RGBColour; import uk.ac.soton.ecs.jsh2.ml101.utils.VideoCaptureComponent;
import java.awt.image.*; import java.util.*; import javax.swing.*; import org.openimaj.image.*; import org.openimaj.image.colour.*; import uk.ac.soton.ecs.jsh2.ml101.utils.*;
[ "java.awt", "java.util", "javax.swing", "org.openimaj.image", "uk.ac.soton" ]
java.awt; java.util; javax.swing; org.openimaj.image; uk.ac.soton;
2,449,174
conf = new OzoneConfiguration(); path = GenericTestUtils .getTempPath(TestContainerStateMachineFailures.class.getSimpleName()); File baseDir = new File(path); baseDir.mkdirs(); conf.setTimeDuration(HDDS_CONTAINER_REPORT_INTERVAL, 200, TimeUnit.MILLISECONDS); conf.setTimeDuration(HDDS_COMMAND_STATUS_REPORT_INTERVAL, 200, TimeUnit.MILLISECONDS); conf.setTimeDuration(HDDS_SCM_WATCHER_TIMEOUT, 1000, TimeUnit.MILLISECONDS); conf.setTimeDuration(OZONE_SCM_STALENODE_INTERVAL, 3, TimeUnit.SECONDS); conf.setQuietMode(false); cluster = MiniOzoneCluster.newBuilder(conf).setNumDatanodes(1).setHbInterval(200) .build(); cluster.waitForClusterToBeReady(); //the easiest way to create an open container is creating a key client = OzoneClientFactory.getClient(conf); objectStore = client.getObjectStore(); volumeName = "testcontainerstatemachinefailures"; bucketName = volumeName; objectStore.createVolume(volumeName); objectStore.getVolume(volumeName).createBucket(bucketName); }
conf = new OzoneConfiguration(); path = GenericTestUtils .getTempPath(TestContainerStateMachineFailures.class.getSimpleName()); File baseDir = new File(path); baseDir.mkdirs(); conf.setTimeDuration(HDDS_CONTAINER_REPORT_INTERVAL, 200, TimeUnit.MILLISECONDS); conf.setTimeDuration(HDDS_COMMAND_STATUS_REPORT_INTERVAL, 200, TimeUnit.MILLISECONDS); conf.setTimeDuration(HDDS_SCM_WATCHER_TIMEOUT, 1000, TimeUnit.MILLISECONDS); conf.setTimeDuration(OZONE_SCM_STALENODE_INTERVAL, 3, TimeUnit.SECONDS); conf.setQuietMode(false); cluster = MiniOzoneCluster.newBuilder(conf).setNumDatanodes(1).setHbInterval(200) .build(); cluster.waitForClusterToBeReady(); client = OzoneClientFactory.getClient(conf); objectStore = client.getObjectStore(); volumeName = STR; bucketName = volumeName; objectStore.createVolume(volumeName); objectStore.getVolume(volumeName).createBucket(bucketName); }
/** * Create a MiniDFSCluster for testing. * * @throws IOException */
Create a MiniDFSCluster for testing
init
{ "repo_name": "littlezhou/hadoop", "path": "hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestContainerStateMachineFailures.java", "license": "apache-2.0", "size": 5759 }
[ "java.io.File", "java.util.concurrent.TimeUnit", "org.apache.hadoop.hdds.conf.OzoneConfiguration", "org.apache.hadoop.ozone.MiniOzoneCluster", "org.apache.hadoop.ozone.client.OzoneClientFactory", "org.apache.hadoop.test.GenericTestUtils" ]
import java.io.File; import java.util.concurrent.TimeUnit; import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.ozone.MiniOzoneCluster; import org.apache.hadoop.ozone.client.OzoneClientFactory; import org.apache.hadoop.test.GenericTestUtils;
import java.io.*; import java.util.concurrent.*; import org.apache.hadoop.hdds.conf.*; import org.apache.hadoop.ozone.*; import org.apache.hadoop.ozone.client.*; import org.apache.hadoop.test.*;
[ "java.io", "java.util", "org.apache.hadoop" ]
java.io; java.util; org.apache.hadoop;
2,485,806
public CreateThumbnailJobResponse createThumbnailJob(String pipelineName, String presetName, String sourceKey, String targetKeyPrefix, Area delogoArea, Area crop) { ThumbnailSource source = new ThumbnailSource(); source.setKey(sourceKey); ThumbnailTarget target = new ThumbnailTarget(); target.setKeyPrefix(targetKeyPrefix); CreateThumbnailJobRequest request = new CreateThumbnailJobRequest().withPipelineName(pipelineName).withPresetName(presetName) .withSource(source).withTarget(target).withDelogoArea(delogoArea).withCrop(crop); return createThumbnailJob(request); }
CreateThumbnailJobResponse function(String pipelineName, String presetName, String sourceKey, String targetKeyPrefix, Area delogoArea, Area crop) { ThumbnailSource source = new ThumbnailSource(); source.setKey(sourceKey); ThumbnailTarget target = new ThumbnailTarget(); target.setKeyPrefix(targetKeyPrefix); CreateThumbnailJobRequest request = new CreateThumbnailJobRequest().withPipelineName(pipelineName).withPresetName(presetName) .withSource(source).withTarget(target).withDelogoArea(delogoArea).withCrop(crop); return createThumbnailJob(request); }
/** * Creates a thumbnail job and return job ID. * * @param pipelineName The name of a pipeline. * @param presetName The name of a thumbnail preset. * @param sourceKey The key of source object. * @param targetKeyPrefix The property container of target object. * @param delogoArea The property container of delogo Area. * @param crop The property container of crop Area. * * @return the unique ID of the new thumbnail job. */
Creates a thumbnail job and return job ID
createThumbnailJob
{ "repo_name": "baidubce/bce-sdk-java", "path": "src/main/java/com/baidubce/services/media/MediaClient.java", "license": "apache-2.0", "size": 91398 }
[ "com.baidubce.services.media.model.Area", "com.baidubce.services.media.model.CreateThumbnailJobRequest", "com.baidubce.services.media.model.CreateThumbnailJobResponse", "com.baidubce.services.media.model.ThumbnailSource", "com.baidubce.services.media.model.ThumbnailTarget" ]
import com.baidubce.services.media.model.Area; import com.baidubce.services.media.model.CreateThumbnailJobRequest; import com.baidubce.services.media.model.CreateThumbnailJobResponse; import com.baidubce.services.media.model.ThumbnailSource; import com.baidubce.services.media.model.ThumbnailTarget;
import com.baidubce.services.media.model.*;
[ "com.baidubce.services" ]
com.baidubce.services;
1,847,330
public void forceKeyspaceCleanup(String keyspaceName, String... columnFamilies) throws IOException, ExecutionException, InterruptedException;
void function(String keyspaceName, String... columnFamilies) throws IOException, ExecutionException, InterruptedException;
/** * Trigger a cleanup of keys on a single keyspace */
Trigger a cleanup of keys on a single keyspace
forceKeyspaceCleanup
{ "repo_name": "lalithsuresh/cassandra-c3", "path": "src/java/org/apache/cassandra/service/StorageServiceMBean.java", "license": "apache-2.0", "size": 20752 }
[ "java.io.IOException", "java.util.concurrent.ExecutionException" ]
import java.io.IOException; import java.util.concurrent.ExecutionException;
import java.io.*; import java.util.concurrent.*;
[ "java.io", "java.util" ]
java.io; java.util;
657,084
@Test public void verifyFT36() throws IOException, InterruptedException { Subscriber subscriber = subscriberDataService.create(new Subscriber( 3000000000L)); Subscription subscription1 = subscriptionService.createSubscription( subscriber.getCallingNumber(), rh.hindiLanguage(), sh.childPack(), SubscriptionOrigin.IVR); Subscription subscription2 = subscriptionService.createSubscription( subscriber.getCallingNumber(), rh.hindiLanguage(), sh.pregnancyPack(), SubscriptionOrigin.IVR); HttpPost httpPost = createInboxCallDetailsRequestHttpPost(new InboxCallDetailsRequest( 1234567890L, // callingNumber "A", // operator "AP", // circle 123456789012345L, // callId 123L, // callStartTime 456L, // callEndTime 123, // callDurationInPulses 1, // callStatus 1, // callDisconnectReason new HashSet<>(Arrays.asList( new CallDataRequest( subscription1.getSubscriptionId(), // subscriptionId sh.childPack().getName(), // subscriptionPack "123", // inboxWeekId "foo", // contentFileName 123L, // startTime 456L), // endTime new CallDataRequest( subscription2.getSubscriptionId(), // subscriptionId "12WeeksPack", // Invalid subscriptionPack "123", // inboxWeekId "foo", // contentFileName 123L, // startTime 456L) // endTime )))); // content String expectedJsonResponse = createFailureResponseJson("<subscriptionPack: Invalid><content: Invalid>"); assertTrue(SimpleHttpClient.execHttpRequest(httpPost, HttpStatus.SC_BAD_REQUEST, expectedJsonResponse, ADMIN_USERNAME, ADMIN_PASSWORD)); }
void function() throws IOException, InterruptedException { Subscriber subscriber = subscriberDataService.create(new Subscriber( 3000000000L)); Subscription subscription1 = subscriptionService.createSubscription( subscriber.getCallingNumber(), rh.hindiLanguage(), sh.childPack(), SubscriptionOrigin.IVR); Subscription subscription2 = subscriptionService.createSubscription( subscriber.getCallingNumber(), rh.hindiLanguage(), sh.pregnancyPack(), SubscriptionOrigin.IVR); HttpPost httpPost = createInboxCallDetailsRequestHttpPost(new InboxCallDetailsRequest( 1234567890L, "A", "AP", 123456789012345L, 123L, 456L, 123, 1, 1, new HashSet<>(Arrays.asList( new CallDataRequest( subscription1.getSubscriptionId(), sh.childPack().getName(), "123", "foo", 123L, 456L), new CallDataRequest( subscription2.getSubscriptionId(), STR, "123", "foo", 123L, 456L) )))); String expectedJsonResponse = createFailureResponseJson(STR); assertTrue(SimpleHttpClient.execHttpRequest(httpPost, HttpStatus.SC_BAD_REQUEST, expectedJsonResponse, ADMIN_USERNAME, ADMIN_PASSWORD)); }
/** * To verify the behavior of Save Inbox call Details API if provided beneficiary's subscriptionPack does not exist. */
To verify the behavior of Save Inbox call Details API if provided beneficiary's subscriptionPack does not exist
verifyFT36
{ "repo_name": "ngraczewski/mim", "path": "testing/src/test/java/org/motechproject/nms/testing/it/api/KilkariControllerBundleIT.java", "license": "bsd-3-clause", "size": 193191 }
[ "java.io.IOException", "java.util.Arrays", "java.util.HashSet", "org.apache.commons.httpclient.HttpStatus", "org.apache.http.client.methods.HttpPost", "org.junit.Assert", "org.motechproject.nms.api.web.contract.kilkari.CallDataRequest", "org.motechproject.nms.api.web.contract.kilkari.InboxCallDetailsRequest", "org.motechproject.nms.kilkari.domain.Subscriber", "org.motechproject.nms.kilkari.domain.Subscription", "org.motechproject.nms.kilkari.domain.SubscriptionOrigin", "org.motechproject.testing.osgi.http.SimpleHttpClient" ]
import java.io.IOException; import java.util.Arrays; import java.util.HashSet; import org.apache.commons.httpclient.HttpStatus; import org.apache.http.client.methods.HttpPost; import org.junit.Assert; import org.motechproject.nms.api.web.contract.kilkari.CallDataRequest; import org.motechproject.nms.api.web.contract.kilkari.InboxCallDetailsRequest; import org.motechproject.nms.kilkari.domain.Subscriber; import org.motechproject.nms.kilkari.domain.Subscription; import org.motechproject.nms.kilkari.domain.SubscriptionOrigin; import org.motechproject.testing.osgi.http.SimpleHttpClient;
import java.io.*; import java.util.*; import org.apache.commons.httpclient.*; import org.apache.http.client.methods.*; import org.junit.*; import org.motechproject.nms.api.web.contract.kilkari.*; import org.motechproject.nms.kilkari.domain.*; import org.motechproject.testing.osgi.http.*;
[ "java.io", "java.util", "org.apache.commons", "org.apache.http", "org.junit", "org.motechproject.nms", "org.motechproject.testing" ]
java.io; java.util; org.apache.commons; org.apache.http; org.junit; org.motechproject.nms; org.motechproject.testing;
2,469,260
private boolean validTypeOfVarExpression(Node expr) { // The expression must have two children: // - The typeOfVar keyword // - A string if (!checkParameterCount(expr, Keywords.TYPEOFVAR)) { return false; } if (!getCallArgument(expr, 0).isString()) { warnInvalid("name", expr); warnInvalidInside(Keywords.TYPEOFVAR.name, expr); return false; } return true; }
boolean function(Node expr) { if (!checkParameterCount(expr, Keywords.TYPEOFVAR)) { return false; } if (!getCallArgument(expr, 0).isString()) { warnInvalid("name", expr); warnInvalidInside(Keywords.TYPEOFVAR.name, expr); return false; } return true; }
/** * A typeOfVar expression must be of the form typeOfVar('name') */
A typeOfVar expression must be of the form typeOfVar('name')
validTypeOfVarExpression
{ "repo_name": "weiwl/closure-compiler", "path": "src/com/google/javascript/jscomp/parsing/TypeTransformationParser.java", "license": "apache-2.0", "size": 27576 }
[ "com.google.javascript.rhino.Node" ]
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.*;
[ "com.google.javascript" ]
com.google.javascript;
2,864,066
public static void notNegative(int value, String argumentName) throws KuraException { if (value < 0) { throw new KuraException(KuraErrorCode.CONFIGURATION_REQUIRED_ATTRIBUTE_MISSING, argumentName); } }
static void function(int value, String argumentName) throws KuraException { if (value < 0) { throw new KuraException(KuraErrorCode.CONFIGURATION_REQUIRED_ATTRIBUTE_MISSING, argumentName); } }
/** * Throws an EdcIllegalNullArgumentException if the value for the specified argument is null. * * @param obj * @param argumentName * @throws EdcIllegalNullArgumentException */
Throws an EdcIllegalNullArgumentException if the value for the specified argument is null
notNegative
{ "repo_name": "unverbraucht/kura", "path": "kura/org.eclipse.kura.core/src/main/java/org/eclipse/kura/core/util/ValidationUtil.java", "license": "epl-1.0", "size": 2768 }
[ "org.eclipse.kura.KuraErrorCode", "org.eclipse.kura.KuraException" ]
import org.eclipse.kura.KuraErrorCode; import org.eclipse.kura.KuraException;
import org.eclipse.kura.*;
[ "org.eclipse.kura" ]
org.eclipse.kura;
2,686,706
@Optional Property<String> repositoryId();
Property<String> repositoryId();
/** * The ID of the Repository at the remote Sesame HTTP host. * * Default: polygene * * @return The configured ID at the remote Sesame HTTP host. */
The ID of the Repository at the remote Sesame HTTP host. Default: polygene
repositoryId
{ "repo_name": "Qi4j/qi4j-sdk", "path": "libraries/rdf/src/main/java/org/apache/polygene/library/rdf/repository/HttpRepositoryConfiguration.java", "license": "apache-2.0", "size": 1542 }
[ "org.apache.polygene.api.property.Property" ]
import org.apache.polygene.api.property.Property;
import org.apache.polygene.api.property.*;
[ "org.apache.polygene" ]
org.apache.polygene;
1,022,137
@Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View v = inflater.inflate(R.layout.fragment_pager_list, container, false); View tv = v.findViewById(R.id.text); ((TextView)tv).setText("Fragment #" + mNum); return v; }
View function(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View v = inflater.inflate(R.layout.fragment_pager_list, container, false); View tv = v.findViewById(R.id.text); ((TextView)tv).setText(STR + mNum); return v; }
/** * The Fragment's UI is just a simple text view showing its * instance number. */
The Fragment's UI is just a simple text view showing its instance number
onCreateView
{ "repo_name": "AndroidX/androidx", "path": "samples/Support4Demos/src/main/java/com/example/android/supportv4/app/FragmentPagerSupport.java", "license": "apache-2.0", "size": 4724 }
[ "android.os.Bundle", "android.view.LayoutInflater", "android.view.View", "android.view.ViewGroup", "android.widget.TextView" ]
import android.os.Bundle; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.TextView;
import android.os.*; import android.view.*; import android.widget.*;
[ "android.os", "android.view", "android.widget" ]
android.os; android.view; android.widget;
1,251,312
public StatisticByDomainPersistence getStatisticByDomainPersistence() { return statisticByDomainPersistence; }
StatisticByDomainPersistence function() { return statisticByDomainPersistence; }
/** * Returns the statistic by domain persistence. * * @return the statistic by domain persistence */
Returns the statistic by domain persistence
getStatisticByDomainPersistence
{ "repo_name": "openegovplatform/OEPv2", "path": "oep-dossier-portlet/docroot/WEB-INF/src/org/oep/dossiermgt/service/base/DossierTagServiceBaseImpl.java", "license": "apache-2.0", "size": 49313 }
[ "org.oep.dossiermgt.service.persistence.StatisticByDomainPersistence" ]
import org.oep.dossiermgt.service.persistence.StatisticByDomainPersistence;
import org.oep.dossiermgt.service.persistence.*;
[ "org.oep.dossiermgt" ]
org.oep.dossiermgt;
112,508
@Test public void testCoveredStartKey() throws Exception { String table = "tableCoveredStartKey"; try { setupTable(table); assertEquals(ROWKEYS.length, countRows()); // Mess it up by creating an overlap in the metadata HRegionInfo hriOverlap = createRegion(conf, tbl.getTableDescriptor(), Bytes.toBytes("A2"), Bytes.toBytes("B2")); TEST_UTIL.getHBaseCluster().getMaster().assignRegion(hriOverlap); TEST_UTIL.getHBaseCluster().getMaster().getAssignmentManager() .waitForAssignment(hriOverlap); HBaseFsck hbck = doFsck(conf, false); assertErrors(hbck, new ERROR_CODE[] { ERROR_CODE.OVERLAP_IN_REGION_CHAIN, ERROR_CODE.OVERLAP_IN_REGION_CHAIN }); assertEquals(3, hbck.getOverlapGroups(table).size()); assertEquals(ROWKEYS.length, countRows()); // fix the problem. doFsck(conf, true); // verify that overlaps are fixed HBaseFsck hbck2 = doFsck(conf, false); assertErrors(hbck2, new ERROR_CODE[0]); assertEquals(0, hbck2.getOverlapGroups(table).size()); assertEquals(ROWKEYS.length, countRows()); } finally { deleteTable(table); } }
void function() throws Exception { String table = STR; try { setupTable(table); assertEquals(ROWKEYS.length, countRows()); HRegionInfo hriOverlap = createRegion(conf, tbl.getTableDescriptor(), Bytes.toBytes("A2"), Bytes.toBytes("B2")); TEST_UTIL.getHBaseCluster().getMaster().assignRegion(hriOverlap); TEST_UTIL.getHBaseCluster().getMaster().getAssignmentManager() .waitForAssignment(hriOverlap); HBaseFsck hbck = doFsck(conf, false); assertErrors(hbck, new ERROR_CODE[] { ERROR_CODE.OVERLAP_IN_REGION_CHAIN, ERROR_CODE.OVERLAP_IN_REGION_CHAIN }); assertEquals(3, hbck.getOverlapGroups(table).size()); assertEquals(ROWKEYS.length, countRows()); doFsck(conf, true); HBaseFsck hbck2 = doFsck(conf, false); assertErrors(hbck2, new ERROR_CODE[0]); assertEquals(0, hbck2.getOverlapGroups(table).size()); assertEquals(ROWKEYS.length, countRows()); } finally { deleteTable(table); } }
/** * This creates and fixes a bad table where a region overlaps two regions -- * a start key contained in another region and its end key is contained in * yet another region. */
This creates and fixes a bad table where a region overlaps two regions -- a start key contained in another region and its end key is contained in yet another region
testCoveredStartKey
{ "repo_name": "ay65535/hbase-0.94.0", "path": "src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java", "license": "apache-2.0", "size": 33952 }
[ "org.apache.hadoop.hbase.HRegionInfo", "org.apache.hadoop.hbase.util.hbck.HbckTestingUtil", "org.junit.Assert" ]
import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.util.hbck.HbckTestingUtil; import org.junit.Assert;
import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.util.hbck.*; import org.junit.*;
[ "org.apache.hadoop", "org.junit" ]
org.apache.hadoop; org.junit;
353,693
private void createDummySSLSocketFactory() throws NotificationServiceException { try { SSLContext sslContext = SSLContext.getInstance(DEFAULT_SSL_PROTOCOL); sslContext.init(null, new TrustManager[] { new X509TrustManager() {
void function() throws NotificationServiceException { try { SSLContext sslContext = SSLContext.getInstance(DEFAULT_SSL_PROTOCOL); sslContext.init(null, new TrustManager[] { new X509TrustManager() {
/** * Creates dummy SSL Socket Factory factory which should be used by setting 'true' to * {@code NotificationProperties.SSL_IGNORE_CERTIFICATE_ERRORS}. * @throws NotificationServiceException */
Creates dummy SSL Socket Factory factory which should be used by setting 'true' to NotificationProperties.SSL_IGNORE_CERTIFICATE_ERRORS
createDummySSLSocketFactory
{ "repo_name": "Dhandapani/gluster-ovirt", "path": "backend/manager/tools/engine-notifier/engine-notifier-service/src/main/java/org/ovirt/engine/core/notifier/EngineMonitorService.java", "license": "apache-2.0", "size": 23318 }
[ "javax.net.ssl.SSLContext", "javax.net.ssl.TrustManager", "javax.net.ssl.X509TrustManager" ]
import javax.net.ssl.SSLContext; import javax.net.ssl.TrustManager; import javax.net.ssl.X509TrustManager;
import javax.net.ssl.*;
[ "javax.net" ]
javax.net;
1,883,523
public List<AbstractFilePatchInProgress> execute(@NotNull final List<? extends FilePatch> list, boolean useProjectRootAsPredefinedBase) { final PatchBaseDirectoryDetector directoryDetector = PatchBaseDirectoryDetector.getInstance(myProject); myUseProjectRootAsPredefinedBase = useProjectRootAsPredefinedBase; final List<PatchAndVariants> candidates = new ArrayList<>(list.size()); final List<FilePatch> newOrWithoutMatches = new ArrayList<>(); findCandidates(list, directoryDetector, candidates, newOrWithoutMatches); final MultiMap<VirtualFile, AbstractFilePatchInProgress> result = new MultiMap<>(); // process exact matches: if one, leave and extract. if several - leave only them filterExactMatches(candidates, result); // partially check by context selectByContextOrByStrip(candidates, result); // for text only // created or no variants workWithNotExisting(directoryDetector, newOrWithoutMatches, result); return new ArrayList<>(result.values()); }
List<AbstractFilePatchInProgress> function(@NotNull final List<? extends FilePatch> list, boolean useProjectRootAsPredefinedBase) { final PatchBaseDirectoryDetector directoryDetector = PatchBaseDirectoryDetector.getInstance(myProject); myUseProjectRootAsPredefinedBase = useProjectRootAsPredefinedBase; final List<PatchAndVariants> candidates = new ArrayList<>(list.size()); final List<FilePatch> newOrWithoutMatches = new ArrayList<>(); findCandidates(list, directoryDetector, candidates, newOrWithoutMatches); final MultiMap<VirtualFile, AbstractFilePatchInProgress> result = new MultiMap<>(); filterExactMatches(candidates, result); selectByContextOrByStrip(candidates, result); workWithNotExisting(directoryDetector, newOrWithoutMatches, result); return new ArrayList<>(result.values()); }
/** * Find the best matched bases for file patches; e.g. Unshelve has to use project dir as best base by default, * while Apply patch should process through context, because it may have been created outside IDE for a certain vcs root * * @param list * @param useProjectRootAsPredefinedBase if true then we use project dir as default base despite context matching * @return */
Find the best matched bases for file patches; e.g. Unshelve has to use project dir as best base by default, while Apply patch should process through context, because it may have been created outside IDE for a certain vcs root
execute
{ "repo_name": "jk1/intellij-community", "path": "platform/vcs-impl/src/com/intellij/openapi/vcs/changes/patch/MatchPatchPaths.java", "license": "apache-2.0", "size": 15707 }
[ "com.intellij.openapi.diff.impl.patch.FilePatch", "com.intellij.openapi.vfs.VirtualFile", "com.intellij.util.containers.MultiMap", "java.util.ArrayList", "java.util.List", "org.jetbrains.annotations.NotNull" ]
import com.intellij.openapi.diff.impl.patch.FilePatch; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.util.containers.MultiMap; import java.util.ArrayList; import java.util.List; import org.jetbrains.annotations.NotNull;
import com.intellij.openapi.diff.impl.patch.*; import com.intellij.openapi.vfs.*; import com.intellij.util.containers.*; import java.util.*; import org.jetbrains.annotations.*;
[ "com.intellij.openapi", "com.intellij.util", "java.util", "org.jetbrains.annotations" ]
com.intellij.openapi; com.intellij.util; java.util; org.jetbrains.annotations;
1,026,254
public GridSwapSpaceManager swap();
GridSwapSpaceManager function();
/** * Gets swap space manager. * * @return Swap space manager. */
Gets swap space manager
swap
{ "repo_name": "dlnufox/ignite", "path": "modules/core/src/main/java/org/apache/ignite/internal/GridKernalContext.java", "license": "apache-2.0", "size": 15723 }
[ "org.apache.ignite.internal.managers.swapspace.GridSwapSpaceManager" ]
import org.apache.ignite.internal.managers.swapspace.GridSwapSpaceManager;
import org.apache.ignite.internal.managers.swapspace.*;
[ "org.apache.ignite" ]
org.apache.ignite;
2,094,426
public static boolean isLastHttpContent(HttpContent httpContent) { return httpContent instanceof LastHttpContent; }
static boolean function(HttpContent httpContent) { return httpContent instanceof LastHttpContent; }
/** * Check if a given content is last httpContent. * * @param httpContent new content. * @return true or false. */
Check if a given content is last httpContent
isLastHttpContent
{ "repo_name": "wso2/carbon-transports", "path": "http/org.wso2.carbon.transport.http.netty/src/main/java/org/wso2/carbon/transport/http/netty/common/Util.java", "license": "apache-2.0", "size": 22428 }
[ "io.netty.handler.codec.http.HttpContent", "io.netty.handler.codec.http.LastHttpContent" ]
import io.netty.handler.codec.http.HttpContent; import io.netty.handler.codec.http.LastHttpContent;
import io.netty.handler.codec.http.*;
[ "io.netty.handler" ]
io.netty.handler;
2,682,237
public Builder addPaths( @CompileTimeConstant String arg, @Nullable NestedSet<PathFragment> values) { return addNestedSetInternal(arg, values); }
Builder function( @CompileTimeConstant String arg, @Nullable NestedSet<PathFragment> values) { return addNestedSetInternal(arg, values); }
/** * Adds the arg followed by the path fragments. * * <p>If values is empty, the arg isn't added. */
Adds the arg followed by the path fragments. If values is empty, the arg isn't added
addPaths
{ "repo_name": "dslomov/bazel", "path": "src/main/java/com/google/devtools/build/lib/analysis/actions/CustomCommandLine.java", "license": "apache-2.0", "size": 49236 }
[ "com.google.devtools.build.lib.collect.nestedset.NestedSet", "com.google.devtools.build.lib.vfs.PathFragment", "com.google.errorprone.annotations.CompileTimeConstant", "javax.annotation.Nullable" ]
import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.errorprone.annotations.CompileTimeConstant; import javax.annotation.Nullable;
import com.google.devtools.build.lib.collect.nestedset.*; import com.google.devtools.build.lib.vfs.*; import com.google.errorprone.annotations.*; import javax.annotation.*;
[ "com.google.devtools", "com.google.errorprone", "javax.annotation" ]
com.google.devtools; com.google.errorprone; javax.annotation;
2,911,323
public String quickStream( String filename, boolean toLoop, SimpleVector jPCTposition ) { return quickStream( filename, toLoop, jPCTposition, SoundSystemConfig.getDefaultAttenuation() ); }
String function( String filename, boolean toLoop, SimpleVector jPCTposition ) { return quickStream( filename, toLoop, jPCTposition, SoundSystemConfig.getDefaultAttenuation() ); }
/** * Creates a temporary non-priority source and streams it. Default values are * used for attenuation. After the source finishes playing, it is removed. * Returns a randomly generated name for the new source. NOTE: to make a * source created by this method permanant, call the setActive() method using * the return value for sourcename. * @param filename The name of the sound file to play at this source. * @param toLoop Should this source loop, or play only once. * @param jPCTposition SimpleVector containing jPCT coordinates. * @return The new sorce's name. */
Creates a temporary non-priority source and streams it. Default values are used for attenuation. After the source finishes playing, it is removed. source created by this method permanant, call the setActive() method using the return value for sourcename
quickStream
{ "repo_name": "rekh127/Catacomb-Snatch-Reloaded", "path": "paulscodesrc/paulscode/sound/SoundSystemJPCT.java", "license": "mit", "size": 154879 }
[ "com.threed.jpct.SimpleVector" ]
import com.threed.jpct.SimpleVector;
import com.threed.jpct.*;
[ "com.threed.jpct" ]
com.threed.jpct;
2,032,521
@Override public Adapter createTimedEdgeAdapter() { if (timedEdgeItemProvider == null) { timedEdgeItemProvider = new TimedEdgeItemProvider(this); } return timedEdgeItemProvider; } protected EventGuardedEdgeItemProvider eventGuardedEdgeItemProvider;
Adapter function() { if (timedEdgeItemProvider == null) { timedEdgeItemProvider = new TimedEdgeItemProvider(this); } return timedEdgeItemProvider; } protected EventGuardedEdgeItemProvider eventGuardedEdgeItemProvider;
/** * This creates an adapter for a {@link org.tud.inf.st.mbt.ocm.TimedEdge}. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */
This creates an adapter for a <code>org.tud.inf.st.mbt.ocm.TimedEdge</code>.
createTimedEdgeAdapter
{ "repo_name": "paetti1988/qmate", "path": "MATE/org.tud.inf.st.mbt.emf.edit/src-gen/org/tud/inf/st/mbt/ocm/provider/OcmItemProviderAdapterFactory.java", "license": "apache-2.0", "size": 9077 }
[ "org.eclipse.emf.common.notify.Adapter" ]
import org.eclipse.emf.common.notify.Adapter;
import org.eclipse.emf.common.notify.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
1,361,527
public boolean hasStaticMethod(String methodName) { List<MethodInfo> methods = getOperations(methodName); if (methods == null || methods.isEmpty()) { return false; } for (MethodInfo method : methods) { if (method.isStaticMethod()) { return true; } } return false; }
boolean function(String methodName) { List<MethodInfo> methods = getOperations(methodName); if (methods == null methods.isEmpty()) { return false; } for (MethodInfo method : methods) { if (method.isStaticMethod()) { return true; } } return false; }
/** * Do we have a static method with the given name. * <p/> * Shorthand method names for getters is supported, so you can pass in eg 'name' and Camel * will can find the real 'getName' method instead. * * @param methodName the method name * @return <tt>true</tt> if we have such a static method. */
Do we have a static method with the given name. Shorthand method names for getters is supported, so you can pass in eg 'name' and Camel will can find the real 'getName' method instead
hasStaticMethod
{ "repo_name": "CodeSmell/camel", "path": "components/camel-bean/src/main/java/org/apache/camel/component/bean/BeanInfo.java", "license": "apache-2.0", "size": 57994 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
1,516,807
protected static String printException(final Throwable throwable) { if (null == throwable) { return null; } final ByteArrayOutputStream baos = new ByteArrayOutputStream(); final PrintStream printStream = new PrintStream(baos); throwable.printStackTrace(printStream); String exceptionStr = ""; try { exceptionStr = baos.toString("UTF-8"); } catch (Exception ex) { exceptionStr = "Unavailable"; } finally { closeStream(printStream); closeStream(baos); } return exceptionStr; }
static String function(final Throwable throwable) { if (null == throwable) { return null; } final ByteArrayOutputStream baos = new ByteArrayOutputStream(); final PrintStream printStream = new PrintStream(baos); throwable.printStackTrace(printStream); String exceptionStr = STRUTF-8STRUnavailable"; } finally { closeStream(printStream); closeStream(baos); } return exceptionStr; }
/** * prints a exception into a string * @param throwable * @return */
prints a exception into a string
printException
{ "repo_name": "andrehertwig/admintool", "path": "admin-tools-dbbrowser/src/main/java/de/chandre/admintool/db/AdminToolDBBrowserServiceImpl.java", "license": "mit", "size": 14806 }
[ "java.io.ByteArrayOutputStream", "java.io.PrintStream" ]
import java.io.ByteArrayOutputStream; import java.io.PrintStream;
import java.io.*;
[ "java.io" ]
java.io;
2,838,040
public List<SQLStatement> getSQLStatements(Repository repository, ProgressMonitorListener monitor) throws KettleException { if (monitor != null) monitor.beginTask(Messages.getString("JobMeta.Monitor.GettingSQLNeededForThisJob"), nrJobEntries() + 1); //$NON-NLS-1$ List<SQLStatement> stats = new ArrayList<SQLStatement>(); for (int i = 0; i < nrJobEntries(); i++) { JobEntryCopy copy = getJobEntry(i); if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.GettingSQLForJobEntryCopy") + copy + "]"); //$NON-NLS-1$ //$NON-NLS-2$ List<SQLStatement> list = copy.getEntry().getSQLStatements(repository, this); stats.addAll(list); if (monitor != null) monitor.worked(1); } // Also check the sql for the logtable... if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.GettingSQLStatementsForJobLogTables")); //$NON-NLS-1$ if (logConnection != null && logTable != null && logTable.length() > 0) { Database db = new Database(logConnection); try { db.connect(); RowMetaInterface fields = Database.getJobLogrecordFields(false, useBatchId, logfieldUsed); String sql = db.getDDL(logTable, fields); if (sql != null && sql.length() > 0) { SQLStatement stat = new SQLStatement(Messages.getString("JobMeta.SQLFeedback.ThisJob"), logConnection, sql); //$NON-NLS-1$ stats.add(stat); } } catch (KettleDatabaseException dbe) { SQLStatement stat = new SQLStatement(Messages.getString("JobMeta.SQLFeedback.ThisJob"), logConnection, null); //$NON-NLS-1$ stat.setError(Messages.getString("JobMeta.SQLFeedback.ErrorObtainingJobLogTableInfo") + dbe.getMessage()); //$NON-NLS-1$ stats.add(stat); } finally { db.disconnect(); } } if (monitor != null) monitor.worked(1); if (monitor != null) monitor.done(); return stats; }
List<SQLStatement> function(Repository repository, ProgressMonitorListener monitor) throws KettleException { if (monitor != null) monitor.beginTask(Messages.getString(STR), nrJobEntries() + 1); List<SQLStatement> stats = new ArrayList<SQLStatement>(); for (int i = 0; i < nrJobEntries(); i++) { JobEntryCopy copy = getJobEntry(i); if (monitor != null) monitor.subTask(Messages.getString(STR) + copy + "]"); List<SQLStatement> list = copy.getEntry().getSQLStatements(repository, this); stats.addAll(list); if (monitor != null) monitor.worked(1); } if (monitor != null) monitor.subTask(Messages.getString(STR)); if (logConnection != null && logTable != null && logTable.length() > 0) { Database db = new Database(logConnection); try { db.connect(); RowMetaInterface fields = Database.getJobLogrecordFields(false, useBatchId, logfieldUsed); String sql = db.getDDL(logTable, fields); if (sql != null && sql.length() > 0) { SQLStatement stat = new SQLStatement(Messages.getString(STR), logConnection, sql); stats.add(stat); } } catch (KettleDatabaseException dbe) { SQLStatement stat = new SQLStatement(Messages.getString(STR), logConnection, null); stat.setError(Messages.getString(STR) + dbe.getMessage()); stats.add(stat); } finally { db.disconnect(); } } if (monitor != null) monitor.worked(1); if (monitor != null) monitor.done(); return stats; }
/** * Builds a list of all the SQL statements that this transformation needs in * order to work properly. * * @return An ArrayList of SQLStatement objects. */
Builds a list of all the SQL statements that this transformation needs in order to work properly
getSQLStatements
{ "repo_name": "icholy/geokettle-2.0", "path": "src/org/pentaho/di/job/JobMeta.java", "license": "lgpl-2.1", "size": 93295 }
[ "java.util.ArrayList", "java.util.List", "org.pentaho.di.core.ProgressMonitorListener", "org.pentaho.di.core.SQLStatement", "org.pentaho.di.core.database.Database", "org.pentaho.di.core.exception.KettleDatabaseException", "org.pentaho.di.core.exception.KettleException", "org.pentaho.di.core.row.RowMetaInterface", "org.pentaho.di.job.entry.JobEntryCopy", "org.pentaho.di.repository.Repository" ]
import java.util.ArrayList; import java.util.List; import org.pentaho.di.core.ProgressMonitorListener; import org.pentaho.di.core.SQLStatement; import org.pentaho.di.core.database.Database; import org.pentaho.di.core.exception.KettleDatabaseException; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.job.entry.JobEntryCopy; import org.pentaho.di.repository.Repository;
import java.util.*; import org.pentaho.di.core.*; import org.pentaho.di.core.database.*; import org.pentaho.di.core.exception.*; import org.pentaho.di.core.row.*; import org.pentaho.di.job.entry.*; import org.pentaho.di.repository.*;
[ "java.util", "org.pentaho.di" ]
java.util; org.pentaho.di;
1,238,401
public void setCustomActionButton(Drawable drawable, String description, OnClickListener listener) { }
void function(Drawable drawable, String description, OnClickListener listener) { }
/** * Sets/adds a custom action button to the {@link ToolbarLayout} if it is supported. * @param description The content description for the button. * @param listener The {@link OnClickListener} to use for clicks to the button. * @param buttonSource The {@link Bitmap} resource to use as the source for the button. */
Sets/adds a custom action button to the <code>ToolbarLayout</code> if it is supported
setCustomActionButton
{ "repo_name": "Bysmyyr/chromium-crosswalk", "path": "chrome/android/java/src/org/chromium/chrome/browser/toolbar/ToolbarLayout.java", "license": "bsd-3-clause", "size": 21183 }
[ "android.graphics.drawable.Drawable" ]
import android.graphics.drawable.Drawable;
import android.graphics.drawable.*;
[ "android.graphics" ]
android.graphics;
2,532,822
public double getUnitTotalPurchase(Composition to) { return (to.getQuantity() == 0) ? 0 : round(getTotalPurchase(to) / to.getQuantity(), PRICE_PRECISION); }
double function(Composition to) { return (to.getQuantity() == 0) ? 0 : round(getTotalPurchase(to) / to.getQuantity(), PRICE_PRECISION); }
/** * Returns the unitary total of purchase of the composition. * * @return double */
Returns the unitary total of purchase of the composition
getUnitTotalPurchase
{ "repo_name": "Esleelkartea/aonGTA", "path": "aongta_v1.0.0_src/Fuentes y JavaDoc/aon-ui-composite/src/com/code/aon/ui/composition/util/CompositionPriceProvider.java", "license": "gpl-2.0", "size": 11302 }
[ "com.code.aon.composition.Composition" ]
import com.code.aon.composition.Composition;
import com.code.aon.composition.*;
[ "com.code.aon" ]
com.code.aon;
966,928
EList<PointPropertyType> getPointRep();
EList<PointPropertyType> getPointRep();
/** * Returns the value of the '<em><b>Point Rep</b></em>' containment reference list. * The list contents are of type {@link net.opengis.gml311.PointPropertyType}. * <!-- begin-user-doc --> * <!-- end-user-doc --> * <!-- begin-model-doc --> * Deprecated with GML version 3.1.0. Use "pointProperty" instead. Included for backwards compatibility with GML 3.0.0. * <!-- end-model-doc --> * @return the value of the '<em>Point Rep</em>' containment reference list. * @see net.opengis.gml311.Gml311Package#getLinearRingType_PointRep() * @model containment="true" transient="true" volatile="true" derived="true" * extendedMetaData="kind='element' name='pointRep' namespace='##targetNamespace' group='#group:10'" * @generated */
Returns the value of the 'Point Rep' containment reference list. The list contents are of type <code>net.opengis.gml311.PointPropertyType</code>. Deprecated with GML version 3.1.0. Use "pointProperty" instead. Included for backwards compatibility with GML 3.0.0.
getPointRep
{ "repo_name": "geotools/geotools", "path": "modules/ogc/net.opengis.wmts/src/net/opengis/gml311/LinearRingType.java", "license": "lgpl-2.1", "size": 7691 }
[ "org.eclipse.emf.common.util.EList" ]
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.common.util.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
2,690,453
@ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<MicrosoftGraphExtensionPropertyInner>> createExtensionPropertiesWithResponseAsync( String applicationId, MicrosoftGraphExtensionPropertyInner body);
@ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<MicrosoftGraphExtensionPropertyInner>> createExtensionPropertiesWithResponseAsync( String applicationId, MicrosoftGraphExtensionPropertyInner body);
/** * Create new navigation property to extensionProperties for applications. * * @param applicationId key: id of application. * @param body New navigation property. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return represents an Azure Active Directory object. */
Create new navigation property to extensionProperties for applications
createExtensionPropertiesWithResponseAsync
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/resourcemanager/azure-resourcemanager-authorization/src/main/java/com/azure/resourcemanager/authorization/fluent/ApplicationsClient.java", "license": "mit", "size": 113060 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod", "com.azure.core.http.rest.Response", "com.azure.resourcemanager.authorization.fluent.models.MicrosoftGraphExtensionPropertyInner" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.Response; import com.azure.resourcemanager.authorization.fluent.models.MicrosoftGraphExtensionPropertyInner;
import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.resourcemanager.authorization.fluent.models.*;
[ "com.azure.core", "com.azure.resourcemanager" ]
com.azure.core; com.azure.resourcemanager;
2,756,533
private String jsonStringOf(Policy policy) throws IOException { generator.writeStartObject(); writeJsonKeyValue(JsonDocumentField.VERSION, policy.getVersion()); if (isNotNull(policy.getId())) { writeJsonKeyValue(JsonDocumentField.POLICY_ID, policy.getId()); } writeJsonArrayStart(JsonDocumentField.STATEMENT); for (Statement statement : policy.getStatements()) { generator.writeStartObject(); if (isNotNull(statement.getId())) { writeJsonKeyValue(JsonDocumentField.STATEMENT_ID, statement.getId()); } writeJsonKeyValue(JsonDocumentField.STATEMENT_EFFECT, statement .getEffect().toString()); List<Principal> principals = statement.getPrincipals(); if (isNotNull(principals) && !principals.isEmpty()) { writePrincipals(principals); } List<Action> actions = statement.getActions(); if (isNotNull(actions) && !actions.isEmpty()) { writeActions(actions); } List<Resource> resources = statement.getResources(); if (isNotNull(resources) && !resources.isEmpty()) { writeResources(resources); } List<Condition> conditions = statement.getConditions(); if (isNotNull(conditions) && !conditions.isEmpty()) { writeConditions(conditions); } generator.writeEndObject(); } writeJsonArrayEnd(); generator.writeEndObject(); generator.flush(); return writer.toString(); }
String function(Policy policy) throws IOException { generator.writeStartObject(); writeJsonKeyValue(JsonDocumentField.VERSION, policy.getVersion()); if (isNotNull(policy.getId())) { writeJsonKeyValue(JsonDocumentField.POLICY_ID, policy.getId()); } writeJsonArrayStart(JsonDocumentField.STATEMENT); for (Statement statement : policy.getStatements()) { generator.writeStartObject(); if (isNotNull(statement.getId())) { writeJsonKeyValue(JsonDocumentField.STATEMENT_ID, statement.getId()); } writeJsonKeyValue(JsonDocumentField.STATEMENT_EFFECT, statement .getEffect().toString()); List<Principal> principals = statement.getPrincipals(); if (isNotNull(principals) && !principals.isEmpty()) { writePrincipals(principals); } List<Action> actions = statement.getActions(); if (isNotNull(actions) && !actions.isEmpty()) { writeActions(actions); } List<Resource> resources = statement.getResources(); if (isNotNull(resources) && !resources.isEmpty()) { writeResources(resources); } List<Condition> conditions = statement.getConditions(); if (isNotNull(conditions) && !conditions.isEmpty()) { writeConditions(conditions); } generator.writeEndObject(); } writeJsonArrayEnd(); generator.writeEndObject(); generator.flush(); return writer.toString(); }
/** * Converts the given <code>Policy</code> into a JSON String. * * @param policy * the policy to be converted. * @return a JSON String of the specified policy object. */
Converts the given <code>Policy</code> into a JSON String
jsonStringOf
{ "repo_name": "aws/aws-sdk-java-v2", "path": "test/test-utils/src/main/java/software/amazon/awssdk/core/auth/policy/internal/JsonPolicyWriter.java", "license": "apache-2.0", "size": 13130 }
[ "java.io.IOException", "java.util.List", "software.amazon.awssdk.core.auth.policy.Action", "software.amazon.awssdk.core.auth.policy.Condition", "software.amazon.awssdk.core.auth.policy.Policy", "software.amazon.awssdk.core.auth.policy.Principal", "software.amazon.awssdk.core.auth.policy.Resource", "software.amazon.awssdk.core.auth.policy.Statement" ]
import java.io.IOException; import java.util.List; import software.amazon.awssdk.core.auth.policy.Action; import software.amazon.awssdk.core.auth.policy.Condition; import software.amazon.awssdk.core.auth.policy.Policy; import software.amazon.awssdk.core.auth.policy.Principal; import software.amazon.awssdk.core.auth.policy.Resource; import software.amazon.awssdk.core.auth.policy.Statement;
import java.io.*; import java.util.*; import software.amazon.awssdk.core.auth.policy.*;
[ "java.io", "java.util", "software.amazon.awssdk" ]
java.io; java.util; software.amazon.awssdk;
480,807
protected void useConfiguration(String... args) throws Exception { String[] actualArgs; if (useDynamicConfigs) { actualArgs = Arrays.copyOf(args, args.length + 1); actualArgs[args.length] = "--experimental_dynamic_configs=on"; } else { actualArgs = args; } masterConfig = createConfigurations(actualArgs); targetConfig = getTargetConfiguration(); configurationArgs = Arrays.asList(actualArgs); createBuildView(); }
void function(String... args) throws Exception { String[] actualArgs; if (useDynamicConfigs) { actualArgs = Arrays.copyOf(args, args.length + 1); actualArgs[args.length] = STR; } else { actualArgs = args; } masterConfig = createConfigurations(actualArgs); targetConfig = getTargetConfiguration(); configurationArgs = Arrays.asList(actualArgs); createBuildView(); }
/** * Sets host and target configuration using the specified options, falling back to the default * options for unspecified ones, and recreates the build view. * * @throws IllegalArgumentException */
Sets host and target configuration using the specified options, falling back to the default options for unspecified ones, and recreates the build view
useConfiguration
{ "repo_name": "mrdomino/bazel", "path": "src/test/java/com/google/devtools/build/lib/analysis/util/BuildViewTestCase.java", "license": "apache-2.0", "size": 77290 }
[ "java.util.Arrays" ]
import java.util.Arrays;
import java.util.*;
[ "java.util" ]
java.util;
2,373,933
@Override protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { processRequest(request, response); }
void function(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { processRequest(request, response); }
/** * Handles the HTTP <code>POST</code> method. * * @param request servlet request * @param response servlet response * @throws ServletException if a servlet-specific error occurs * @throws IOException if an I/O error occurs */
Handles the HTTP <code>POST</code> method
doPost
{ "repo_name": "ludoch/benchmarkjavawebapp", "path": "benchmark-java-webapp/src/main/java/com/google/appengine/benchmark/webapp/Servlet1.java", "license": "apache-2.0", "size": 2585 }
[ "java.io.IOException", "javax.servlet.ServletException", "javax.servlet.http.HttpServletRequest", "javax.servlet.http.HttpServletResponse" ]
import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse;
import java.io.*; import javax.servlet.*; import javax.servlet.http.*;
[ "java.io", "javax.servlet" ]
java.io; javax.servlet;
351,542
Resource getAppResource(AppRegistration appRegistration);
Resource getAppResource(AppRegistration appRegistration);
/** * Converts application's URI into Spring resource object. Supports File:, Http:, Maven: * and Docker: schemas * @param appRegistration the application registration * @return Returns {@link Resource} instance that corresponds to application's URI */
Converts application's URI into Spring resource object. Supports File:, Http:, Maven: and Docker: schemas
getAppResource
{ "repo_name": "trisberg/spring-cloud-dataflow", "path": "spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/service/AppRegistryService.java", "license": "apache-2.0", "size": 7206 }
[ "org.springframework.cloud.dataflow.core.AppRegistration", "org.springframework.core.io.Resource" ]
import org.springframework.cloud.dataflow.core.AppRegistration; import org.springframework.core.io.Resource;
import org.springframework.cloud.dataflow.core.*; import org.springframework.core.io.*;
[ "org.springframework.cloud", "org.springframework.core" ]
org.springframework.cloud; org.springframework.core;
1,925,616
@Test public void copy() throws Exception { flyway.setLocations("migration/dbsupport/postgresql/sql/copy"); flyway.migrate(); assertEquals(6, jdbcTemplate.queryForInt("select count(*) from copy_test")); }
void function() throws Exception { flyway.setLocations(STR); flyway.migrate(); assertEquals(6, jdbcTemplate.queryForInt(STR)); }
/** * Tests support for COPY FROM STDIN statements generated by pg_dump.. */
Tests support for COPY FROM STDIN statements generated by pg_dump.
copy
{ "repo_name": "Muni10/flyway", "path": "flyway-core/src/test/java/org/flywaydb/core/internal/dbsupport/postgresql/PostgreSQLMigrationMediumTest.java", "license": "apache-2.0", "size": 11242 }
[ "org.junit.Assert" ]
import org.junit.Assert;
import org.junit.*;
[ "org.junit" ]
org.junit;
2,088,515
private synchronized void addTernServerConfigurations( IConfigurationElement[] cf, List<IWebResourcesFinderTypeProvider> list) { for (IConfigurationElement ce : cf) { try { list.add((IWebResourcesFinderTypeProvider) ce .createExecutableExtension(CLASS)); Trace.trace( Trace.EXTENSION_POINT, " Loaded console connectors: " + ce.getAttribute(CLASS)); } catch (Throwable t) { Trace.trace( Trace.SEVERE, " Could not load console connectors: " + ce.getAttribute(CLASS), t); } } }
synchronized void function( IConfigurationElement[] cf, List<IWebResourcesFinderTypeProvider> list) { for (IConfigurationElement ce : cf) { try { list.add((IWebResourcesFinderTypeProvider) ce .createExecutableExtension(CLASS)); Trace.trace( Trace.EXTENSION_POINT, STR + ce.getAttribute(CLASS)); } catch (Throwable t) { Trace.trace( Trace.SEVERE, STR + ce.getAttribute(CLASS), t); } } }
/** * Load the tern server types. */
Load the tern server types
addTernServerConfigurations
{ "repo_name": "angelozerr/eclipse-wtp-webresources", "path": "org.eclipse.a.wst.html.webresources.core/src/org/eclipse/wst/html/webresources/internal/core/WebResourcesFinderTypeProviderManager.java", "license": "epl-1.0", "size": 5712 }
[ "java.util.List", "org.eclipse.core.runtime.IConfigurationElement", "org.eclipse.wst.html.webresources.core.IWebResourcesFinderTypeProvider" ]
import java.util.List; import org.eclipse.core.runtime.IConfigurationElement; import org.eclipse.wst.html.webresources.core.IWebResourcesFinderTypeProvider;
import java.util.*; import org.eclipse.core.runtime.*; import org.eclipse.wst.html.webresources.core.*;
[ "java.util", "org.eclipse.core", "org.eclipse.wst" ]
java.util; org.eclipse.core; org.eclipse.wst;
783,729
public String getIncrementerFactoryClassName() { return incrementerFactoryClassName; } private int PSEUDO_SERIAL_VERSION_UID = JRConstants.PSEUDO_SERIAL_VERSION_UID_3_7_2; //NOPMD private byte calculation;
String function() { return incrementerFactoryClassName; } private int PSEUDO_SERIAL_VERSION_UID = JRConstants.PSEUDO_SERIAL_VERSION_UID_3_7_2; private byte calculation;
/** * Returns the incrementer factory class name. * <p> * The factory will be used to increment the value of the master report variable * with the value from the subreport. * * @return the incrementer factory class name. */
Returns the incrementer factory class name. The factory will be used to increment the value of the master report variable with the value from the subreport
getIncrementerFactoryClassName
{ "repo_name": "aleatorio12/ProVentasConnector", "path": "jasperreports-6.2.1-project/jasperreports-6.2.1/src/net/sf/jasperreports/engine/base/JRBaseSubreportReturnValue.java", "license": "gpl-3.0", "size": 5097 }
[ "net.sf.jasperreports.engine.JRConstants" ]
import net.sf.jasperreports.engine.JRConstants;
import net.sf.jasperreports.engine.*;
[ "net.sf.jasperreports" ]
net.sf.jasperreports;
2,688,251
private void recheckElectability() { // Maintain lock ordering of elector -> ZKFC synchronized (elector) { synchronized (this) { boolean healthy = lastHealthState == State.SERVICE_HEALTHY; long remainingDelay = delayJoiningUntilNanotime - System.nanoTime(); if (remainingDelay > 0) { if (healthy) { LOG.info("Would have joined master election, but this node is " + "prohibited from doing so for " + TimeUnit.NANOSECONDS.toMillis(remainingDelay) + " more ms"); } scheduleRecheck(remainingDelay); return; } switch (lastHealthState) { case SERVICE_HEALTHY: elector.joinElection(targetToData(localTarget)); if (quitElectionOnBadState) { quitElectionOnBadState = false; } break; case INITIALIZING: LOG.info("Ensuring that " + localTarget + " does not " + "participate in active master election"); elector.quitElection(false); serviceState = HAServiceState.INITIALIZING; break; case SERVICE_UNHEALTHY: case SERVICE_NOT_RESPONDING: LOG.info("Quitting master election for " + localTarget + " and marking that fencing is necessary"); elector.quitElection(true); serviceState = HAServiceState.INITIALIZING; break; case HEALTH_MONITOR_FAILED: fatalError("Health monitor failed!"); break; default: throw new IllegalArgumentException("Unhandled state:" + lastHealthState); } } } }
void function() { synchronized (elector) { synchronized (this) { boolean healthy = lastHealthState == State.SERVICE_HEALTHY; long remainingDelay = delayJoiningUntilNanotime - System.nanoTime(); if (remainingDelay > 0) { if (healthy) { LOG.info(STR + STR + TimeUnit.NANOSECONDS.toMillis(remainingDelay) + STR); } scheduleRecheck(remainingDelay); return; } switch (lastHealthState) { case SERVICE_HEALTHY: elector.joinElection(targetToData(localTarget)); if (quitElectionOnBadState) { quitElectionOnBadState = false; } break; case INITIALIZING: LOG.info(STR + localTarget + STR + STR); elector.quitElection(false); serviceState = HAServiceState.INITIALIZING; break; case SERVICE_UNHEALTHY: case SERVICE_NOT_RESPONDING: LOG.info(STR + localTarget + STR); elector.quitElection(true); serviceState = HAServiceState.INITIALIZING; break; case HEALTH_MONITOR_FAILED: fatalError(STR); break; default: throw new IllegalArgumentException(STR + lastHealthState); } } } }
/** * Check the current state of the service, and join the election * if it should be in the election. */
Check the current state of the service, and join the election if it should be in the election
recheckElectability
{ "repo_name": "robzor92/hops", "path": "hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java", "license": "apache-2.0", "size": 31953 }
[ "java.util.concurrent.TimeUnit", "org.apache.hadoop.ha.HAServiceProtocol", "org.apache.hadoop.ha.HealthMonitor" ]
import java.util.concurrent.TimeUnit; import org.apache.hadoop.ha.HAServiceProtocol; import org.apache.hadoop.ha.HealthMonitor;
import java.util.concurrent.*; import org.apache.hadoop.ha.*;
[ "java.util", "org.apache.hadoop" ]
java.util; org.apache.hadoop;
163,068
@Nullable() public final String getChangelogDN() { return getAttributeValue(ATTR_CHANGELOG_DN); }
@Nullable() final String function() { return getAttributeValue(ATTR_CHANGELOG_DN); }
/** * Retrieves the DN of the base entry for the directory server changelog * information, if available. * * @return The DN of the base entry for the directory server changelog * information, or {@code null} if the server does not publish that * information or no changelog is available. */
Retrieves the DN of the base entry for the directory server changelog information, if available
getChangelogDN
{ "repo_name": "UnboundID/ldapsdk", "path": "src/com/unboundid/ldap/sdk/RootDSE.java", "license": "gpl-2.0", "size": 19587 }
[ "com.unboundid.util.Nullable" ]
import com.unboundid.util.Nullable;
import com.unboundid.util.*;
[ "com.unboundid.util" ]
com.unboundid.util;
1,800,714
@Override public Writer append(CharSequence value) { builder.append(value); return this; }
Writer function(CharSequence value) { builder.append(value); return this; }
/** * Append a character sequence to this Writer. * * @param value The character to append * @return This writer instance */
Append a character sequence to this Writer
append
{ "repo_name": "1gravity/Android-RTEditor", "path": "RTEditor/src/main/java/com/onegravity/rteditor/utils/io/output/StringBuilderWriter.java", "license": "apache-2.0", "size": 4523 }
[ "java.io.Writer" ]
import java.io.Writer;
import java.io.*;
[ "java.io" ]
java.io;
2,206,742
void getSentMessageIDs(){ String providerNo= this.getProviderNo(); messageid = new java.util.Vector(); status = new java.util.Vector(); sentby = new java.util.Vector(); date = new java.util.Vector(); ime = new java.util.Vector(); subject = new java.util.Vector(); try{ java.sql.ResultSet rs; String sql = new String("select messageid, thedate, theime, thesubject, sentby from messagetbl where sentbyNo = '"+ providerNo+"' and sentByLocation = '"+getCurrentLocationId()+"'"); rs = DBHandler.GetSQL(sql); int cou = 0; while (rs.next()) { messageid.add( oscar.Misc.getString(rs, "messageid") ); status.add("sent"); sentby.add(oscar.Misc.getString(rs, "sentby")); date.add(oscar.Misc.getString(rs, "thedate")); ime.add(oscar.Misc.getString(rs, "theime")); subject.add(oscar.Misc.getString(rs, "thesubject")); cou++; } rs.close(); }catch (java.sql.SQLException e){MiscUtils.getLogger().error("Error", e); } }//getSentMessageIDs
void getSentMessageIDs(){ String providerNo= this.getProviderNo(); messageid = new java.util.Vector(); status = new java.util.Vector(); sentby = new java.util.Vector(); date = new java.util.Vector(); ime = new java.util.Vector(); subject = new java.util.Vector(); try{ java.sql.ResultSet rs; String sql = new String(STR+ providerNo+STR+getCurrentLocationId()+"'"); rs = DBHandler.GetSQL(sql); int cou = 0; while (rs.next()) { messageid.add( oscar.Misc.getString(rs, STR) ); status.add("sent"); sentby.add(oscar.Misc.getString(rs, STR)); date.add(oscar.Misc.getString(rs, STR)); ime.add(oscar.Misc.getString(rs, STR)); subject.add(oscar.Misc.getString(rs, STR)); cou++; } rs.close(); }catch (java.sql.SQLException e){MiscUtils.getLogger().error("Error", e); } }
/** * This method uses the ProviderNo and searches for messages for this providerNo * in the messagelisttbl */
This method uses the ProviderNo and searches for messages for this providerNo in the messagelisttbl
getSentMessageIDs
{ "repo_name": "vvanherk/oscar_emr", "path": "src/main/java/oscar/oscarMessenger/pageUtil/MsgDisplayMessagesBean.java", "license": "gpl-2.0", "size": 23771 }
[ "org.oscarehr.util.MiscUtils" ]
import org.oscarehr.util.MiscUtils;
import org.oscarehr.util.*;
[ "org.oscarehr.util" ]
org.oscarehr.util;
1,873,819
private Element generateExcerptEntryForFunctionalNode(Node functionalOpNode, Node lhsNode, XmlProcessor hqmfXmlProcessor, Node clonedNodeToAppendExcerpt) throws XPathExpressionException { Element excerptElement = hqmfXmlProcessor.getOriginalDoc().createElement(EXCERPT); String functionalOpName = functionalOpNode.getAttributes().getNamedItem(TYPE).getNodeValue(); Element criteriaElement = null; if (FUNCTIONAL_OPS_NON_SUBSET.containsKey(functionalOpName)) { Element sequenceElement = hqmfXmlProcessor.getOriginalDoc().createElement(SEQUENCE_NUMBER); sequenceElement.setAttribute(VALUE, FUNCTIONAL_OPS_NON_SUBSET.get(functionalOpName.toUpperCase())); excerptElement.appendChild(sequenceElement); if (clonedNodeToAppendExcerpt != null) { if (clonedNodeToAppendExcerpt.getNodeName().contains(GROUPER)) { criteriaElement = generateCriteriaElementForSetOpExcerpt(hqmfXmlProcessor, clonedNodeToAppendExcerpt); excerptElement.appendChild(criteriaElement); } else { NodeList entryChildNodes = clonedNodeToAppendExcerpt.getChildNodes(); criteriaElement = generateCriteriaElementForExcerpt(hqmfXmlProcessor, entryChildNodes); excerptElement.appendChild(criteriaElement); } } } else if (FUNCTIONAL_OPS_SUBSET.containsKey(functionalOpName)) { NamedNodeMap attributeMap = functionalOpNode.getAttributes(); if (clonedNodeToAppendExcerpt.getNodeName().contains(GROUPER)) { criteriaElement = generateCriteriaElementForSetOpExcerpt(hqmfXmlProcessor, clonedNodeToAppendExcerpt); excerptElement.appendChild(criteriaElement); } else { NodeList entryChildNodes = clonedNodeToAppendExcerpt.getChildNodes(); criteriaElement = generateCriteriaElementForExcerpt(hqmfXmlProcessor, entryChildNodes); excerptElement.appendChild(criteriaElement); } if (clonedNodeToAppendExcerpt != null) { if ("count".equalsIgnoreCase(functionalOpName)) { createRepeatNumberTagForCountFuncttion(hqmfXmlProcessor, attributeMap, criteriaElement); Element qdmSubSetElement = hqmfXmlProcessor.getOriginalDoc().createElement("qdm:subsetCode"); 
qdmSubSetElement.setAttribute(CODE, FUNCTIONAL_OPS_SUBSET.get(functionalOpName.toUpperCase())); Element subSetCodeElement = hqmfXmlProcessor.getOriginalDoc().createElement("subsetCode"); subSetCodeElement.setAttribute(CODE, "SUM"); excerptElement.appendChild(subSetCodeElement); excerptElement.appendChild(qdmSubSetElement); excerptElement.appendChild(criteriaElement); } else { if ((attributeMap.getNamedItem(OPERATOR_TYPE) != null) && (lhsNode != null)) { String lhsNodeType = lhsNode.getNodeName(); if (ELEMENT_REF.equalsIgnoreCase(lhsNodeType)) { String qdmUUID = lhsNode.getAttributes().getNamedItem(ID).getNodeValue(); String xPath = "/measure/elementLookUp/qdm[@uuid ='" + qdmUUID + "']"; Node node = measureExport.getSimpleXMLProcessor() .findNode(measureExport.getSimpleXMLProcessor().getOriginalDoc(), xPath); if ((node != null) && lhsNode.hasChildNodes()) { Node qdmNode = node.cloneNode(true); Node attributeNode = lhsNode.getFirstChild().cloneNode(true); attributeNode.setUserData(ATTRIBUTE_NAME, attributeNode.getAttributes().getNamedItem(NAME).getNodeValue(), null); attributeNode.setUserData(ATTRIBUTE_MODE, attributeMap.getNamedItem(OPERATOR_TYPE).getNodeValue(), null); attributeNode.setUserData(ATTRIBUTE_UUID, attributeNode.getAttributes().getNamedItem(ATTR_UUID).getNodeValue(), null); Element attributeElement = (Element) attributeNode; attributeElement.setAttribute(MODE, attributeMap.getNamedItem(OPERATOR_TYPE).getNodeValue()); if (attributeElement.getAttributes().getNamedItem(ATTR_DATE) != null) { attributeNode.setUserData(ATTRIBUTE_DATE, attributeMap.getNamedItem(QUANTITY).getNodeValue(), null); } else { attributeElement.setAttribute(COMPARISON_VALUE, attributeMap.getNamedItem(QUANTITY).getNodeValue()); } if (attributeMap.getNamedItem(UNIT) != null) { attributeElement.setAttribute(UNIT, attributeMap.getNamedItem(UNIT).getNodeValue()); } else { if (attributeElement.getAttributes().getNamedItem(UNIT) != null) { attributeElement.removeAttribute(UNIT); } } 
attributeNode = attributeElement; // HQMFDataCriteriaElementGenerator hqmfDataCriteriaElementGenerator = new // HQMFDataCriteriaElementGenerator(); // hqmfDataCriteriaElementGenerator.generateAttributeTagForFunctionalOp(measureExport,qdmNode, // criteriaElement, attributeNode); HQMFAttributeGenerator attributeGenerator = new HQMFAttributeGenerator(); attributeGenerator.generateAttributeTagForFunctionalOp(measureExport, qdmNode, criteriaElement, attributeNode); } } } Element qdmSubSetElement = hqmfXmlProcessor.getOriginalDoc().createElement("qdm:subsetCode"); qdmSubSetElement.setAttribute(CODE, FUNCTIONAL_OPS_SUBSET.get(functionalOpName.toUpperCase())); if ("sum".equalsIgnoreCase(functionalOpName)) { Element subSetCodeElement = hqmfXmlProcessor.getOriginalDoc().createElement("subsetCode"); subSetCodeElement.setAttribute(CODE, "SUM"); excerptElement.appendChild(subSetCodeElement); } excerptElement.appendChild(qdmSubSetElement); excerptElement.appendChild(criteriaElement); } } } return excerptElement; }
Element function(Node functionalOpNode, Node lhsNode, XmlProcessor hqmfXmlProcessor, Node clonedNodeToAppendExcerpt) throws XPathExpressionException { Element excerptElement = hqmfXmlProcessor.getOriginalDoc().createElement(EXCERPT); String functionalOpName = functionalOpNode.getAttributes().getNamedItem(TYPE).getNodeValue(); Element criteriaElement = null; if (FUNCTIONAL_OPS_NON_SUBSET.containsKey(functionalOpName)) { Element sequenceElement = hqmfXmlProcessor.getOriginalDoc().createElement(SEQUENCE_NUMBER); sequenceElement.setAttribute(VALUE, FUNCTIONAL_OPS_NON_SUBSET.get(functionalOpName.toUpperCase())); excerptElement.appendChild(sequenceElement); if (clonedNodeToAppendExcerpt != null) { if (clonedNodeToAppendExcerpt.getNodeName().contains(GROUPER)) { criteriaElement = generateCriteriaElementForSetOpExcerpt(hqmfXmlProcessor, clonedNodeToAppendExcerpt); excerptElement.appendChild(criteriaElement); } else { NodeList entryChildNodes = clonedNodeToAppendExcerpt.getChildNodes(); criteriaElement = generateCriteriaElementForExcerpt(hqmfXmlProcessor, entryChildNodes); excerptElement.appendChild(criteriaElement); } } } else if (FUNCTIONAL_OPS_SUBSET.containsKey(functionalOpName)) { NamedNodeMap attributeMap = functionalOpNode.getAttributes(); if (clonedNodeToAppendExcerpt.getNodeName().contains(GROUPER)) { criteriaElement = generateCriteriaElementForSetOpExcerpt(hqmfXmlProcessor, clonedNodeToAppendExcerpt); excerptElement.appendChild(criteriaElement); } else { NodeList entryChildNodes = clonedNodeToAppendExcerpt.getChildNodes(); criteriaElement = generateCriteriaElementForExcerpt(hqmfXmlProcessor, entryChildNodes); excerptElement.appendChild(criteriaElement); } if (clonedNodeToAppendExcerpt != null) { if ("count".equalsIgnoreCase(functionalOpName)) { createRepeatNumberTagForCountFuncttion(hqmfXmlProcessor, attributeMap, criteriaElement); Element qdmSubSetElement = hqmfXmlProcessor.getOriginalDoc().createElement(STR); qdmSubSetElement.setAttribute(CODE, 
FUNCTIONAL_OPS_SUBSET.get(functionalOpName.toUpperCase())); Element subSetCodeElement = hqmfXmlProcessor.getOriginalDoc().createElement(STR); subSetCodeElement.setAttribute(CODE, "SUM"); excerptElement.appendChild(subSetCodeElement); excerptElement.appendChild(qdmSubSetElement); excerptElement.appendChild(criteriaElement); } else { if ((attributeMap.getNamedItem(OPERATOR_TYPE) != null) && (lhsNode != null)) { String lhsNodeType = lhsNode.getNodeName(); if (ELEMENT_REF.equalsIgnoreCase(lhsNodeType)) { String qdmUUID = lhsNode.getAttributes().getNamedItem(ID).getNodeValue(); String xPath = STR + qdmUUID + "']"; Node node = measureExport.getSimpleXMLProcessor() .findNode(measureExport.getSimpleXMLProcessor().getOriginalDoc(), xPath); if ((node != null) && lhsNode.hasChildNodes()) { Node qdmNode = node.cloneNode(true); Node attributeNode = lhsNode.getFirstChild().cloneNode(true); attributeNode.setUserData(ATTRIBUTE_NAME, attributeNode.getAttributes().getNamedItem(NAME).getNodeValue(), null); attributeNode.setUserData(ATTRIBUTE_MODE, attributeMap.getNamedItem(OPERATOR_TYPE).getNodeValue(), null); attributeNode.setUserData(ATTRIBUTE_UUID, attributeNode.getAttributes().getNamedItem(ATTR_UUID).getNodeValue(), null); Element attributeElement = (Element) attributeNode; attributeElement.setAttribute(MODE, attributeMap.getNamedItem(OPERATOR_TYPE).getNodeValue()); if (attributeElement.getAttributes().getNamedItem(ATTR_DATE) != null) { attributeNode.setUserData(ATTRIBUTE_DATE, attributeMap.getNamedItem(QUANTITY).getNodeValue(), null); } else { attributeElement.setAttribute(COMPARISON_VALUE, attributeMap.getNamedItem(QUANTITY).getNodeValue()); } if (attributeMap.getNamedItem(UNIT) != null) { attributeElement.setAttribute(UNIT, attributeMap.getNamedItem(UNIT).getNodeValue()); } else { if (attributeElement.getAttributes().getNamedItem(UNIT) != null) { attributeElement.removeAttribute(UNIT); } } attributeNode = attributeElement; HQMFAttributeGenerator attributeGenerator = new 
HQMFAttributeGenerator(); attributeGenerator.generateAttributeTagForFunctionalOp(measureExport, qdmNode, criteriaElement, attributeNode); } } } Element qdmSubSetElement = hqmfXmlProcessor.getOriginalDoc().createElement(STR); qdmSubSetElement.setAttribute(CODE, FUNCTIONAL_OPS_SUBSET.get(functionalOpName.toUpperCase())); if ("sum".equalsIgnoreCase(functionalOpName)) { Element subSetCodeElement = hqmfXmlProcessor.getOriginalDoc().createElement(STR); subSetCodeElement.setAttribute(CODE, "SUM"); excerptElement.appendChild(subSetCodeElement); } excerptElement.appendChild(qdmSubSetElement); excerptElement.appendChild(criteriaElement); } } } return excerptElement; }
/** * Generate Excerpt for Functional Op used with timing/Relationship. * * @param functionalOpNode * the functional op node * @param lhsNode * the lhs node * @param hqmfXmlProcessor * the hqmf xml processor * @param clonedNodeToAppendExcerpt * the cloned node to append excerpt * @return the element * @throws XPathExpressionException * the x path expression exception */
Generate Excerpt for Functional Op used with timing/Relationship
generateExcerptEntryForFunctionalNode
{ "repo_name": "MeasureAuthoringTool/MeasureAuthoringTool_LatestSprint", "path": "mat/src/main/java/mat/server/hqmf/qdm_5_4/HQMFClauseLogicGenerator.java", "license": "cc0-1.0", "size": 134287 }
[ "javax.xml.xpath.XPathExpressionException", "org.w3c.dom.Element", "org.w3c.dom.NamedNodeMap", "org.w3c.dom.Node", "org.w3c.dom.NodeList" ]
import javax.xml.xpath.XPathExpressionException; import org.w3c.dom.Element; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList;
import javax.xml.xpath.*; import org.w3c.dom.*;
[ "javax.xml", "org.w3c.dom" ]
javax.xml; org.w3c.dom;
1,109,477
/**
 * Returns a defensive copy of this component's minimum size; mutating the
 * returned Dimension has no effect on this component.
 */
@Accessor
public Dimension getMinimumSize() {
    final Dimension copy = new Dimension(minimumSize);
    return copy;
}
Dimension function() { return new Dimension(minimumSize); }
/** * Returns a copy of this component's minimum size. Modifications to the * returned object have no effect on this component. */
Returns a copy of this component's minimum size. Modifications to the returned object have no effect on this component
getMinimumSize
{ "repo_name": "SQLPower/power-architect", "path": "src/main/java/ca/sqlpower/architect/swingui/PlayPenComponent.java", "license": "gpl-3.0", "size": 25368 }
[ "java.awt.Dimension" ]
import java.awt.Dimension;
import java.awt.*;
[ "java.awt" ]
java.awt;
824,218
/**
 * Reads an integer value from the text of the named child element.
 *
 * @param iParentNode parent node handle
 * @param sName       element name to read
 * @param iDefault    value returned when the element is missing or empty
 * @return the parsed integer, or {@code iDefault} when the text is absent/empty
 * @throws XMLException if the element text is not a valid integer
 */
public static int readInt(int iParentNode, String sName, int iDefault) throws XMLException {
    String sValue = readString(iParentNode, sName, null);
    if ((sValue == null) || sValue.equals("")) {
        return iDefault;
    }
    try {
        return Integer.parseInt(sValue);
    } catch (NumberFormatException e) {
        // Narrowed from catch(Exception): parseInt on a non-null String can
        // only throw NumberFormatException, so nothing else is masked here.
        throw new XMLException("Invalid number '" + sValue + "' : " + e);
    }
}
static int function(int iParentNode, String sName, int iDefault) throws XMLException { String sValue = readString(iParentNode, sName, null); if ((sValue == null) sValue.equals(STRInvalid number 'STR' : " + e); } }
/** * Reads an integer value from the node text. * * @param iParentNode Parent node. * @param sName Element name. * @param iDefault Default value. * * @return Found value or the default one. * * @throws XMLException Thrown if the operation failed. */
Reads an integer value from the node text
readInt
{ "repo_name": "MatthiasEberl/cordysfilecon", "path": "src/cws/FileConnector/com-cordys-coe/fileconnector/java/source/com/cordys/coe/ac/fileconnector/utils/XMLSerializer.java", "license": "apache-2.0", "size": 13701 }
[ "com.eibus.xml.nom.XMLException" ]
import com.eibus.xml.nom.XMLException;
import com.eibus.xml.nom.*;
[ "com.eibus.xml" ]
com.eibus.xml;
1,981,339
/**
 * Handles Configuration assignment and pulls the tag list from the
 * CONF_PROPERTY ("urlmeta.tags") setting. A null configuration leaves the
 * current tag list untouched.
 */
public void setConf(Configuration conf) {
    super.setConf(conf);
    if (conf != null) {
        urlMetaTags = conf.getStrings(CONF_PROPERTY);
    }
}
void function(Configuration conf) { super.setConf(conf); if (conf == null) return; urlMetaTags = conf.getStrings(CONF_PROPERTY); }
/** * handles conf assignment and pulls the value assignment from the * "urlmeta.tags" property */
handles conf assignment and pulls the value assignment from the "urlmeta.tags" property
setConf
{ "repo_name": "fogbeam/Heceta_nutch", "path": "src/plugin/urlmeta/src/java/org/apache/nutch/scoring/urlmeta/URLMetaScoringFilter.java", "license": "apache-2.0", "size": 5472 }
[ "org.apache.hadoop.conf.Configuration" ]
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.*;
[ "org.apache.hadoop" ]
org.apache.hadoop;
2,133,372
/**
 * Clears object-registry entries when the incoming task belongs to a
 * different vertex (VERTEX-scoped cache) or a different DAG (DAG-scoped cache
 * plus a fresh startedInputsMap), then records the new vertex id.
 *
 * @param containerTask the new task specification; must be a valid, live task
 */
private void cleanupOnTaskChanged(ContainerTask containerTask) {
    Preconditions.checkState(!containerTask.shouldDie());
    Preconditions.checkState(containerTask.getTaskSpec() != null);
    final TezVertexID incomingVertexID =
        containerTask.getTaskSpec().getTaskAttemptID().getTaskID().getVertexID();
    if (lastVertexID != null) {
        final boolean vertexChanged = !lastVertexID.equals(incomingVertexID);
        if (vertexChanged) {
            objectRegistry.clearCache(ObjectRegistryImpl.ObjectLifeCycle.VERTEX);
        }
        final boolean dagChanged = !lastVertexID.getDAGId().equals(incomingVertexID.getDAGId());
        if (dagChanged) {
            objectRegistry.clearCache(ObjectRegistryImpl.ObjectLifeCycle.DAG);
            startedInputsMap = HashMultimap.create();
        }
    }
    lastVertexID = incomingVertexID;
}
void function(ContainerTask containerTask) { Preconditions.checkState(!containerTask.shouldDie()); Preconditions.checkState(containerTask.getTaskSpec() != null); TezVertexID newVertexID = containerTask.getTaskSpec().getTaskAttemptID().getTaskID() .getVertexID(); if (lastVertexID != null) { if (!lastVertexID.equals(newVertexID)) { objectRegistry.clearCache(ObjectRegistryImpl.ObjectLifeCycle.VERTEX); } if (!lastVertexID.getDAGId().equals(newVertexID.getDAGId())) { objectRegistry.clearCache(ObjectRegistryImpl.ObjectLifeCycle.DAG); startedInputsMap = HashMultimap.create(); } } lastVertexID = newVertexID; }
/** * Cleans entries from the object registry, and resets the startedInputsMap if required * * @param containerTask * the new task specification. Must be a valid task */
Cleans entries from the object registry, and resets the startedInputsMap if required
cleanupOnTaskChanged
{ "repo_name": "guiling/tez", "path": "tez-runtime-internals/src/main/java/org/apache/tez/runtime/task/TezChild.java", "license": "apache-2.0", "size": 20275 }
[ "com.google.common.base.Preconditions", "com.google.common.collect.HashMultimap", "org.apache.tez.common.ContainerTask", "org.apache.tez.dag.records.TezVertexID", "org.apache.tez.runtime.common.objectregistry.ObjectRegistryImpl" ]
import com.google.common.base.Preconditions; import com.google.common.collect.HashMultimap; import org.apache.tez.common.ContainerTask; import org.apache.tez.dag.records.TezVertexID; import org.apache.tez.runtime.common.objectregistry.ObjectRegistryImpl;
import com.google.common.base.*; import com.google.common.collect.*; import org.apache.tez.common.*; import org.apache.tez.dag.records.*; import org.apache.tez.runtime.common.objectregistry.*;
[ "com.google.common", "org.apache.tez" ]
com.google.common; org.apache.tez;
1,909,181
/**
 * Adds a shaped recipe to the games recipe list.
 *
 * @param stack            the crafting result
 * @param recipeComponents the pattern rows (either one String[] argument or a
 *                         leading run of Strings), followed by alternating
 *                         Character/ingredient pairs where an ingredient is an
 *                         Item, Block or ItemStack
 * @return the registered ShapedCrystalRecipe
 */
public ShapedCrystalRecipe addRecipe(ItemStack stack, Object... recipeComponents) {
    String s = "";  // all pattern rows concatenated in order
    int i = 0;      // cursor into recipeComponents
    int j = 0;      // recipe width (length of the last row read)
    int k = 0;      // recipe height (number of rows)
    // Collect the pattern rows, either from a single String[] argument...
    if (recipeComponents[i] instanceof String[]) {
        String[] astring = ((String[])recipeComponents[i++]);
        for (int l = 0; l < astring.length; ++l) {
            String s2 = astring[l];
            ++k;
            j = s2.length();
            s = s + s2;
        }
    } else {
        // ...or from a leading run of String arguments.
        while (recipeComponents[i] instanceof String) {
            String s1 = (String)recipeComponents[i++];
            ++k;
            j = s1.length();
            s = s + s1;
        }
    }
    // Remaining arguments are (Character, ingredient) pairs mapping pattern
    // characters to stacks.
    // NOTE(review): assumes well-formed varargs — a malformed call fails with
    // a ClassCastException or ArrayIndexOutOfBoundsException.
    Map<Character, ItemStack> map;
    for (map = Maps.<Character, ItemStack>newHashMap(); i < recipeComponents.length; i += 2) {
        Character character = (Character)recipeComponents[i];
        ItemStack itemstack = ItemStackTools.getEmptyStack();
        if (recipeComponents[i + 1] instanceof Item) {
            itemstack = new ItemStack((Item)recipeComponents[i + 1]);
        } else if (recipeComponents[i + 1] instanceof Block) {
            // 32767 = wildcard metadata so any sub-type of the block matches.
            itemstack = new ItemStack((Block)recipeComponents[i + 1], 1, 32767);
        } else if (recipeComponents[i + 1] instanceof ItemStack) {
            itemstack = (ItemStack)recipeComponents[i + 1];
        }
        map.put(character, itemstack);
    }
    // Expand the flattened pattern into a width*height ingredient array;
    // characters with no mapping become the empty stack.
    ItemStack[] aitemstack = new ItemStack[j * k];
    for (int i1 = 0; i1 < j * k; ++i1) {
        char c0 = s.charAt(i1);
        if (map.containsKey(Character.valueOf(c0))) {
            aitemstack[i1] = map.get(Character.valueOf(c0)).copy();
        } else {
            aitemstack[i1] = ItemStackTools.getEmptyStack();
        }
    }
    ShapedCrystalRecipe shapedrecipes = new ShapedCrystalRecipe(j, k, aitemstack, stack);
    this.recipes.add(shapedrecipes);
    return shapedrecipes;
}
ShapedCrystalRecipe function(ItemStack stack, Object... recipeComponents) { String s = ""; int i = 0; int j = 0; int k = 0; if (recipeComponents[i] instanceof String[]) { String[] astring = ((String[])recipeComponents[i++]); for (int l = 0; l < astring.length; ++l) { String s2 = astring[l]; ++k; j = s2.length(); s = s + s2; } } else { while (recipeComponents[i] instanceof String) { String s1 = (String)recipeComponents[i++]; ++k; j = s1.length(); s = s + s1; } } Map<Character, ItemStack> map; for (map = Maps.<Character, ItemStack>newHashMap(); i < recipeComponents.length; i += 2) { Character character = (Character)recipeComponents[i]; ItemStack itemstack = ItemStackTools.getEmptyStack(); if (recipeComponents[i + 1] instanceof Item) { itemstack = new ItemStack((Item)recipeComponents[i + 1]); } else if (recipeComponents[i + 1] instanceof Block) { itemstack = new ItemStack((Block)recipeComponents[i + 1], 1, 32767); } else if (recipeComponents[i + 1] instanceof ItemStack) { itemstack = (ItemStack)recipeComponents[i + 1]; } map.put(character, itemstack); } ItemStack[] aitemstack = new ItemStack[j * k]; for (int i1 = 0; i1 < j * k; ++i1) { char c0 = s.charAt(i1); if (map.containsKey(Character.valueOf(c0))) { aitemstack[i1] = map.get(Character.valueOf(c0)).copy(); } else { aitemstack[i1] = ItemStackTools.getEmptyStack(); } } ShapedCrystalRecipe shapedrecipes = new ShapedCrystalRecipe(j, k, aitemstack, stack); this.recipes.add(shapedrecipes); return shapedrecipes; }
/** * Adds a shaped recipe to the games recipe list. */
Adds a shaped recipe to the games recipe list
addRecipe
{ "repo_name": "Alec-WAM/CrystalMod", "path": "src/main/java/alec_wam/CrystalMod/crafting/CrystalCraftingManager.java", "license": "mit", "size": 16563 }
[ "com.google.common.collect.Maps", "java.util.Map", "net.minecraft.block.Block", "net.minecraft.item.Item", "net.minecraft.item.ItemStack" ]
import com.google.common.collect.Maps; import java.util.Map; import net.minecraft.block.Block; import net.minecraft.item.Item; import net.minecraft.item.ItemStack;
import com.google.common.collect.*; import java.util.*; import net.minecraft.block.*; import net.minecraft.item.*;
[ "com.google.common", "java.util", "net.minecraft.block", "net.minecraft.item" ]
com.google.common; java.util; net.minecraft.block; net.minecraft.item;
1,215,810
/**
 * Get all views in the application that have the given name and view type.
 *
 * A component name of the form "path#name" is resolved relative to the
 * caller's deployment root ("../path"); a plain name matches across the whole
 * application, narrowing to the caller's own deployment when the name is
 * ambiguous.
 *
 * @param componentName  the component name, optionally "relative/path#name"
 * @param viewName       the view type
 * @param deploymentRoot the deployment root of the component doing the lookup
 * @return all matching view descriptions (empty when the view type is unknown)
 */
public Set<ViewDescription> getComponents(final String componentName, final String viewName,
        final VirtualFile deploymentRoot) {
    final List<ViewInformation> info = componentsByViewName.get(viewName);
    if (info == null) {
        return Collections.<ViewDescription>emptySet();
    }
    if (componentName.contains("#")) {
        // "path#name" form: match the bean name AND the referenced deployment.
        final String[] parts = componentName.split("#");
        String path = parts[0];
        if (!path.startsWith("../")) {
            path = "../" + path;  // paths are interpreted relative to the caller's root
        }
        final VirtualFile virtualPath = deploymentRoot.getChild(path);
        final String name = parts[1];
        final Set<ViewDescription> ret = new HashSet<ViewDescription>();
        for (ViewInformation i : info) {
            if (i.beanName.equals(name)) {
                //now we need to check the path
                if (virtualPath.equals(i.deploymentRoot)) {
                    ret.add(i.viewDescription);
                }
            }
        }
        return ret;
    } else {
        // Plain name: collect application-wide matches, but remember which of
        // them live in the caller's own deployment.
        final Set<ViewDescription> all = new HashSet<ViewDescription>();
        final Set<ViewDescription> thisDeployment = new HashSet<ViewDescription>();
        for (ViewInformation i : info) {
            if (i.beanName.equals(componentName)) {
                all.add(i.viewDescription);
                if (i.deploymentRoot.equals(deploymentRoot)) {
                    thisDeployment.add(i.viewDescription);
                }
            }
        }
        // Ambiguous across deployments: prefer the caller's own deployment.
        if (all.size() > 1) {
            return thisDeployment;
        }
        return all;
    }
}
Set<ViewDescription> function(final String componentName, final String viewName, final VirtualFile deploymentRoot) { final List<ViewInformation> info = componentsByViewName.get(viewName); if (info == null) { return Collections.<ViewDescription>emptySet(); } if (componentName.contains("#")) { final String[] parts = componentName.split("#"); String path = parts[0]; if (!path.startsWith("../")) { path = "../" + path; } final VirtualFile virtualPath = deploymentRoot.getChild(path); final String name = parts[1]; final Set<ViewDescription> ret = new HashSet<ViewDescription>(); for (ViewInformation i : info) { if (i.beanName.equals(name)) { if (virtualPath.equals(i.deploymentRoot)) { ret.add(i.viewDescription); } } } return ret; } else { final Set<ViewDescription> all = new HashSet<ViewDescription>(); final Set<ViewDescription> thisDeployment = new HashSet<ViewDescription>(); for (ViewInformation i : info) { if (i.beanName.equals(componentName)) { all.add(i.viewDescription); if (i.deploymentRoot.equals(deploymentRoot)) { thisDeployment.add(i.viewDescription); } } } if (all.size() > 1) { return thisDeployment; } return all; } }
/** * Get all views in the application that have the given name and view type * * @param componentName The name of the component * @param viewName The view type * @param deploymentRoot The deployment root of the component doing the lookup * @return A set of all views for the given component name and type */
Get all views in the application that have the given name and view type
getComponents
{ "repo_name": "xasx/wildfly", "path": "ee/src/main/java/org/jboss/as/ee/component/EEApplicationDescription.java", "license": "lgpl-2.1", "size": 11704 }
[ "java.util.Collections", "java.util.HashSet", "java.util.List", "java.util.Set", "org.jboss.vfs.VirtualFile" ]
import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import org.jboss.vfs.VirtualFile;
import java.util.*; import org.jboss.vfs.*;
[ "java.util", "org.jboss.vfs" ]
java.util; org.jboss.vfs;
1,934,322
/**
 * Removes the specified listener from the named source MBean. If the listener
 * is registered more than once, for example with different filters or
 * handbacks, this method will remove all those registrations.
 *
 * @param observed the ObjectName of the source MBean the listener is removed from
 * @param listener the listener to be removed
 * @return an implementation-defined result object
 * @throws InstanceNotFoundException if the source MBean is not registered in the MBeanServer
 * @throws ListenerNotFoundException if the listener is not registered in the MBean
 * @throws IOException               if a communication problem occurred
 */
public Object removeNotificationListener(ObjectName observed, NotificationListener listener)
        throws InstanceNotFoundException, ListenerNotFoundException, IOException;
Object function(ObjectName observed, NotificationListener listener) throws InstanceNotFoundException, ListenerNotFoundException, IOException;
/** * Removes the specified listener from the named source MBean. * If the listener is registered more than once, for example with different filters or handbacks, * this method will remove all those registrations. * * @param observed The ObjectName of the source MBean on which the listener should be removed. * @param listener The listener to be removed. * @throws InstanceNotFoundException If the source MBean is not registered in the MBeanServer. * @throws ListenerNotFoundException If the listener is not registered in the MBean. * @throws IOException If a communication problem occurred. * @see #addNotificationListener(ObjectName, NotificationListener, NotificationFilter, Object) */
Removes the specified listener from the named source MBean. If the listener is registered more than once, for example with different filters or handbacks, this method will remove all those registrations
removeNotificationListener
{ "repo_name": "xien777/yajsw", "path": "yajsw/ahessian/src/main/java/org/rzo/netty/ahessian/application/jmx/remote/service/AsyncMBeanServerConnection.java", "license": "lgpl-2.1", "size": 23146 }
[ "java.io.IOException", "javax.management.InstanceNotFoundException", "javax.management.ListenerNotFoundException", "javax.management.NotificationListener", "javax.management.ObjectName" ]
import java.io.IOException; import javax.management.InstanceNotFoundException; import javax.management.ListenerNotFoundException; import javax.management.NotificationListener; import javax.management.ObjectName;
import java.io.*; import javax.management.*;
[ "java.io", "javax.management" ]
java.io; javax.management;
1,443,161
/**
 * Returns the parse failures collected for the document.
 *
 * @return the list of SAXParseExceptions recorded while parsing
 */
public List<SAXParseException> getParseExceptions() {
    final List<SAXParseException> failures = parseExceptions;
    return failures;
}
List<SAXParseException> function() { return parseExceptions; }
/** * Contains a list of parse failures in the document. */
Contains a list of parse failures in the document
getParseExceptions
{ "repo_name": "mareknovotny/windup", "path": "rules-xml/impl/src/main/java/org/jboss/windup/rules/apps/xml/xml/ValidateXmlHandler.java", "license": "epl-1.0", "size": 4497 }
[ "java.util.List", "org.xml.sax.SAXParseException" ]
import java.util.List; import org.xml.sax.SAXParseException;
import java.util.*; import org.xml.sax.*;
[ "java.util", "org.xml.sax" ]
java.util; org.xml.sax;
3,160
public boolean isErrorSubset(@Nonnull final List<ValidatorProtos.ValidationError> errorsA, @Nonnull final List<ValidatorProtos.ValidationError> errorsB) { Map<ValidatorProtos.ValidationError.Code, Integer> codesA = new HashMap<>(); for (final ValidatorProtos.ValidationError error : errorsA) { codesA.put(error.getCode(), 1); } Map<ValidatorProtos.ValidationError.Code, Integer> codesB = new HashMap<>(); for (final ValidatorProtos.ValidationError error : errorsB) { codesB.put(error.getCode(), 1); if (!codesA.containsKey(error.getCode())) { return false; } } // Every code in B is also in A. If they are the same, not a subset. return codesA.size() > codesB.size(); }
boolean function(@Nonnull final List<ValidatorProtos.ValidationError> errorsA, @Nonnull final List<ValidatorProtos.ValidationError> errorsB) { Map<ValidatorProtos.ValidationError.Code, Integer> codesA = new HashMap<>(); for (final ValidatorProtos.ValidationError error : errorsA) { codesA.put(error.getCode(), 1); } Map<ValidatorProtos.ValidationError.Code, Integer> codesB = new HashMap<>(); for (final ValidatorProtos.ValidationError error : errorsB) { codesB.put(error.getCode(), 1); if (!codesA.containsKey(error.getCode())) { return false; } } return codesA.size() > codesB.size(); }
/** * Returns true iff the error codes in errorsB are a subset of the error * codes in errorsA. * * @param errorsA a list of validation errors. * @param errorsB a list of validation errors. * @return returns true iff the error codes in errorsB are a subset of the error * codes in errorsA. */
Returns true iff the error codes in errorsB are a subset of the error codes in errorsA
isErrorSubset
{ "repo_name": "taboola/amphtml", "path": "validator/java/src/main/java/dev/amp/validator/ParsedValidatorRules.java", "license": "apache-2.0", "size": 37623 }
[ "java.util.HashMap", "java.util.List", "java.util.Map", "javax.annotation.Nonnull" ]
import java.util.HashMap; import java.util.List; import java.util.Map; import javax.annotation.Nonnull;
import java.util.*; import javax.annotation.*;
[ "java.util", "javax.annotation" ]
java.util; javax.annotation;
1,484,258
/**
 * Records the fact that this web application has been initialized by caching
 * the servlet context and logging the event.
 *
 * @param event the servlet context event
 */
@Override
public void contextInitialized(ServletContextEvent event) {
    context = event.getServletContext();
    log("contextInitialized()");
}

// -------------------------------------------------------- Private Methods
void function(ServletContextEvent event) { this.context = event.getServletContext(); log(STR); }
/** * Record the fact that this web application has been initialized. * * @param event The servlet context event */
Record the fact that this web application has been initialized
contextInitialized
{ "repo_name": "attrs/plexi.http", "path": "www/WEB-INF/classes/listeners/ContextListener.java", "license": "mit", "size": 3924 }
[ "javax.servlet.ServletContextEvent" ]
import javax.servlet.ServletContextEvent;
import javax.servlet.*;
[ "javax.servlet" ]
javax.servlet;
991,371
private String getPostData() { // Write the data into local buffer StringBuffer postData = new StringBuffer(); // TODO: the action should be configured to retrieve the data. // Get all the param name/value pairs and build the data string Set paramNames = getParamNames(); if (paramNames != null && !paramNames.isEmpty()) { Iterator iter = paramNames.iterator(); try { while (iter.hasNext()) { String name = (String) iter.next(); postData.append('&').append(name).append('='); postData.append(getParamValue(name)); } } catch (Exception ex) { // RG: append(char) throws IOException in J2SE 5.0 } // Replace the first & with a ? postData.setCharAt(0, '?'); } LOG.finer("ServerAction: POST data: " + postData.toString()); return postData.toString(); }
String function() { StringBuffer postData = new StringBuffer(); Set paramNames = getParamNames(); if (paramNames != null && !paramNames.isEmpty()) { Iterator iter = paramNames.iterator(); try { while (iter.hasNext()) { String name = (String) iter.next(); postData.append('&').append(name).append('='); postData.append(getParamValue(name)); } } catch (Exception ex) { } postData.setCharAt(0, '?'); } LOG.finer(STR + postData.toString()); return postData.toString(); }
/** * Retrieves a string which represents the parameter data for a server action. * @return a string of name value pairs prefixed by a '?' and delimited by an '&' */
Retrieves a string which represents the parameter data for a server action
getPostData
{ "repo_name": "mbshopM/openconcerto", "path": "OpenConcerto/src/org/jdesktop/swingx/action/ServerAction.java", "license": "gpl-3.0", "size": 10314 }
[ "java.util.Iterator", "java.util.Set" ]
import java.util.Iterator; import java.util.Set;
import java.util.*;
[ "java.util" ]
java.util;
2,639,263
/**
 * Writes a localized message looked up by key in the default resource bundle.
 *
 * @param key the resource-bundle key of the message to write
 * @throws IOException if writing to the underlying stream fails
 */
public void writeI18N(String key) throws IOException {
    final String localized = i18n.getString(key);
    write(localized);
}
void function(String key) throws IOException { write(i18n.getString(key)); }
/** * Write a localized message, using the default resource bundle. * @param key the key for the message to be localized * @throws IOException if there is a problem closing the underlying stream */
Write a localized message, using the default resource bundle
writeI18N
{ "repo_name": "Distrotech/icedtea7-2.3", "path": "test/jtreg/com/sun/javatest/util/HTMLWriter.java", "license": "gpl-2.0", "size": 20661 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
2,155,735
/**
 * Retrieves the tags for the currently logged-in user; equivalent to calling
 * {@link #getTags(String)} with "default" as the username.
 *
 * @return the tag entries for the current user
 * @throws IOException      if the request fails at the transport level
 * @throws ServiceException if the service rejects the request
 */
public List<TagEntry> getTags() throws IOException, ServiceException {
    final String currentUser = "default";
    return getTags(currentUser);
}
List<TagEntry> function() throws IOException, ServiceException { return getTags(STR); }
/** * Retrieves the tags for the currently logged-in user. This is equivalent * to calling {@link #getTags(String)} with "default" as the username. */
Retrieves the tags for the currently logged-in user. This is equivalent to calling <code>#getTags(String)</code> with "default" as the username
getTags
{ "repo_name": "vanta/gdata-java-client", "path": "java/sample/photos/PicasawebClient.java", "license": "apache-2.0", "size": 8005 }
[ "com.google.gdata.data.photos.TagEntry", "com.google.gdata.util.ServiceException", "java.io.IOException", "java.util.List" ]
import com.google.gdata.data.photos.TagEntry; import com.google.gdata.util.ServiceException; import java.io.IOException; import java.util.List;
import com.google.gdata.data.photos.*; import com.google.gdata.util.*; import java.io.*; import java.util.*;
[ "com.google.gdata", "java.io", "java.util" ]
com.google.gdata; java.io; java.util;
1,807,661
public void setChildDivider(Drawable childDivider) { mChildDivider = childDivider; }
void function(Drawable childDivider) { mChildDivider = childDivider; }
/** * Sets the drawable that will be drawn adjacent to every child in the list. This will * be drawn using the same height as the normal divider ({@link #setDivider(Drawable)}) or * if it does not have an intrinsic height, the height set by {@link #setDividerHeight(int)}. * * @param childDivider The drawable to use. */
Sets the drawable that will be drawn adjacent to every child in the list. This will be drawn using the same height as the normal divider (<code>#setDivider(Drawable)</code>) or if it does not have an intrinsic height, the height set by <code>#setDividerHeight(int)</code>
setChildDivider
{ "repo_name": "syslover33/ctank", "path": "java/android-sdk-linux_r24.4.1_src/sources/android-23/android/widget/ExpandableListView.java", "license": "gpl-3.0", "size": 53296 }
[ "android.graphics.drawable.Drawable" ]
import android.graphics.drawable.Drawable;
import android.graphics.drawable.*;
[ "android.graphics" ]
android.graphics;
109,294
protected void checkInitialized( String property ) { if ( initialized ) { throw new IllegalStateException( I18n.err( I18n.ERR_576, property ) ); } }
void function( String property ) { if ( initialized ) { throw new IllegalStateException( I18n.err( I18n.ERR_576, property ) ); } }
/** * Check that the operation is done on an initialized store * @param property */
Check that the operation is done on an initialized store
checkInitialized
{ "repo_name": "darranl/directory-server", "path": "core-api/src/main/java/org/apache/directory/server/core/api/partition/AbstractPartition.java", "license": "apache-2.0", "size": 6665 }
[ "org.apache.directory.server.i18n.I18n" ]
import org.apache.directory.server.i18n.I18n;
import org.apache.directory.server.i18n.*;
[ "org.apache.directory" ]
org.apache.directory;
266,770
protected Map<ProcessDefinitionEntity, ProcessDefinitionEntity> getPreviousVersionsOfProcessDefinitions( ParsedDeployment parsedDeployment) { Map<ProcessDefinitionEntity, ProcessDefinitionEntity> result = new LinkedHashMap<ProcessDefinitionEntity, ProcessDefinitionEntity>(); for (ProcessDefinitionEntity newDefinition : parsedDeployment.getAllProcessDefinitions()) { ProcessDefinitionEntity existingDefinition = bpmnDeploymentHelper.getMostRecentVersionOfProcessDefinition(newDefinition); if (existingDefinition != null) { result.put(newDefinition, existingDefinition); } } return result; }
Map<ProcessDefinitionEntity, ProcessDefinitionEntity> function( ParsedDeployment parsedDeployment) { Map<ProcessDefinitionEntity, ProcessDefinitionEntity> result = new LinkedHashMap<ProcessDefinitionEntity, ProcessDefinitionEntity>(); for (ProcessDefinitionEntity newDefinition : parsedDeployment.getAllProcessDefinitions()) { ProcessDefinitionEntity existingDefinition = bpmnDeploymentHelper.getMostRecentVersionOfProcessDefinition(newDefinition); if (existingDefinition != null) { result.put(newDefinition, existingDefinition); } } return result; }
/** * Constructs a map from new ProcessDefinitionEntities to the previous version by key and tenant. * If no previous version exists, no map entry is created. */
Constructs a map from new ProcessDefinitionEntities to the previous version by key and tenant. If no previous version exists, no map entry is created
getPreviousVersionsOfProcessDefinitions
{ "repo_name": "roberthafner/flowable-engine", "path": "modules/flowable-engine/src/main/java/org/activiti/engine/impl/bpmn/deployer/BpmnDeployer.java", "license": "apache-2.0", "size": 21511 }
[ "java.util.LinkedHashMap", "java.util.Map", "org.activiti.engine.impl.persistence.entity.ProcessDefinitionEntity" ]
import java.util.LinkedHashMap; import java.util.Map; import org.activiti.engine.impl.persistence.entity.ProcessDefinitionEntity;
import java.util.*; import org.activiti.engine.impl.persistence.entity.*;
[ "java.util", "org.activiti.engine" ]
java.util; org.activiti.engine;
376,329
public MROperPlan getMRPlan() { return MRPlan; }
MROperPlan function() { return MRPlan; }
/** * Used to get the compiled plan * * @return map reduce plan built by the compiler */
Used to get the compiled plan
getMRPlan
{ "repo_name": "hxquangnhat/PIG-ROLLUP-AUTO-HII", "path": "src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MRCompiler.java", "license": "apache-2.0", "size": 155423 }
[ "org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.plans.MROperPlan" ]
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.plans.MROperPlan;
import org.apache.pig.backend.hadoop.executionengine.*;
[ "org.apache.pig" ]
org.apache.pig;
1,194,521
public List getSteps() { return steps; }
List function() { return steps; }
/** * Gets the configures steps. * * @return List of Step. */
Gets the configures steps
getSteps
{ "repo_name": "ervandew/formic", "path": "src/java/org/formic/ant/type/Path.java", "license": "lgpl-2.1", "size": 1913 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
19,196
private void addIntField(Ignite ignite, String fieldName, int fieldVal, int cacheIdx) { BinaryObjectBuilder builder = ignite.binary().builder(BINARY_TYPE_NAME); IgniteCache<Object, Object> cache = ignite.cache(DEFAULT_CACHE_NAME).withKeepBinary(); builder.setField(fieldName, fieldVal); cache.put(cacheIdx, builder.build()); }
void function(Ignite ignite, String fieldName, int fieldVal, int cacheIdx) { BinaryObjectBuilder builder = ignite.binary().builder(BINARY_TYPE_NAME); IgniteCache<Object, Object> cache = ignite.cache(DEFAULT_CACHE_NAME).withKeepBinary(); builder.setField(fieldName, fieldVal); cache.put(cacheIdx, builder.build()); }
/** * Adds field of integer type to fixed binary type. * * @param ignite Ignite. * @param fieldName Field name. * @param fieldVal Field value. * @param cacheIdx Cache index. */
Adds field of integer type to fixed binary type
addIntField
{ "repo_name": "alexzaitzev/ignite", "path": "modules/core/src/test/java/org/apache/ignite/internal/processors/cache/binary/GridCacheBinaryObjectMetadataExchangeMultinodeTest.java", "license": "apache-2.0", "size": 16635 }
[ "org.apache.ignite.Ignite", "org.apache.ignite.IgniteCache", "org.apache.ignite.binary.BinaryObjectBuilder" ]
import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCache; import org.apache.ignite.binary.BinaryObjectBuilder;
import org.apache.ignite.*; import org.apache.ignite.binary.*;
[ "org.apache.ignite" ]
org.apache.ignite;
556,087
public Document getEdgeList(String prefix) { Document doc = XMLHelper.newDocument(); Element docEl = XMLHelper.addDocumentElement(doc, "edges"); for (ProcessEdge edge : this.model.getEdges()) { Element nodeEl = XMLHelper.addElement(doc, docEl, "node"); XMLHelper.addElement(doc, nodeEl, "uri").setTextContent( prefix + this.model.getProcessModelURI() + "/edges/" + edge.getId()); } return doc; }
Document function(String prefix) { Document doc = XMLHelper.newDocument(); Element docEl = XMLHelper.addDocumentElement(doc, "edges"); for (ProcessEdge edge : this.model.getEdges()) { Element nodeEl = XMLHelper.addElement(doc, docEl, "node"); XMLHelper.addElement(doc, nodeEl, "uri").setTextContent( prefix + this.model.getProcessModelURI() + STR + edge.getId()); } return doc; }
/** * Create an XML-list of all edges contained in the given model * @param prefix server base address * @return XML-list of all edges */
Create an XML-list of all edges contained in the given model
getEdgeList
{ "repo_name": "bptlab/processeditor", "path": "src/com/inubit/research/server/request/handler/util/ProcessModelUtils.java", "license": "apache-2.0", "size": 4982 }
[ "com.inubit.research.server.request.XMLHelper", "net.frapu.code.visualization.ProcessEdge", "org.w3c.dom.Document", "org.w3c.dom.Element" ]
import com.inubit.research.server.request.XMLHelper; import net.frapu.code.visualization.ProcessEdge; import org.w3c.dom.Document; import org.w3c.dom.Element;
import com.inubit.research.server.request.*; import net.frapu.code.visualization.*; import org.w3c.dom.*;
[ "com.inubit.research", "net.frapu.code", "org.w3c.dom" ]
com.inubit.research; net.frapu.code; org.w3c.dom;
1,784,137
@SuppressWarnings("unchecked") protected static <T> T toStream(final T iPojo, final Proxy iProxiedPojo, ODatabaseObject db) throws IllegalArgumentException, IllegalAccessException { final ODocument iRecord = getDocument(iProxiedPojo); final long timer = Orient.instance().getProfiler().startChrono(); final Integer identityRecord = System.identityHashCode(iPojo); if (OObjectSerializationThreadLocal.INSTANCE.get().containsKey(identityRecord)) return (T) OObjectSerializationThreadLocal.INSTANCE.get().get(identityRecord); OObjectSerializationThreadLocal.INSTANCE.get().put(identityRecord, iProxiedPojo); OProperty schemaProperty; final Class<?> pojoClass = iPojo.getClass(); final OClass schemaClass = iRecord.getSchemaClass(); // CHECK FOR ID BINDING final Field idField = getIdField(pojoClass); if (idField != null) { Object id = getFieldValue(idField, iPojo); if (id != null) { // FOUND if (id instanceof ORecordId) { iRecord.setIdentity((ORecordId) id); } else if (id instanceof Number) { // TREATS AS CLUSTER POSITION ((ORecordId) iRecord.getIdentity()).clusterId = schemaClass.getDefaultClusterId(); ((ORecordId) iRecord.getIdentity()).clusterPosition = OClusterPositionFactory.INSTANCE.valueOf(((Number) id).longValue()); } else if (id instanceof String) ((ORecordId) iRecord.getIdentity()).fromString((String) id); else if (id.getClass().equals(Object.class)) iRecord.setIdentity((ORecordId) id); else OLogManager.instance().warn(OObjectSerializerHelper.class, "@Id field has been declared as %s while the supported are: ORID, Number, String, Object", id.getClass()); } if (iRecord.getIdentity().isValid() && iRecord.getIdentity().isPersistent()) iRecord.reload(); } // CHECK FOR VERSION BINDING final Field vField = getVersionField(pojoClass); boolean versionConfigured = false; if (vField != null) { versionConfigured = true; Object ver = getFieldValue(vField, iPojo); if (ver != null) { // FOUND final ORecordVersion version = iRecord.getRecordVersion(); if (ver instanceof 
ORecordVersion) { version.copyFrom((ORecordVersion) ver); } else if (ver instanceof Number) { if (version instanceof OSimpleVersion) // TREATS AS CLUSTER POSITION version.setCounter(((Number) ver).intValue()); else OLogManager .instance() .warn(OObjectEntitySerializer.class, "@Version field can't be declared as Number in distributed mode. Should be one of following: String, Object, ORecordVersion"); } else if (ver instanceof String) { version.getSerializer().fromString((String) ver, version); } else if (ver.getClass().equals(Object.class)) version.copyFrom((ORecordVersion) ver); else OLogManager.instance().warn(OObjectSerializerHelper.class, "@Version field has been declared as %s while the supported are: Number, String, Object", ver.getClass()); } } if (db.isMVCC() && !versionConfigured && db.getTransaction() instanceof OTransactionOptimistic) throw new OTransactionException( "Cannot involve an object of class '" + pojoClass + "' in an Optimistic Transaction commit because it does not define @Version or @OVersion and therefore cannot handle MVCC"); String fieldName; Object fieldValue; // CALL BEFORE MARSHALLING invokeCallback(pojoClass, iPojo, iRecord, OBeforeSerialization.class); Class<?> currentClass = pojoClass; while (!currentClass.equals(Object.class) && classes.contains(pojoClass)) { for (Field p : currentClass.getDeclaredFields()) { if (Modifier.isStatic(p.getModifiers()) || Modifier.isNative(p.getModifiers()) || Modifier.isTransient(p.getModifiers()) || p.getType().isAnonymousClass()) continue; fieldName = p.getName(); List<String> classTransientFields = transientFields.get(pojoClass); if ((idField != null && fieldName.equals(idField.getName()) || (vField != null && fieldName.equals(vField.getName())) || (classTransientFields != null && classTransientFields .contains(fieldName)))) continue; fieldValue = getFieldValue(p, iPojo); if (fieldValue != null && fieldValue.getClass().isAnonymousClass()) continue; if (isSerializedType(p)) fieldValue = 
serializeFieldValue(p.getType(), fieldValue); schemaProperty = schemaClass != null ? schemaClass.getProperty(fieldName) : null; OType fieldType = schemaProperty != null ? schemaProperty.getType() : getTypeByClass(currentClass, fieldName); if (fieldValue != null) { if (isEmbeddedObject(p)) { // AUTO CREATE SCHEMA CLASS if (iRecord.getSchemaClass() == null) { db.getMetadata().getSchema().createClass(iPojo.getClass()); iRecord.setClassNameIfExists(iPojo.getClass().getSimpleName()); } } } fieldValue = typeToStream(fieldValue, fieldType, db, iRecord); iRecord.field(fieldName, fieldValue, fieldType); } currentClass = currentClass.getSuperclass(); if (currentClass == null || currentClass.equals(ODocument.class)) // POJO EXTENDS ODOCUMENT: SPECIAL CASE: AVOID TO CONSIDER // ODOCUMENT FIELDS currentClass = Object.class; } // CALL AFTER MARSHALLING invokeCallback(pojoClass, iPojo, iRecord, OAfterSerialization.class); OObjectSerializationThreadLocal.INSTANCE.get().remove(identityRecord); Orient.instance().getProfiler().stopChrono("Object.toStream", "Serialize a POJO", timer); return (T) iProxiedPojo; }
@SuppressWarnings(STR) static <T> T function(final T iPojo, final Proxy iProxiedPojo, ODatabaseObject db) throws IllegalArgumentException, IllegalAccessException { final ODocument iRecord = getDocument(iProxiedPojo); final long timer = Orient.instance().getProfiler().startChrono(); final Integer identityRecord = System.identityHashCode(iPojo); if (OObjectSerializationThreadLocal.INSTANCE.get().containsKey(identityRecord)) return (T) OObjectSerializationThreadLocal.INSTANCE.get().get(identityRecord); OObjectSerializationThreadLocal.INSTANCE.get().put(identityRecord, iProxiedPojo); OProperty schemaProperty; final Class<?> pojoClass = iPojo.getClass(); final OClass schemaClass = iRecord.getSchemaClass(); final Field idField = getIdField(pojoClass); if (idField != null) { Object id = getFieldValue(idField, iPojo); if (id != null) { if (id instanceof ORecordId) { iRecord.setIdentity((ORecordId) id); } else if (id instanceof Number) { ((ORecordId) iRecord.getIdentity()).clusterId = schemaClass.getDefaultClusterId(); ((ORecordId) iRecord.getIdentity()).clusterPosition = OClusterPositionFactory.INSTANCE.valueOf(((Number) id).longValue()); } else if (id instanceof String) ((ORecordId) iRecord.getIdentity()).fromString((String) id); else if (id.getClass().equals(Object.class)) iRecord.setIdentity((ORecordId) id); else OLogManager.instance().warn(OObjectSerializerHelper.class, STR, id.getClass()); } if (iRecord.getIdentity().isValid() && iRecord.getIdentity().isPersistent()) iRecord.reload(); } final Field vField = getVersionField(pojoClass); boolean versionConfigured = false; if (vField != null) { versionConfigured = true; Object ver = getFieldValue(vField, iPojo); if (ver != null) { final ORecordVersion version = iRecord.getRecordVersion(); if (ver instanceof ORecordVersion) { version.copyFrom((ORecordVersion) ver); } else if (ver instanceof Number) { if (version instanceof OSimpleVersion) version.setCounter(((Number) ver).intValue()); else OLogManager .instance() 
.warn(OObjectEntitySerializer.class, STR); } else if (ver instanceof String) { version.getSerializer().fromString((String) ver, version); } else if (ver.getClass().equals(Object.class)) version.copyFrom((ORecordVersion) ver); else OLogManager.instance().warn(OObjectSerializerHelper.class, STR, ver.getClass()); } } if (db.isMVCC() && !versionConfigured && db.getTransaction() instanceof OTransactionOptimistic) throw new OTransactionException( STR + pojoClass + STR); String fieldName; Object fieldValue; invokeCallback(pojoClass, iPojo, iRecord, OBeforeSerialization.class); Class<?> currentClass = pojoClass; while (!currentClass.equals(Object.class) && classes.contains(pojoClass)) { for (Field p : currentClass.getDeclaredFields()) { if (Modifier.isStatic(p.getModifiers()) Modifier.isNative(p.getModifiers()) Modifier.isTransient(p.getModifiers()) p.getType().isAnonymousClass()) continue; fieldName = p.getName(); List<String> classTransientFields = transientFields.get(pojoClass); if ((idField != null && fieldName.equals(idField.getName()) (vField != null && fieldName.equals(vField.getName())) (classTransientFields != null && classTransientFields .contains(fieldName)))) continue; fieldValue = getFieldValue(p, iPojo); if (fieldValue != null && fieldValue.getClass().isAnonymousClass()) continue; if (isSerializedType(p)) fieldValue = serializeFieldValue(p.getType(), fieldValue); schemaProperty = schemaClass != null ? schemaClass.getProperty(fieldName) : null; OType fieldType = schemaProperty != null ? 
schemaProperty.getType() : getTypeByClass(currentClass, fieldName); if (fieldValue != null) { if (isEmbeddedObject(p)) { if (iRecord.getSchemaClass() == null) { db.getMetadata().getSchema().createClass(iPojo.getClass()); iRecord.setClassNameIfExists(iPojo.getClass().getSimpleName()); } } } fieldValue = typeToStream(fieldValue, fieldType, db, iRecord); iRecord.field(fieldName, fieldValue, fieldType); } currentClass = currentClass.getSuperclass(); if (currentClass == null currentClass.equals(ODocument.class)) currentClass = Object.class; } invokeCallback(pojoClass, iPojo, iRecord, OAfterSerialization.class); OObjectSerializationThreadLocal.INSTANCE.get().remove(identityRecord); Orient.instance().getProfiler().stopChrono(STR, STR, timer); return (T) iProxiedPojo; }
/** * Serialize the user POJO to a ORecordDocument instance. * * @param iPojo * User pojo to serialize * @throws IllegalAccessException * @throws IllegalArgumentException */
Serialize the user POJO to a ORecordDocument instance
toStream
{ "repo_name": "delebash/orientdb-parent", "path": "object/src/main/java/com/orientechnologies/orient/object/enhancement/OObjectEntitySerializer.java", "license": "apache-2.0", "size": 54558 }
[ "com.orientechnologies.common.log.OLogManager", "com.orientechnologies.orient.core.Orient", "com.orientechnologies.orient.core.annotation.OAfterSerialization", "com.orientechnologies.orient.core.annotation.OBeforeSerialization", "com.orientechnologies.orient.core.db.object.ODatabaseObject", "com.orientechnologies.orient.core.exception.OTransactionException", "com.orientechnologies.orient.core.id.OClusterPositionFactory", "com.orientechnologies.orient.core.id.ORecordId", "com.orientechnologies.orient.core.metadata.schema.OClass", "com.orientechnologies.orient.core.metadata.schema.OProperty", "com.orientechnologies.orient.core.metadata.schema.OType", "com.orientechnologies.orient.core.record.impl.ODocument", "com.orientechnologies.orient.core.tx.OTransactionOptimistic", "com.orientechnologies.orient.core.version.ORecordVersion", "com.orientechnologies.orient.core.version.OSimpleVersion", "com.orientechnologies.orient.object.serialization.OObjectSerializationThreadLocal", "com.orientechnologies.orient.object.serialization.OObjectSerializerHelper", "java.lang.reflect.Field", "java.lang.reflect.Modifier", "java.util.List", "javassist.util.proxy.Proxy" ]
import com.orientechnologies.common.log.OLogManager; import com.orientechnologies.orient.core.Orient; import com.orientechnologies.orient.core.annotation.OAfterSerialization; import com.orientechnologies.orient.core.annotation.OBeforeSerialization; import com.orientechnologies.orient.core.db.object.ODatabaseObject; import com.orientechnologies.orient.core.exception.OTransactionException; import com.orientechnologies.orient.core.id.OClusterPositionFactory; import com.orientechnologies.orient.core.id.ORecordId; import com.orientechnologies.orient.core.metadata.schema.OClass; import com.orientechnologies.orient.core.metadata.schema.OProperty; import com.orientechnologies.orient.core.metadata.schema.OType; import com.orientechnologies.orient.core.record.impl.ODocument; import com.orientechnologies.orient.core.tx.OTransactionOptimistic; import com.orientechnologies.orient.core.version.ORecordVersion; import com.orientechnologies.orient.core.version.OSimpleVersion; import com.orientechnologies.orient.object.serialization.OObjectSerializationThreadLocal; import com.orientechnologies.orient.object.serialization.OObjectSerializerHelper; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.util.List; import javassist.util.proxy.Proxy;
import com.orientechnologies.common.log.*; import com.orientechnologies.orient.core.*; import com.orientechnologies.orient.core.annotation.*; import com.orientechnologies.orient.core.db.object.*; import com.orientechnologies.orient.core.exception.*; import com.orientechnologies.orient.core.id.*; import com.orientechnologies.orient.core.metadata.schema.*; import com.orientechnologies.orient.core.record.impl.*; import com.orientechnologies.orient.core.tx.*; import com.orientechnologies.orient.core.version.*; import com.orientechnologies.orient.object.serialization.*; import java.lang.reflect.*; import java.util.*; import javassist.util.proxy.*;
[ "com.orientechnologies.common", "com.orientechnologies.orient", "java.lang", "java.util", "javassist.util.proxy" ]
com.orientechnologies.common; com.orientechnologies.orient; java.lang; java.util; javassist.util.proxy;
1,878,918
public static void checkIndexClosing(ClusterState currentState, Set<IndexMetaData> indices) { RestoreInProgress restore = currentState.custom(RestoreInProgress.TYPE); if (restore != null) { Set<Index> indicesToFail = null; for (RestoreInProgress.Entry entry : restore.entries()) { for (ObjectObjectCursor<ShardId, RestoreInProgress.ShardRestoreStatus> shard : entry.shards()) { if (!shard.value.state().completed()) { IndexMetaData indexMetaData = currentState.metaData().index(shard.key.getIndex()); if (indexMetaData != null && indices.contains(indexMetaData)) { if (indicesToFail == null) { indicesToFail = new HashSet<>(); } indicesToFail.add(shard.key.getIndex()); } } } } if (indicesToFail != null) { throw new IllegalArgumentException("Cannot close indices that are being restored: " + indicesToFail); } } }
static void function(ClusterState currentState, Set<IndexMetaData> indices) { RestoreInProgress restore = currentState.custom(RestoreInProgress.TYPE); if (restore != null) { Set<Index> indicesToFail = null; for (RestoreInProgress.Entry entry : restore.entries()) { for (ObjectObjectCursor<ShardId, RestoreInProgress.ShardRestoreStatus> shard : entry.shards()) { if (!shard.value.state().completed()) { IndexMetaData indexMetaData = currentState.metaData().index(shard.key.getIndex()); if (indexMetaData != null && indices.contains(indexMetaData)) { if (indicesToFail == null) { indicesToFail = new HashSet<>(); } indicesToFail.add(shard.key.getIndex()); } } } } if (indicesToFail != null) { throw new IllegalArgumentException(STR + indicesToFail); } } }
/** * Check if any of the indices to be closed are currently being restored from a snapshot and fail closing if such an index * is found as closing an index that is being restored makes the index unusable (it cannot be recovered). */
Check if any of the indices to be closed are currently being restored from a snapshot and fail closing if such an index is found as closing an index that is being restored makes the index unusable (it cannot be recovered)
checkIndexClosing
{ "repo_name": "ricardocerq/elasticsearch", "path": "core/src/main/java/org/elasticsearch/snapshots/RestoreService.java", "license": "apache-2.0", "size": 56180 }
[ "com.carrotsearch.hppc.cursors.ObjectObjectCursor", "java.util.HashSet", "java.util.Map", "java.util.Set", "org.elasticsearch.cluster.ClusterState", "org.elasticsearch.cluster.RestoreInProgress", "org.elasticsearch.cluster.metadata.IndexMetaData", "org.elasticsearch.index.Index", "org.elasticsearch.index.shard.ShardId" ]
import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import java.util.HashSet; import java.util.Map; import java.util.Set; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.RestoreInProgress; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId;
import com.carrotsearch.hppc.cursors.*; import java.util.*; import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.metadata.*; import org.elasticsearch.index.*; import org.elasticsearch.index.shard.*;
[ "com.carrotsearch.hppc", "java.util", "org.elasticsearch.cluster", "org.elasticsearch.index" ]
com.carrotsearch.hppc; java.util; org.elasticsearch.cluster; org.elasticsearch.index;
452,997
public Date getValidateTo() { return validateTo; }
Date function() { return validateTo; }
/** * Gets the validate to date. * * @return the validate to date. */
Gets the validate to date
getValidateTo
{ "repo_name": "lorislab/appky", "path": "appky-application/src/main/java/org/lorislab/appky/application/tmpresource/criteria/TemporaryResourceSearchCriteria.java", "license": "apache-2.0", "size": 2498 }
[ "java.util.Date" ]
import java.util.Date;
import java.util.*;
[ "java.util" ]
java.util;
1,569,892
public boolean supportsConvert() throws SQLException { return true; }
boolean function() throws SQLException { return true; }
/** * (JDBC4 clarification:) * Retrieves whether this database supports the JDBC scalar function * <code>CONVERT</code> for the conversion of one JDBC type to another. * The JDBC types are the generic SQL data types defined * in <code>java.sql.Types</code>. * * <!-- start release-specific documentation --> * <div class="ReleaseSpecificDocumentation"> * <h3>HSQLDB-Specific Information:</h3> <p> * * HSQLDB supports conversions; this method always * returns <code>true</code>. * </div> * <!-- end release-specific documentation --> * * * @return <code>true</code> if so; <code>false</code> otherwise * @exception SQLException if a database access error occurs */
(JDBC4 clarification:) Retrieves whether this database supports the JDBC scalar function <code>CONVERT</code> for the conversion of one JDBC type to another. The JDBC types are the generic SQL data types defined in <code>java.sql.Types</code>. HSQLDB-Specific Information: HSQLDB supports conversions; this method always returns <code>true</code>.
supportsConvert
{ "repo_name": "ThangBK2009/android-source-browsing.platform--external--hsqldb", "path": "src/org/hsqldb/jdbc/JDBCDatabaseMetaData.java", "license": "bsd-3-clause", "size": 263631 }
[ "java.sql.SQLException" ]
import java.sql.SQLException;
import java.sql.*;
[ "java.sql" ]
java.sql;
765,456
public void enableKerberosAuthentication(final Subject subject) { m_subject = subject; }
void function(final Subject subject) { m_subject = subject; }
/** * <p>Enable Kerberos authentication with the provided subject credentials<p> * @param subject */
Enable Kerberos authentication with the provided subject credentials
enableKerberosAuthentication
{ "repo_name": "s-store/sstore-soft", "path": "src/frontend/org/voltdb/client/ClientConfig.java", "license": "gpl-3.0", "size": 12054 }
[ "javax.security.auth.Subject" ]
import javax.security.auth.Subject;
import javax.security.auth.*;
[ "javax.security" ]
javax.security;
2,783,062
public void verifyUser() throws NotAuthorizedException { String requestingUser = getRequestingUser(); if (requestingUser == null) { String msg = "No user found."; if (!UserGroupInformation.isSecurityEnabled()) { msg += " Missing " + PseudoAuthenticator.USER_NAME + " parameter."; } throw new NotAuthorizedException(msg); } if(doAs != null && !doAs.equals(requestingUser)) { ProxyUserSupport.validate(requestingUser, getRequestingHost(requestingUser, request), doAs); } } /** * All 'tasks' spawned by WebHCat should be run as this user. W/o doAs query parameter * this is just the user making the request (or * {@link org.apache.hadoop.security.authentication.client.PseudoAuthenticator#USER_NAME} * query param). * @return value of doAs query parameter or {@link #getRequestingUser()}
void function() throws NotAuthorizedException { String requestingUser = getRequestingUser(); if (requestingUser == null) { String msg = STR; if (!UserGroupInformation.isSecurityEnabled()) { msg += STR + PseudoAuthenticator.USER_NAME + STR; } throw new NotAuthorizedException(msg); } if(doAs != null && !doAs.equals(requestingUser)) { ProxyUserSupport.validate(requestingUser, getRequestingHost(requestingUser, request), doAs); } } /** * All 'tasks' spawned by WebHCat should be run as this user. W/o doAs query parameter * this is just the user making the request (or * {@link org.apache.hadoop.security.authentication.client.PseudoAuthenticator#USER_NAME} * query param). * @return value of doAs query parameter or {@link #getRequestingUser()}
/** * Verify that we have a valid user. Throw an exception if invalid. */
Verify that we have a valid user. Throw an exception if invalid
verifyUser
{ "repo_name": "nishantmonu51/hive", "path": "hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java", "license": "apache-2.0", "size": 44277 }
[ "org.apache.hadoop.security.UserGroupInformation", "org.apache.hadoop.security.authentication.client.PseudoAuthenticator" ]
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authentication.client.PseudoAuthenticator;
import org.apache.hadoop.security.*; import org.apache.hadoop.security.authentication.client.*;
[ "org.apache.hadoop" ]
org.apache.hadoop;
227,120
private synchronized void writeXmlFile(Document p_doc, String p_prefix) { try { FileOutputStream fos = new FileOutputStream( String.format("%s/%s_%s%s", m_strCmdLocation, p_prefix, (new DateTime()).toString(m_dateTimeFormatter), C_STR_SUFFIX)); // Using a serializer with indention set to 2 spaces, // write the XML document to the file Serializer output = new Serializer(fos, "ISO-8859-1"); output.setIndent(2); output.write(p_doc); fos.close(); } catch (FileNotFoundException e) { Logger.w(TAG, "could not write file", e); } catch (UnsupportedEncodingException e) { Logger.w(TAG, "unsupported encoding exception", e); } catch (IOException e) { Logger.w(TAG, "IO Exception", e); } cleanOldFiles(p_prefix); }
synchronized void function(Document p_doc, String p_prefix) { try { FileOutputStream fos = new FileOutputStream( String.format(STR, m_strCmdLocation, p_prefix, (new DateTime()).toString(m_dateTimeFormatter), C_STR_SUFFIX)); Serializer output = new Serializer(fos, STR); output.setIndent(2); output.write(p_doc); fos.close(); } catch (FileNotFoundException e) { Logger.w(TAG, STR, e); } catch (UnsupportedEncodingException e) { Logger.w(TAG, STR, e); } catch (IOException e) { Logger.w(TAG, STR, e); } cleanOldFiles(p_prefix); }
/** * Write the XML file out and clean up old ones * * @param p_doc * @param p_prefix */
Write the XML file out and clean up old ones
writeXmlFile
{ "repo_name": "mkurdziel/BitHome-Hub-Java-V1", "path": "Controller/src/synet/controller/messaging/MsgAdapterNet.java", "license": "apache-2.0", "size": 36750 }
[ "java.io.FileNotFoundException", "java.io.FileOutputStream", "java.io.IOException", "java.io.UnsupportedEncodingException", "nu.xom.Document", "nu.xom.Serializer", "org.joda.time.DateTime" ]
import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.UnsupportedEncodingException; import nu.xom.Document; import nu.xom.Serializer; import org.joda.time.DateTime;
import java.io.*; import nu.xom.*; import org.joda.time.*;
[ "java.io", "nu.xom", "org.joda.time" ]
java.io; nu.xom; org.joda.time;
1,141,769
/**
 * Builds and returns the final URL: the base URL plus an assembled query
 * string. The event parameter (when present) is handled specially: it is
 * rendered as {@code <eventName>=} with an empty value. Any parameter whose
 * validation metadata marks it encrypted is encrypted before URL-encoding.
 *
 * @return the complete URL as a String
 */
protected String build() {
    // special handling for event parameter: prepend it so it is emitted first
    List<Parameter> parameters = new ArrayList<Parameter>(this.parameters.size() + 1);
    if (this.event != null) {
        parameters.add(this.event);
    }
    parameters.addAll(this.parameters);

    // lookup validation info for the bean class to find encrypted properties
    Map<String, ValidationMetadata> validations = getValidationMetadata();

    StringBuilder buffer = new StringBuilder(256);
    buffer.append(getBaseURL(this.baseUrl, parameters));
    // the base URL may already carry a query string
    boolean seenQuestionMark = buffer.indexOf("?") != -1;
    for (Parameter param : parameters) {
        // special handling for event parameter: its value IS the event name,
        // emitted as "name=" with an empty value; skipped entirely when null
        if (param == this.event) {
            if (param.value == null)
                continue;
            else
                param = new Parameter((String) this.event.value, "");
        }

        // Figure out whether we already have params or not
        if (!seenQuestionMark) {
            buffer.append('?');
            seenQuestionMark = true;
        }
        else {
            buffer.append(getParameterSeparator());
        }
        buffer.append(StringUtil.urlEncode(param.name)).append('=');
        if (param.value != null) {
            ValidationMetadata validation = validations.get(param.name);
            String formatted = format(param.value);
            // encrypted properties must be encrypted BEFORE URL-encoding
            if (validation != null && validation.encrypted())
                formatted = CryptoUtil.encrypt(formatted);
            buffer.append(StringUtil.urlEncode(formatted));
        }
    }
    return buffer.toString();
}
String function() { List<Parameter> parameters = new ArrayList<Parameter>(this.parameters.size() + 1); if (this.event != null) { parameters.add(this.event); } parameters.addAll(this.parameters); Map<String, ValidationMetadata> validations = getValidationMetadata(); StringBuilder buffer = new StringBuilder(256); buffer.append(getBaseURL(this.baseUrl, parameters)); boolean seenQuestionMark = buffer.indexOf("?") != -1; for (Parameter param : parameters) { if (param == this.event) { if (param.value == null) continue; else param = new Parameter((String) this.event.value, ""); } if (!seenQuestionMark) { buffer.append('?'); seenQuestionMark = true; } else { buffer.append(getParameterSeparator()); } buffer.append(StringUtil.urlEncode(param.name)).append('='); if (param.value != null) { ValidationMetadata validation = validations.get(param.name); String formatted = format(param.value); if (validation != null && validation.encrypted()) formatted = CryptoUtil.encrypt(formatted); buffer.append(StringUtil.urlEncode(formatted)); } } return buffer.toString(); }
/** * Build and return the URL */
Build and return the URL
build
{ "repo_name": "nkasvosve/beyondj", "path": "beyondj-third-party/stripes-master/stripes/src/main/java/net/sourceforge/stripes/util/UrlBuilder.java", "license": "apache-2.0", "size": 22007 }
[ "java.util.ArrayList", "java.util.List", "java.util.Map", "net.sourceforge.stripes.validation.ValidationMetadata" ]
import java.util.ArrayList; import java.util.List; import java.util.Map; import net.sourceforge.stripes.validation.ValidationMetadata;
import java.util.*; import net.sourceforge.stripes.validation.*;
[ "java.util", "net.sourceforge.stripes" ]
java.util; net.sourceforge.stripes;
1,832,879
/**
 * Declares the property accessed by {@code getprop} on the owner's object
 * type if it has not been declared yet. This handles properties declared on
 * objects whose type is inferred and therefore not known to
 * {@code TypedScopeCreator}.
 */
private void ensurePropertyDeclared(Node getprop) {
    // Strip null/undefined from the receiver's type, then cast to an object type.
    ObjectType ownerType = ObjectType.cast(
        getJSType(getprop.getFirstChild()).restrictByNotNullOrUndefined());
    if (ownerType == null) {
        // Receiver is not an object type; nothing to declare.
        return;
    }
    ensurePropertyDeclaredHelper(getprop, ownerType);
}
void function(Node getprop) { ObjectType ownerType = ObjectType.cast( getJSType(getprop.getFirstChild()).restrictByNotNullOrUndefined()); if (ownerType != null) { ensurePropertyDeclaredHelper(getprop, ownerType); } }
/** * Defines a declared property if it has not been defined yet. * * This handles the case where a property is declared on an object where * the object type is inferred, and so the object type will not * be known in {@code TypedScopeCreator}. */
Defines a declared property if it has not been defined yet. This handles the case where a property is declared on an object where the object type is inferred, and so the object type will not be known in TypedScopeCreator
ensurePropertyDeclared
{ "repo_name": "abdullah38rcc/closure-compiler", "path": "src/com/google/javascript/jscomp/TypeInference.java", "license": "apache-2.0", "size": 55059 }
[ "com.google.javascript.rhino.Node", "com.google.javascript.rhino.jstype.ObjectType" ]
import com.google.javascript.rhino.Node; import com.google.javascript.rhino.jstype.ObjectType;
import com.google.javascript.rhino.*; import com.google.javascript.rhino.jstype.*;
[ "com.google.javascript" ]
com.google.javascript;
2,088,815
/**
 * Visits a {@link RefType} node on entry.
 *
 * @param type the reference type being entered
 * @return always true, so the children of this node are visited
 */
public boolean enterRef(RefType type) {
    return true;
}
boolean function(RefType type) { return true; }
/** * Implementors must return true if the children must be visited, false otherwise. */
Implementors must return true if the children must be visited, false otherwise
enterRef
{ "repo_name": "ewanld/johnson-runtime", "path": "johnson-codegen/src/main/java/com/github/johnson/codegen/JohnsonTypeVisitor.java", "license": "mit", "size": 3220 }
[ "com.github.johnson.codegen.types.RefType" ]
import com.github.johnson.codegen.types.RefType;
import com.github.johnson.codegen.types.*;
[ "com.github.johnson" ]
com.github.johnson;
1,903,280
/**
 * Retrieves a single customer order by id.
 *
 * @param orderId identifier of the order to fetch
 * @return the matching {@link CustomerOrder}
 *         (behaviour when no order matches is implementation-defined —
 *         TODO confirm null vs. fault with the service implementation)
 */
public CustomerOrder getOrder(@WebParam(name = "orderId") String orderId);
CustomerOrder function(@WebParam(name = STR) String orderId);
/** * Get an Order * * @param orderId * @return */
Get an Order
getOrder
{ "repo_name": "rhtconsulting/fuse-quickstarts", "path": "karaf/soap_secure/src/main/java/com/redhat/consulting/fusequickstarts/karaf/soap/secure/service/ws/CustomerOrderService.java", "license": "apache-2.0", "size": 901 }
[ "com.redhat.consulting.fusequickstarts.karaf.soap.secure.model.CustomerOrder", "javax.jws.WebParam" ]
import com.redhat.consulting.fusequickstarts.karaf.soap.secure.model.CustomerOrder; import javax.jws.WebParam;
import com.redhat.consulting.fusequickstarts.karaf.soap.secure.model.*; import javax.jws.*;
[ "com.redhat.consulting", "javax.jws" ]
com.redhat.consulting; javax.jws;
1,361,188
/**
 * Sets the credential provider for the default AWS SDK metric
 * implementation; pass null to fall back to the default. Calling this
 * method may result in the credential provider being different from the
 * credential file property. Synchronized: guards the shared static
 * {@code credentialProvider} field.
 *
 * @param provider the provider to use, or null for the default
 */
public static synchronized void setCredentialProvider(
        AWSCredentialsProvider provider) {
    credentialProvider = provider;
}

/**
 * Returns the region configured for the default AWS SDK metric collector;
 * or null if the default is to be used.
 *
 * @throws IllegalArgumentException when using a region not included in
 * {@link Regions}
 *
 * @deprecated Use {@link #getRegionName()}
static synchronized void function( AWSCredentialsProvider provider) { credentialProvider = provider; } /** * Returns the region configured for the default AWS SDK metric collector; * or null if the default is to be used. * * @throws IllegalArgumentException when using a region not included in * {@link Regions} * * @deprecated Use {@link #getRegionName()}
/** * Sets the credential provider for the default AWS SDK metric * implementation; or null if the default is to be used. Calling this method * may result in the credential provider being different from the credential * file property. */
Sets the credential provider for the default AWS SDK metric implementation; or null if the default is to be used. Calling this method may result in the credential provider being different from the credential file property
setCredentialProvider
{ "repo_name": "loremipsumdolor/CastFast", "path": "src/com/amazonaws/metrics/AwsSdkMetrics.java", "license": "mit", "size": 36416 }
[ "com.amazonaws.auth.AWSCredentialsProvider", "com.amazonaws.regions.Regions" ]
import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.regions.Regions;
import com.amazonaws.auth.*; import com.amazonaws.regions.*;
[ "com.amazonaws.auth", "com.amazonaws.regions" ]
com.amazonaws.auth; com.amazonaws.regions;
2,684,785
/**
 * Populates a bean from the HTTP request parameters, with locale-aware
 * date-format handling. Thin delegate to
 * {@link BeanUtil#populate(Object, HttpServletRequest, Locale)}.
 *
 * @param bean    bean to populate
 * @param request http request carrying the parameter values
 * @param locale  locale used when parsing date values
 */
protected void populate( Object bean, HttpServletRequest request, Locale locale )
{
    BeanUtil.populate( bean, request, locale );
}
void function( Object bean, HttpServletRequest request, Locale locale ) { BeanUtil.populate( bean, request, locale ); }
/** * Populate a bean using parameters in http request, with locale date format controls * * @param bean * bean to populate * @param request * http request * @param locale * the locale */
Populate a bean using parameters in http request, with locale date format controls
populate
{ "repo_name": "lutece-platform/lutece-core", "path": "src/java/fr/paris/lutece/portal/util/mvc/xpage/MVCApplication.java", "license": "bsd-3-clause", "size": 28327 }
[ "fr.paris.lutece.util.bean.BeanUtil", "java.util.Locale", "javax.servlet.http.HttpServletRequest" ]
import fr.paris.lutece.util.bean.BeanUtil; import java.util.Locale; import javax.servlet.http.HttpServletRequest;
import fr.paris.lutece.util.bean.*; import java.util.*; import javax.servlet.http.*;
[ "fr.paris.lutece", "java.util", "javax.servlet" ]
fr.paris.lutece; java.util; javax.servlet;
552,466
/**
 * Finds the auto-inventory (AI) platform corresponding to an appdef platform.
 *
 * @param sessionId  authenticated session identifier
 * @param platformID id of the appdef platform
 * @return the matching {@link AIPlatformValue}
 * @throws SessionNotFoundException  if the session does not exist
 * @throws SessionTimeoutException   if the session has expired
 * @throws PermissionException       if the caller lacks permission
 * @throws PlatformNotFoundException if no platform matches the given id
 */
public AIPlatformValue findAIPlatformByPlatformID(int sessionId, Integer platformID)
    throws SessionNotFoundException, SessionTimeoutException, PermissionException,
           PlatformNotFoundException;
AIPlatformValue function(int sessionId, Integer platformID) throws SessionNotFoundException, SessionTimeoutException, PermissionException, PlatformNotFoundException;
/** * Find an AI Platform from an appdef platform */
Find an AI Platform from an appdef platform
findAIPlatformByPlatformID
{ "repo_name": "cc14514/hq6", "path": "hq-server/src/main/java/org/hyperic/hq/bizapp/shared/AIBoss.java", "license": "unlicense", "size": 12806 }
[ "org.hyperic.hq.appdef.shared.AIPlatformValue", "org.hyperic.hq.appdef.shared.PlatformNotFoundException", "org.hyperic.hq.auth.shared.SessionNotFoundException", "org.hyperic.hq.auth.shared.SessionTimeoutException", "org.hyperic.hq.authz.shared.PermissionException" ]
import org.hyperic.hq.appdef.shared.AIPlatformValue; import org.hyperic.hq.appdef.shared.PlatformNotFoundException; import org.hyperic.hq.auth.shared.SessionNotFoundException; import org.hyperic.hq.auth.shared.SessionTimeoutException; import org.hyperic.hq.authz.shared.PermissionException;
import org.hyperic.hq.appdef.shared.*; import org.hyperic.hq.auth.shared.*; import org.hyperic.hq.authz.shared.*;
[ "org.hyperic.hq" ]
org.hyperic.hq;
2,001,805
/**
 * Installs the Metal look and feel with the application's default theme.
 * Wraps the checked Swing exception in the application's runtime exception.
 */
private void loadDefaultLookAndFeelTheme() {
    try {
        // The theme must be registered before the look and feel is installed.
        MetalLookAndFeel.setCurrentTheme(new ExecuteQueryTheme());
        final MetalLookAndFeel metalLookAndFeel = new MetalLookAndFeel();
        UIManager.setLookAndFeel(metalLookAndFeel);
    } catch (UnsupportedLookAndFeelException e) {
        throw new ApplicationException(e);
    }
}
void function() { try { MetalLookAndFeel.setCurrentTheme(new ExecuteQueryTheme()); UIManager.setLookAndFeel(new MetalLookAndFeel()); } catch (UnsupportedLookAndFeelException e) { throw new ApplicationException(e); } }
/** * Sets the default look and feel theme on Metal. */
Sets the default look and feel theme on Metal
loadDefaultLookAndFeelTheme
{ "repo_name": "toxeh/ExecuteQuery", "path": "java/src/org/executequery/util/LookAndFeelLoader.java", "license": "gpl-3.0", "size": 7444 }
[ "javax.swing.UIManager", "javax.swing.UnsupportedLookAndFeelException", "javax.swing.plaf.metal.MetalLookAndFeel", "org.executequery.ApplicationException", "org.executequery.plaf.ExecuteQueryTheme" ]
import javax.swing.UIManager; import javax.swing.UnsupportedLookAndFeelException; import javax.swing.plaf.metal.MetalLookAndFeel; import org.executequery.ApplicationException; import org.executequery.plaf.ExecuteQueryTheme;
import javax.swing.*; import javax.swing.plaf.metal.*; import org.executequery.*; import org.executequery.plaf.*;
[ "javax.swing", "org.executequery", "org.executequery.plaf" ]
javax.swing; org.executequery; org.executequery.plaf;
1,162,566
protected ILaunchConfiguration[] getLaunchConfigurations(IResource resource) { final List<ILaunchConfiguration> configurations = new ArrayList<ILaunchConfiguration>(); final ILaunchManager manager = DebugPlugin.getDefault().getLaunchManager(); final ILaunchConfigurationType type = manager .getLaunchConfigurationType(getLaunchConfigurationTypeID()); // try to find existing configurations using the same file try { for (ILaunchConfiguration configuration : manager.getLaunchConfigurations(type)) { if (configuration.hasAttribute(AbstractDSLLaunchConfigurationDelegate.RESOURCE_URI)) { final String pathString = configuration.getAttribute( AbstractDSLLaunchConfigurationDelegate.RESOURCE_URI, ""); try { IFile file = ResourcesPlugin.getWorkspace().getRoot().getFile(new Path(pathString)); if (resource != null && resource.equals(file)) { configurations.add(configuration); } } catch (IllegalArgumentException e) { Activator.getDefault().error(e); } } } } catch (CoreException e) { // could not load configurations, ignore e.toString(); } return configurations.toArray(new ILaunchConfiguration[configurations.size()]); } /** * Launch a resource. Try to launch using a launch configuration. Used for contextual launches * * @param file * source file * @param firstInstruction * the first {@link EObject instruction}
ILaunchConfiguration[] function(IResource resource) { final List<ILaunchConfiguration> configurations = new ArrayList<ILaunchConfiguration>(); final ILaunchManager manager = DebugPlugin.getDefault().getLaunchManager(); final ILaunchConfigurationType type = manager .getLaunchConfigurationType(getLaunchConfigurationTypeID()); try { for (ILaunchConfiguration configuration : manager.getLaunchConfigurations(type)) { if (configuration.hasAttribute(AbstractDSLLaunchConfigurationDelegate.RESOURCE_URI)) { final String pathString = configuration.getAttribute( AbstractDSLLaunchConfigurationDelegate.RESOURCE_URI, ""); try { IFile file = ResourcesPlugin.getWorkspace().getRoot().getFile(new Path(pathString)); if (resource != null && resource.equals(file)) { configurations.add(configuration); } } catch (IllegalArgumentException e) { Activator.getDefault().error(e); } } } } catch (CoreException e) { e.toString(); } return configurations.toArray(new ILaunchConfiguration[configurations.size()]); } /** * Launch a resource. Try to launch using a launch configuration. Used for contextual launches * * @param file * source file * @param firstInstruction * the first {@link EObject instruction}
/** * Get all {@link ILaunchConfiguration} that target the given {@link IResource}. * * @param resource * root file to execute * @return {@link ILaunchConfiguration}s using resource */
Get all <code>ILaunchConfiguration</code> that target the given <code>IResource</code>
getLaunchConfigurations
{ "repo_name": "SiriusLab/SiriusAnimator", "path": "simulationmodelanimation/plugins/org.eclipse.gemoc.dsl.debug.ide.ui/src/org/eclipse/gemoc/dsl/debug/ide/ui/launch/AbstractDSLLaunchConfigurationDelegateUI.java", "license": "epl-1.0", "size": 11406 }
[ "java.util.ArrayList", "java.util.List", "org.eclipse.core.resources.IFile", "org.eclipse.core.resources.IResource", "org.eclipse.core.resources.ResourcesPlugin", "org.eclipse.core.runtime.CoreException", "org.eclipse.core.runtime.Path", "org.eclipse.debug.core.DebugPlugin", "org.eclipse.debug.core.ILaunchConfiguration", "org.eclipse.debug.core.ILaunchConfigurationType", "org.eclipse.debug.core.ILaunchManager", "org.eclipse.emf.ecore.EObject", "org.eclipse.gemoc.dsl.debug.ide.Activator", "org.eclipse.gemoc.dsl.debug.ide.launch.AbstractDSLLaunchConfigurationDelegate" ]
import java.util.ArrayList; import java.util.List; import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IResource; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.Path; import org.eclipse.debug.core.DebugPlugin; import org.eclipse.debug.core.ILaunchConfiguration; import org.eclipse.debug.core.ILaunchConfigurationType; import org.eclipse.debug.core.ILaunchManager; import org.eclipse.emf.ecore.EObject; import org.eclipse.gemoc.dsl.debug.ide.Activator; import org.eclipse.gemoc.dsl.debug.ide.launch.AbstractDSLLaunchConfigurationDelegate;
import java.util.*; import org.eclipse.core.resources.*; import org.eclipse.core.runtime.*; import org.eclipse.debug.core.*; import org.eclipse.emf.ecore.*; import org.eclipse.gemoc.dsl.debug.ide.*; import org.eclipse.gemoc.dsl.debug.ide.launch.*;
[ "java.util", "org.eclipse.core", "org.eclipse.debug", "org.eclipse.emf", "org.eclipse.gemoc" ]
java.util; org.eclipse.core; org.eclipse.debug; org.eclipse.emf; org.eclipse.gemoc;
2,495,005
/**
 * <b>DOM</b>: Implements {@link
 * org.w3c.dom.css.CSSValue#setCssText(String)}.
 *
 * @param cssText the new CSS text for this value
 * @throws DOMException NO_MODIFICATION_ALLOWED_ERR when no modification
 *         handler is installed (this value is read-only)
 */
public void setCssText(String cssText) throws DOMException {
    if (handler == null) {
        throw new DOMException
            (DOMException.NO_MODIFICATION_ALLOWED_ERR, "");
    } else {
        // Forces the current value to be computed before it is replaced
        // (presumably so a consistent old value exists) -- TODO confirm
        getValue();
        handler.topTextChanged(cssText);
    }
}
void function(String cssText) throws DOMException { if (handler == null) { throw new DOMException (DOMException.NO_MODIFICATION_ALLOWED_ERR, ""); } else { getValue(); handler.topTextChanged(cssText); } }
/** * <b>DOM</b>: Implements {@link * org.w3c.dom.css.CSSValue#setCssText(String)}. */
DOM: Implements <code>org.w3c.dom.css.CSSValue#setCssText(String)</code>
setCssText
{ "repo_name": "Squeegee/batik", "path": "sources/org/apache/batik/css/dom/CSSOMValue.java", "license": "apache-2.0", "size": 46308 }
[ "org.w3c.dom.DOMException" ]
import org.w3c.dom.DOMException;
import org.w3c.dom.*;
[ "org.w3c.dom" ]
org.w3c.dom;
271,684
/**
 * Returns the path fragment from the exec root to the actual root. For
 * source roots, this returns the empty fragment.
 *
 * @return the exec path of this root
 */
public PathFragment getExecPath() {
    return execPath;
}
PathFragment function() { return execPath; }
/** * Returns the path fragment from the exec root to the actual root. For source roots, this returns * the empty fragment. */
Returns the path fragment from the exec root to the actual root. For source roots, this returns the empty fragment
getExecPath
{ "repo_name": "cushon/bazel", "path": "src/main/java/com/google/devtools/build/lib/actions/ArtifactRoot.java", "license": "apache-2.0", "size": 10549 }
[ "com.google.devtools.build.lib.vfs.PathFragment" ]
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.*;
[ "com.google.devtools" ]
com.google.devtools;
391,345
/**
 * Destroys the entry on the remote node and propagates the resulting
 * version tag back into the event.
 *
 * @param recipient        member id of the receiver of the message
 * @param bucketId         identity of the bucket (not used here; kept for
 *                         the caller contract — TODO confirm)
 * @param event            the event prompting this request
 * @param expectedOldValue if not null, destroy only if the entry exists and
 *                         its current value equals expectedOldValue
 * @throws EntryNotFoundException  if entry not found OR expectedOldValue is
 *                                 non-null and doesn't equal the current value
 * @throws PrimaryBucketException  if the bucket on that node is not primary
 * @throws ForceReattemptException if the peer is no longer available
 */
public void destroyRemotely(DistributedMember recipient, Integer bucketId, EntryEventImpl event,
        Object expectedOldValue)
        throws EntryNotFoundException, PrimaryBucketException, ForceReattemptException {
    DestroyResponse response = DestroyMessage.send(recipient, this, event, expectedOldValue);
    if (response != null) {
        this.prStats.incPartitionMessagesSent();
        try {
            response.waitForCacheException();
            // record the remote version so the local event stays consistent
            event.setVersionTag(response.getVersionTag());
        } catch (EntryNotFoundException enfe) {
            // part of the documented contract: rethrow unchanged
            throw enfe;
        } catch (TransactionDataNotColocatedException enfe) {
            throw enfe;
        } catch (TransactionDataRebalancedException e) {
            throw e;
        } catch (CacheException ce) {
            // any other cache failure is wrapped with the recipient for context
            throw new PartitionedRegionException(
                String.format("Destroy of entry on %s failed", recipient), ce);
        } catch (RegionDestroyedException ignore) {
            // re-throw against THIS region's identity, not the remote one's
            throw new RegionDestroyedException(toString(), getFullPath());
        }
    }
}
void function(DistributedMember recipient, Integer bucketId, EntryEventImpl event, Object expectedOldValue) throws EntryNotFoundException, PrimaryBucketException, ForceReattemptException { DestroyResponse response = DestroyMessage.send(recipient, this, event, expectedOldValue); if (response != null) { this.prStats.incPartitionMessagesSent(); try { response.waitForCacheException(); event.setVersionTag(response.getVersionTag()); } catch (EntryNotFoundException enfe) { throw enfe; } catch (TransactionDataNotColocatedException enfe) { throw enfe; } catch (TransactionDataRebalancedException e) { throw e; } catch (CacheException ce) { throw new PartitionedRegionException( String.format(STR, recipient), ce); } catch (RegionDestroyedException ignore) { throw new RegionDestroyedException(toString(), getFullPath()); } } }
/** * Destroy the entry on the remote node. * * @param recipient the member id of the receiver of the message * @param bucketId the idenity of the bucket * @param event the event prompting this request * @param expectedOldValue if not null, then destroy only if entry exists and current value is * equal to expectedOldValue * @throws EntryNotFoundException if entry not found OR if expectedOldValue is non-null and * doesn't equal the current value * @throws PrimaryBucketException if the bucket on that node is not the primary copy * @throws ForceReattemptException if the peer is no longer available */
Destroy the entry on the remote node
destroyRemotely
{ "repo_name": "davinash/geode", "path": "geode-core/src/main/java/org/apache/geode/internal/cache/PartitionedRegion.java", "license": "apache-2.0", "size": 383155 }
[ "org.apache.geode.cache.CacheException", "org.apache.geode.cache.EntryNotFoundException", "org.apache.geode.cache.RegionDestroyedException", "org.apache.geode.cache.TransactionDataNotColocatedException", "org.apache.geode.cache.TransactionDataRebalancedException", "org.apache.geode.distributed.DistributedMember", "org.apache.geode.internal.cache.partitioned.DestroyMessage" ]
import org.apache.geode.cache.CacheException; import org.apache.geode.cache.EntryNotFoundException; import org.apache.geode.cache.RegionDestroyedException; import org.apache.geode.cache.TransactionDataNotColocatedException; import org.apache.geode.cache.TransactionDataRebalancedException; import org.apache.geode.distributed.DistributedMember; import org.apache.geode.internal.cache.partitioned.DestroyMessage;
import org.apache.geode.cache.*; import org.apache.geode.distributed.*; import org.apache.geode.internal.cache.partitioned.*;
[ "org.apache.geode" ]
org.apache.geode;
2,354,513
/**
 * Returns the object types this field type is allowed to reference, if so
 * restricted.
 *
 * @return unmodifiable view of the allowed storage IDs, or null when there
 *         is no restriction
 */
public SortedSet<Integer> getObjectTypes() {
    if (this.objectTypes == null) {
        return null;
    }
    return Collections.unmodifiableSortedSet(this.objectTypes);
}

// FieldType
SortedSet<Integer> function() { return objectTypes != null ? Collections.unmodifiableSortedSet(this.objectTypes) : null; }
/** * Get the object types this field type is allowed to reference, if so restricted. * * @return storage IDs of allowed object types, or null if there is no restriction */
Get the object types this field type is allowed to reference, if so restricted
getObjectTypes
{ "repo_name": "tempbottle/jsimpledb", "path": "src/java/org/jsimpledb/core/ReferenceFieldType.java", "license": "apache-2.0", "size": 2877 }
[ "java.util.Collections", "java.util.SortedSet" ]
import java.util.Collections; import java.util.SortedSet;
import java.util.*;
[ "java.util" ]
java.util;
891,107
/**
 * Main entry point: authenticates against Azure using the credential file
 * referenced by the AZURE_AUTH_LOCATION environment variable, prints the
 * selected subscription, then runs the sample. Failures are printed to
 * stdout and swallowed — this is a sample launcher, not library code.
 *
 * @param args parameters (unused)
 */
public static void main(String[] args) {
    try {
        //=============================================================
        // Authenticate
        final File credFile = new File(System.getenv("AZURE_AUTH_LOCATION"));
        Azure azure = Azure
                .configure()
                .withLogLevel(LogLevel.BODY.withPrettyJson(true))
                .authenticate(credFile)
                .withDefaultSubscription();

        // Print selected subscription
        System.out.println("Selected subscription: " + azure.subscriptionId());

        runSample(azure);
    } catch (Exception e) {
        System.out.println(e.getMessage());
        e.printStackTrace();
    }
}

// Private constructor: sample launcher class, never instantiated.
private ManageInternetFacingLoadBalancer() {
}
static void function(String[] args) { try { final File credFile = new File(System.getenv(STR)); Azure azure = Azure .configure() .withLogLevel(LogLevel.BODY.withPrettyJson(true)) .authenticate(credFile) .withDefaultSubscription(); System.out.println(STR + azure.subscriptionId()); runSample(azure); } catch (Exception e) { System.out.println(e.getMessage()); e.printStackTrace(); } } private ManageInternetFacingLoadBalancer() { }
/** * Main entry point. * @param args parameters */
Main entry point
main
{ "repo_name": "anudeepsharma/azure-sdk-for-java", "path": "azure-samples/src/main/java/com/microsoft/azure/management/network/samples/ManageInternetFacingLoadBalancer.java", "license": "mit", "size": 25439 }
[ "com.microsoft.azure.management.Azure", "com.microsoft.rest.LogLevel", "java.io.File" ]
import com.microsoft.azure.management.Azure; import com.microsoft.rest.LogLevel; import java.io.File;
import com.microsoft.azure.management.*; import com.microsoft.rest.*; import java.io.*;
[ "com.microsoft.azure", "com.microsoft.rest", "java.io" ]
com.microsoft.azure; com.microsoft.rest; java.io;
235,640
/**
 * Prepares an in-memory Elasticsearch client for a unit test. Do not forget
 * to call {@link #finalizeESClientForUnitTest()} at the end of the test!
 * Uses a local node so the test never joins an existing cluster on the
 * network; on any failure the partially-created resources are torn down
 * before the exception is rethrown.
 *
 * @return the local test {@link Client}
 * @throws Exception if the temporary folder or the node cannot be created
 */
public final Client prepareESClientForUnitTest() throws Exception {
    try {
        // For unit tests it is recommended to use a local node.
        // This ensures the node will never join an existing cluster on the network.

        // path.data location: recreated fresh for every run
        tempFolder = new File("tmp");
        String tempFolderName = tempFolder.getCanonicalPath();

        if (tempFolder.exists()) {
            FileUtils.deleteDirectory(tempFolder);
        }
        if (!tempFolder.mkdir()) {
            throw new IOException("Could not create a temporary folder [" + tempFolderName + "]");
        }

        // Make sure that the index and metadata are not stored on the disk.
        // The path.data folder is created but removed after the test finishes.
        Settings settings = org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder()
                .put("index.store.type", "memory").put("gateway.type", "none").put("http.enabled", "false")
                .put("path.data", tempFolderName).put("node.river", "_none_").build();

        node = NodeBuilder.nodeBuilder().settings(settings).local(true).node();
        client = node.client();
        // wait a moment for initialization
        Thread.sleep(100);
        return client;
    } catch (Exception e) {
        // tear down whatever was created before propagating the failure
        finalizeESClientForUnitTest();
        throw e;
    }
}
final Client function() throws Exception { try { tempFolder = new File("tmp"); String tempFolderName = tempFolder.getCanonicalPath(); if (tempFolder.exists()) { FileUtils.deleteDirectory(tempFolder); } if (!tempFolder.mkdir()) { throw new IOException(STR + tempFolderName + "]"); } Settings settings = org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder() .put(STR, STR).put(STR, "none").put(STR, "false") .put(STR, tempFolderName).put(STR, STR).build(); node = NodeBuilder.nodeBuilder().settings(settings).local(true).node(); client = node.client(); Thread.sleep(100); return client; } catch (Exception e) { finalizeESClientForUnitTest(); throw e; } }
/** * Prepare ES in-memory client for unit test. Do not forgot to call {@link #finalizeESClientForUnitTest()} at the end * of test! * * @return * @throws Exception */
Prepare ES in-memory client for unit test. Do not forgot to call <code>#finalizeESClientForUnitTest()</code> at the end of test
prepareESClientForUnitTest
{ "repo_name": "searchisko/elasticsearch-river-sysinfo", "path": "src/test/java/org/jboss/elasticsearch/river/sysinfo/testtools/ESRealClientTestBase.java", "license": "apache-2.0", "size": 3320 }
[ "java.io.File", "java.io.IOException", "org.apache.commons.io.FileUtils", "org.elasticsearch.client.Client", "org.elasticsearch.common.settings.Settings", "org.elasticsearch.node.NodeBuilder" ]
import java.io.File; import java.io.IOException; import org.apache.commons.io.FileUtils; import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.node.NodeBuilder;
import java.io.*; import org.apache.commons.io.*; import org.elasticsearch.client.*; import org.elasticsearch.common.settings.*; import org.elasticsearch.node.*;
[ "java.io", "org.apache.commons", "org.elasticsearch.client", "org.elasticsearch.common", "org.elasticsearch.node" ]
java.io; org.apache.commons; org.elasticsearch.client; org.elasticsearch.common; org.elasticsearch.node;
307,001
/**
 * Deletes every share-member child element from the given document.
 * <p>
 * {@link Document#getElementsByTagName} returns a <em>live</em> NodeList:
 * removing an element immediately shrinks the list and shifts later items
 * down, so a forward loop with {@code i++} skips every other match.
 * Iterating from the end avoids that bug.
 *
 * @param document document to strip of share-member elements
 * @return the same document instance, with all matching elements removed
 */
public Document deleteAllElementAll(Document document) {
    NodeList nodes = document.getElementsByTagName(XMLConstruct.ChildElementShareMember);
    // walk backwards: safe against the live list re-indexing on removal
    for (int i = nodes.getLength() - 1; i >= 0; i--) {
        nodes.item(i).getParentNode().removeChild(nodes.item(i));
    }
    return document;
}
Document function(Document document) { NodeList nodes = document.getElementsByTagName(XMLConstruct.ChildElementShareMember); for (int i = 0; i < nodes.getLength(); i++) { nodes.item(i).getParentNode().removeChild(nodes.item(i)); } return document; }
/** * Delete All child elements * @param document */
Delete All child elements
deleteAllElementAll
{ "repo_name": "dev131/DropTillLate_Application", "path": "ch.droptilllate.application/src/ch/droptilllate/database/query/ShareMemberQuery.java", "license": "epl-1.0", "size": 5089 }
[ "ch.droptilllate.application.properties.XMLConstruct", "org.w3c.dom.Document", "org.w3c.dom.NodeList" ]
import ch.droptilllate.application.properties.XMLConstruct; import org.w3c.dom.Document; import org.w3c.dom.NodeList;
import ch.droptilllate.application.properties.*; import org.w3c.dom.*;
[ "ch.droptilllate.application", "org.w3c.dom" ]
ch.droptilllate.application; org.w3c.dom;
1,871,115
/**
 * Tests whether the given project carries the parameters required for a
 * valid new project: non-null reference, path and COI type, and the type
 * must be {@link COIEnum#PROJECT}.
 *
 * @param coiProject project to validate (may be null)
 * @return true when the project is a well-formed PROJECT entry, false otherwise
 */
public static boolean isCOIProjectCorrect(COIProject coiProject) {
    // Short-circuiting makes the null guards and the type check one expression;
    // a null type can never equal PROJECT, matching the original's null branch.
    return coiProject != null
            && coiProject.getPath() != null
            && coiProject.getTypeOfCOI() == COIEnum.PROJECT;
}
static boolean function(COIProject coiProject) { if (coiProject == null || coiProject.getPath() == null || coiProject.getTypeOfCOI() == null) { return false; } return (coiProject.getTypeOfCOI() == COIEnum.PROJECT); }
/** * Method test if new project has correct parameters. * * @param coiProject Project to save. * @return boolean Boolean value (True/False). */
Method test if new project has correct parameters
isCOIProjectCorrect
{ "repo_name": "rough23/cudaonlineide", "path": "src/cz/utb/fai/cudaonlineide/server/verifier/RPCVerifier.java", "license": "gpl-3.0", "size": 4051 }
[ "cz.utb.fai.cudaonlineide.shared.dto.COIEnum", "cz.utb.fai.cudaonlineide.shared.dto.COIProject" ]
import cz.utb.fai.cudaonlineide.shared.dto.COIEnum; import cz.utb.fai.cudaonlineide.shared.dto.COIProject;
import cz.utb.fai.cudaonlineide.shared.dto.*;
[ "cz.utb.fai" ]
cz.utb.fai;
640,832
/**
 * Sets the point break for a single item of a series. Thin delegate to the
 * series legend's {@code setLegendBreak}.
 *
 * @param seriesIdx Series index
 * @param itemIdx   Item index within the series
 * @param pb        Point break to apply to the item's legend entry
 */
public void setItemPointBreak(int seriesIdx, int itemIdx, PointBreak pb) {
    this.seriesLegends.get(seriesIdx).setLegendBreak(itemIdx, pb);
}
void function(int seriesIdx, int itemIdx, PointBreak pb) { this.seriesLegends.get(seriesIdx).setLegendBreak(itemIdx, pb); }
/** * Set item point break * * @param seriesIdx Series index * @param itemIdx Item index * @param pb Item point break */
Set item point break
setItemPointBreak
{ "repo_name": "meteoinfo/meteoinfolib", "path": "src/org/meteoinfo/chart/plot/XY1DPlot.java", "license": "lgpl-3.0", "size": 34104 }
[ "org.meteoinfo.legend.PointBreak" ]
import org.meteoinfo.legend.PointBreak;
import org.meteoinfo.legend.*;
[ "org.meteoinfo.legend" ]
org.meteoinfo.legend;
1,958,496
/**
 * Populates the properties for each column family referenced in the create
 * table statement. Families with no specific properties inherit the common
 * family properties; families with specific properties get a merged map.
 *
 * @param familyNames       column families referenced in the create table statement
 * @param commonFamilyProps properties common to all column families
 * @param statement         create table statement
 * @param defaultFamilyName the default column family name
 * @param isLocalIndex      true if in the create local index path
 * @param familyPropList    out-param: pairs of column family name and its properties
 * @throws SQLException if a per-family property is given for a property that
 *                      may only be set table-wide (see PHOENIX-3955)
 */
private void populateFamilyPropsList(Map<String, PName> familyNames, Map<String,Object> commonFamilyProps,
        CreateTableStatement statement, String defaultFamilyName, boolean isLocalIndex,
        final List<Pair<byte[],Map<String,Object>>> familyPropList) throws SQLException {
    for (PName familyName : familyNames.values()) {
        String fam = familyName.getString();
        Collection<Pair<String, Object>> propsForCF =
                statement.getProps().get(IndexUtil.getActualColumnFamilyName(fam));
        // No specific properties for this column family, so add the common family properties
        if (propsForCF.isEmpty()) {
            familyPropList.add(new Pair<>(familyName.getBytes(),commonFamilyProps));
        } else {
            // merge: family-specific properties override the common ones
            Map<String,Object> combinedFamilyProps =
                    Maps.newHashMapWithExpectedSize(propsForCF.size() + commonFamilyProps.size());
            combinedFamilyProps.putAll(commonFamilyProps);
            for (Pair<String,Object> prop : propsForCF) {
                // Don't allow specifying column families for TTL, KEEP_DELETED_CELLS and
                // REPLICATION_SCOPE. These properties can only be applied for all column
                // families of a table and can't be column family specific. See PHOENIX-3955
                if (!fam.equals(QueryConstants.ALL_FAMILY_PROPERTIES_KEY)
                        && MetaDataUtil.propertyNotAllowedToBeOutOfSync(prop.getFirst())) {
                    throw new SQLExceptionInfo.Builder(SQLExceptionCode.COLUMN_FAMILY_NOT_ALLOWED_FOR_PROPERTY)
                            .setMessage("Property: " + prop.getFirst())
                            .build()
                            .buildException();
                }
                combinedFamilyProps.put(prop.getFirst(), prop.getSecond());
            }
            familyPropList.add(new Pair<>(familyName.getBytes(),combinedFamilyProps));
        }
    }
    if (familyNames.isEmpty()) {
        // If there are no family names, use the default column family name. This also takes
        // care of the case when the table ddl has only PK cols present (familyNames empty).
        byte[] cf = defaultFamilyName == null
                ? (!isLocalIndex? QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES
                        : QueryConstants.DEFAULT_LOCAL_INDEX_COLUMN_FAMILY_BYTES)
                : Bytes.toBytes(defaultFamilyName);
        familyPropList.add(new Pair<>(cf, commonFamilyProps));
    }
}
void function(Map<String, PName> familyNames, Map<String,Object> commonFamilyProps, CreateTableStatement statement, String defaultFamilyName, boolean isLocalIndex, final List<Pair<byte[],Map<String,Object>>> familyPropList) throws SQLException { for (PName familyName : familyNames.values()) { String fam = familyName.getString(); Collection<Pair<String, Object>> propsForCF = statement.getProps().get(IndexUtil.getActualColumnFamilyName(fam)); if (propsForCF.isEmpty()) { familyPropList.add(new Pair<>(familyName.getBytes(),commonFamilyProps)); } else { Map<String,Object> combinedFamilyProps = Maps.newHashMapWithExpectedSize(propsForCF.size() + commonFamilyProps.size()); combinedFamilyProps.putAll(commonFamilyProps); for (Pair<String,Object> prop : propsForCF) { if (!fam.equals(QueryConstants.ALL_FAMILY_PROPERTIES_KEY) && MetaDataUtil.propertyNotAllowedToBeOutOfSync(prop.getFirst())) { throw new SQLExceptionInfo.Builder(SQLExceptionCode.COLUMN_FAMILY_NOT_ALLOWED_FOR_PROPERTY) .setMessage(STR + prop.getFirst()) .build() .buildException(); } combinedFamilyProps.put(prop.getFirst(), prop.getSecond()); } familyPropList.add(new Pair<>(familyName.getBytes(),combinedFamilyProps)); } } if (familyNames.isEmpty()) { byte[] cf = defaultFamilyName == null ? (!isLocalIndex? QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES : QueryConstants.DEFAULT_LOCAL_INDEX_COLUMN_FAMILY_BYTES) : Bytes.toBytes(defaultFamilyName); familyPropList.add(new Pair<>(cf, commonFamilyProps)); } }
/** * * Populate the properties for each column family referenced in the create table statement * @param familyNames column families referenced in the create table statement * @param commonFamilyProps properties common to all column families * @param statement create table statement * @param defaultFamilyName the default column family name * @param isLocalIndex true if in the create local index path * @param familyPropList list containing pairs of column families and their corresponding properties * @throws SQLException */
Populate the properties for each column family referenced in the create table statement
populateFamilyPropsList
{ "repo_name": "growingio/phoenix", "path": "phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java", "license": "apache-2.0", "size": 300967 }
[ "com.google.common.collect.Maps", "java.sql.SQLException", "java.util.Collection", "java.util.List", "java.util.Map", "org.apache.hadoop.hbase.util.Bytes", "org.apache.hadoop.hbase.util.Pair", "org.apache.phoenix.exception.SQLExceptionCode", "org.apache.phoenix.exception.SQLExceptionInfo", "org.apache.phoenix.parse.CreateTableStatement", "org.apache.phoenix.query.QueryConstants", "org.apache.phoenix.util.IndexUtil", "org.apache.phoenix.util.MetaDataUtil" ]
import com.google.common.collect.Maps; import java.sql.SQLException; import java.util.Collection; import java.util.List; import java.util.Map; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.phoenix.exception.SQLExceptionCode; import org.apache.phoenix.exception.SQLExceptionInfo; import org.apache.phoenix.parse.CreateTableStatement; import org.apache.phoenix.query.QueryConstants; import org.apache.phoenix.util.IndexUtil; import org.apache.phoenix.util.MetaDataUtil;
import com.google.common.collect.*; import java.sql.*; import java.util.*; import org.apache.hadoop.hbase.util.*; import org.apache.phoenix.exception.*; import org.apache.phoenix.parse.*; import org.apache.phoenix.query.*; import org.apache.phoenix.util.*;
[ "com.google.common", "java.sql", "java.util", "org.apache.hadoop", "org.apache.phoenix" ]
com.google.common; java.sql; java.util; org.apache.hadoop; org.apache.phoenix;
2,793,498
@Override public View onCreateView(@NonNull LayoutInflater inflater, ViewGroup parent, Bundle savedInstanceState) { View view = inflateFragment(R.layout.fragment_midpoint, inflater, parent); this.unbinder = ButterKnife.bind(this, view); setEditTextOnFocusChangeListener(this.highEditText, this.lowEditText); setEditTextTextChangeListener(this.highEditText, this.lowEditText); LogManager.debug(CLASS_NAME, "onCreateView", ""); return view; }
View function(@NonNull LayoutInflater inflater, ViewGroup parent, Bundle savedInstanceState) { View view = inflateFragment(R.layout.fragment_midpoint, inflater, parent); this.unbinder = ButterKnife.bind(this, view); setEditTextOnFocusChangeListener(this.highEditText, this.lowEditText); setEditTextTextChangeListener(this.highEditText, this.lowEditText); LogManager.debug(CLASS_NAME, STR, ""); return view; }
/** * Initializes the fragment's user interface. */
Initializes the fragment's user interface
onCreateView
{ "repo_name": "Pepito-Manaloto/PSE_Planner", "path": "app/src/main/java/com/aaron/pseplanner/fragment/MidpointFragment.java", "license": "apache-2.0", "size": 3320 }
[ "android.os.Bundle", "android.support.annotation.NonNull", "android.view.LayoutInflater", "android.view.View", "android.view.ViewGroup", "com.aaron.pseplanner.service.LogManager" ]
import android.os.Bundle; import android.support.annotation.NonNull; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import com.aaron.pseplanner.service.LogManager;
import android.os.*; import android.support.annotation.*; import android.view.*; import com.aaron.pseplanner.service.*;
[ "android.os", "android.support", "android.view", "com.aaron.pseplanner" ]
android.os; android.support; android.view; com.aaron.pseplanner;
435,364
public Set<String> eventHintSet() { return eventHintSet; }
Set<String> function() { return eventHintSet; }
/** * Gets set of event hint value for finding target event. * @return event hint set */
Gets set of event hint value for finding target event
eventHintSet
{ "repo_name": "gkatsikas/onos", "path": "apps/workflow/api/src/main/java/org/onosproject/workflow/api/EventTimeoutTask.java", "license": "apache-2.0", "size": 4467 }
[ "java.util.Set" ]
import java.util.Set;
import java.util.*;
[ "java.util" ]
java.util;
2,177,121
@Override Record getRecord(String id, QueryOptions options) throws RemoteException;
Record getRecord(String id, QueryOptions options) throws RemoteException;
/** * Return a single record. * * @param id The record ID. * @param options {@link QueryOptions} that adds extra control over which * record are returned. * @return A record. * @throws RemoteException If error occur when using the remote storage. * @see ReadableStorage */
Return a single record
getRecord
{ "repo_name": "statsbiblioteket/summa", "path": "Core/src/main/java/dk/statsbiblioteket/summa/storage/api/rmi/RemoteReadableStorage.java", "license": "apache-2.0", "size": 3706 }
[ "dk.statsbiblioteket.summa.common.Record", "dk.statsbiblioteket.summa.storage.api.QueryOptions", "java.rmi.RemoteException" ]
import dk.statsbiblioteket.summa.common.Record; import dk.statsbiblioteket.summa.storage.api.QueryOptions; import java.rmi.RemoteException;
import dk.statsbiblioteket.summa.common.*; import dk.statsbiblioteket.summa.storage.api.*; import java.rmi.*;
[ "dk.statsbiblioteket.summa", "java.rmi" ]
dk.statsbiblioteket.summa; java.rmi;
85,808
@Override public Completable download() { return downloadCompletableProvider.getCompletable(true); }
Completable function() { return downloadCompletableProvider.getCompletable(true); }
/** * Downloads the resource in scope in an asynchronous way. As soon as it's downloaded and processed, the * {@code Completable} is completed. * @return a {@code Completable} that completes when the download and processing is finished */
Downloads the resource in scope in an asynchronous way. As soon as it's downloaded and processed, the Completable is completed
download
{ "repo_name": "dhis2/dhis2-android-sdk", "path": "core/src/main/java/org/hisp/dhis/android/core/arch/repositories/object/internal/ReadOnlyFirstObjectWithDownloadRepositoryImpl.java", "license": "bsd-3-clause", "size": 3970 }
[ "io.reactivex.Completable" ]
import io.reactivex.Completable;
import io.reactivex.*;
[ "io.reactivex" ]
io.reactivex;
2,641,812
protected EditPart getEditPart() { return source; }
EditPart function() { return source; }
/** * Returns the source edit part. * @return the source edit part */
Returns the source edit part
getEditPart
{ "repo_name": "mikesligo/visGrid", "path": "ie.tcd.gmf.visGrid.plugin/src/org/eclipse/gmf/runtime/lite/services/TreeDirectEditManager.java", "license": "gpl-3.0", "size": 10314 }
[ "org.eclipse.gef.EditPart" ]
import org.eclipse.gef.EditPart;
import org.eclipse.gef.*;
[ "org.eclipse.gef" ]
org.eclipse.gef;
2,358,065
public ModelProjectionBuilder property(Property prop, Consumer<ModelProjectionBuilder> nested) { ModelProjectionBuilder builder = new ModelProjectionBuilder(); nested.accept(builder); builders.add(new ModelProjection.Builder(prop, builder)); return this; }
ModelProjectionBuilder function(Property prop, Consumer<ModelProjectionBuilder> nested) { ModelProjectionBuilder builder = new ModelProjectionBuilder(); nested.accept(builder); builders.add(new ModelProjection.Builder(prop, builder)); return this; }
/** * Add the given property and its values to the projection, then project properties on those values. * @param prop The property to add. * @param nested Build the projection on the property values. * @return This builder instance. */
Add the given property and its values to the projection, then project properties on those values
property
{ "repo_name": "UKGovLD/registry-core", "path": "src/main/java/com/epimorphics/registry/ror/projection/ModelProjectionBuilder.java", "license": "apache-2.0", "size": 3455 }
[ "java.util.function.Consumer", "org.apache.jena.rdf.model.Property" ]
import java.util.function.Consumer; import org.apache.jena.rdf.model.Property;
import java.util.function.*; import org.apache.jena.rdf.model.*;
[ "java.util", "org.apache.jena" ]
java.util; org.apache.jena;
112,842
public void logWarn(int logTargets, LogMessageIdentifier identifier, String... params) { String message = getLogMessageText(identifier, params); if (logToSystemLog(logTargets)) { systemLogger.warn(message); } if (logToAccessLog(logTargets)) { accessLogger.warn(message); } if (logToAuditLog(logTargets)) { auditLogger.warn(message); } if (logToReverseProxyLog(logTargets)) { proxyLogger.warn(message); } }
void function(int logTargets, LogMessageIdentifier identifier, String... params) { String message = getLogMessageText(identifier, params); if (logToSystemLog(logTargets)) { systemLogger.warn(message); } if (logToAccessLog(logTargets)) { accessLogger.warn(message); } if (logToAuditLog(logTargets)) { auditLogger.warn(message); } if (logToReverseProxyLog(logTargets)) { proxyLogger.warn(message); } }
/** * Logs an information with log type {@link Level#WARN}. The information is * written to the log files specified. * * @param logTargets * The target log files to be used. * @param identifier * The message identifier to be logged. * @param params * The parameters for the place holders of the message */
Logs an information with log type <code>Level#WARN</code>. The information is written to the log files specified
logWarn
{ "repo_name": "opetrovski/development", "path": "oscm-common/javasrc/org/oscm/logging/Log4jLogger.java", "license": "apache-2.0", "size": 11238 }
[ "org.oscm.types.enumtypes.LogMessageIdentifier" ]
import org.oscm.types.enumtypes.LogMessageIdentifier;
import org.oscm.types.enumtypes.*;
[ "org.oscm.types" ]
org.oscm.types;
2,889,802
public static List<FullscreenInfo> getFullscreenInfo() { boolean managedOnly = PrefServiceBridge.getInstance().isFullscreenManaged(); ArrayList<FullscreenInfo> list = new ArrayList<FullscreenInfo>(); nativeGetFullscreenOrigins(list, managedOnly); return list; }
static List<FullscreenInfo> function() { boolean managedOnly = PrefServiceBridge.getInstance().isFullscreenManaged(); ArrayList<FullscreenInfo> list = new ArrayList<FullscreenInfo>(); nativeGetFullscreenOrigins(list, managedOnly); return list; }
/** * Get a list of stored fullscreen information. */
Get a list of stored fullscreen information
getFullscreenInfo
{ "repo_name": "ltilve/chromium", "path": "chrome/android/java/src/org/chromium/chrome/browser/preferences/website/WebsitePreferenceBridge.java", "license": "bsd-3-clause", "size": 10533 }
[ "java.util.ArrayList", "java.util.List", "org.chromium.chrome.browser.preferences.PrefServiceBridge" ]
import java.util.ArrayList; import java.util.List; import org.chromium.chrome.browser.preferences.PrefServiceBridge;
import java.util.*; import org.chromium.chrome.browser.preferences.*;
[ "java.util", "org.chromium.chrome" ]
java.util; org.chromium.chrome;
1,739,517
public void bind(Name name, Object obj) throws NamingException { bind(name, obj, false); }
void function(Name name, Object obj) throws NamingException { bind(name, obj, false); }
/** * Binds a name to an object. All intermediate contexts and the target * context (that named by all but terminal atomic component of the name) * must already exist. * * @param name the name to bind; may not be empty * @param obj the object to bind; possibly null * @exception NameAlreadyBoundException if name is already bound * @exception InvalidAttributesException if object did not supply all * mandatory attributes * @exception NamingException if a naming exception is encountered */
Binds a name to an object. All intermediate contexts and the target context (that named by all but terminal atomic component of the name) must already exist
bind
{ "repo_name": "plumer/codana", "path": "tomcat_files/7.0.0/NamingContext.java", "license": "mit", "size": 33332 }
[ "javax.naming.Name", "javax.naming.NamingException" ]
import javax.naming.Name; import javax.naming.NamingException;
import javax.naming.*;
[ "javax.naming" ]
javax.naming;
1,999,900
@Override public LockingStrategy getLockingStrategy(Lockable lockable, LockMode lockMode) { if ( lockMode == LockMode.PESSIMISTIC_FORCE_INCREMENT ) { return new PessimisticForceIncrementLockingStrategy( lockable, lockMode ); } else if ( lockMode == LockMode.PESSIMISTIC_WRITE ) { return new InfinispanPessimisticWriteLockingStrategy<EK>( lockable, lockMode ); } else if ( lockMode == LockMode.PESSIMISTIC_READ ) { // TODO find a more efficient pessimistic read return new InfinispanPessimisticWriteLockingStrategy<EK>( lockable, lockMode ); } else if ( lockMode == LockMode.OPTIMISTIC ) { return new OptimisticLockingStrategy( lockable, lockMode ); } else if ( lockMode == LockMode.OPTIMISTIC_FORCE_INCREMENT ) { return new OptimisticForceIncrementLockingStrategy( lockable, lockMode ); } else { return null; } }
LockingStrategy function(Lockable lockable, LockMode lockMode) { if ( lockMode == LockMode.PESSIMISTIC_FORCE_INCREMENT ) { return new PessimisticForceIncrementLockingStrategy( lockable, lockMode ); } else if ( lockMode == LockMode.PESSIMISTIC_WRITE ) { return new InfinispanPessimisticWriteLockingStrategy<EK>( lockable, lockMode ); } else if ( lockMode == LockMode.PESSIMISTIC_READ ) { return new InfinispanPessimisticWriteLockingStrategy<EK>( lockable, lockMode ); } else if ( lockMode == LockMode.OPTIMISTIC ) { return new OptimisticLockingStrategy( lockable, lockMode ); } else if ( lockMode == LockMode.OPTIMISTIC_FORCE_INCREMENT ) { return new OptimisticForceIncrementLockingStrategy( lockable, lockMode ); } else { return null; } }
/** * Get a strategy instance which knows how to acquire a database-level lock * of the specified mode for this dialect. * * @param lockable The persister for the entity to be locked. * @param lockMode The type of lock to be acquired. * @return The appropriate locking strategy. * @since 3.2 */
Get a strategy instance which knows how to acquire a database-level lock of the specified mode for this dialect
getLockingStrategy
{ "repo_name": "mp911de/hibernate-ogm", "path": "infinispan/src/main/java/org/hibernate/ogm/datastore/infinispan/InfinispanDialect.java", "license": "lgpl-2.1", "size": 9916 }
[ "org.hibernate.LockMode", "org.hibernate.dialect.lock.LockingStrategy", "org.hibernate.dialect.lock.OptimisticForceIncrementLockingStrategy", "org.hibernate.dialect.lock.OptimisticLockingStrategy", "org.hibernate.dialect.lock.PessimisticForceIncrementLockingStrategy", "org.hibernate.ogm.datastore.infinispan.dialect.impl.InfinispanPessimisticWriteLockingStrategy", "org.hibernate.persister.entity.Lockable" ]
import org.hibernate.LockMode; import org.hibernate.dialect.lock.LockingStrategy; import org.hibernate.dialect.lock.OptimisticForceIncrementLockingStrategy; import org.hibernate.dialect.lock.OptimisticLockingStrategy; import org.hibernate.dialect.lock.PessimisticForceIncrementLockingStrategy; import org.hibernate.ogm.datastore.infinispan.dialect.impl.InfinispanPessimisticWriteLockingStrategy; import org.hibernate.persister.entity.Lockable;
import org.hibernate.*; import org.hibernate.dialect.lock.*; import org.hibernate.ogm.datastore.infinispan.dialect.impl.*; import org.hibernate.persister.entity.*;
[ "org.hibernate", "org.hibernate.dialect", "org.hibernate.ogm", "org.hibernate.persister" ]
org.hibernate; org.hibernate.dialect; org.hibernate.ogm; org.hibernate.persister;
1,316,806
public Bbox getBounds() { if (isEmpty()) { return null; } return exteriorRing.getBounds(); }
Bbox function() { if (isEmpty()) { return null; } return exteriorRing.getBounds(); }
/** * Return the bounds of the exterior ring, or null if the polygon is empty. */
Return the bounds of the exterior ring, or null if the polygon is empty
getBounds
{ "repo_name": "lat-lon/geomajas", "path": "face/geomajas-face-gwt/client/src/main/java/org/geomajas/gwt/client/spatial/geometry/Polygon.java", "license": "agpl-3.0", "size": 7720 }
[ "org.geomajas.gwt.client.spatial.Bbox" ]
import org.geomajas.gwt.client.spatial.Bbox;
import org.geomajas.gwt.client.spatial.*;
[ "org.geomajas.gwt" ]
org.geomajas.gwt;
1,125,298
public synchronized void markAsFailed(ClusterState state, Exception reason) { final ClusterStateContext failedContext = findState(state.stateUUID()); if (failedContext == null) { throw new IllegalArgumentException("can't resolve failed cluster state with uuid [" + state.stateUUID() + "], version [" + state.version() + "]"); } if (failedContext.committed() == false) { throw new IllegalArgumentException("failed cluster state is not committed " + state); } // fail all committed states which are batch together with the failed state ArrayList<ClusterStateContext> statesToRemove = new ArrayList<>(); for (int index = 0; index < pendingStates.size(); index++) { final ClusterStateContext pendingContext = pendingStates.get(index); if (pendingContext.committed() == false) { continue; } final ClusterState pendingState = pendingContext.state; if (pendingContext.equals(failedContext)) { statesToRemove.add(pendingContext); pendingContext.listener.onNewClusterStateFailed(reason); } else if (state.supersedes(pendingState)) { statesToRemove.add(pendingContext); logger.debug("failing committed state {} together with state {}", pendingContext, failedContext); pendingContext.listener.onNewClusterStateFailed(reason); } } pendingStates.removeAll(statesToRemove); assert findState(state.stateUUID()) == null : "state was marked as processed but can still be found in pending list " + state; }
synchronized void function(ClusterState state, Exception reason) { final ClusterStateContext failedContext = findState(state.stateUUID()); if (failedContext == null) { throw new IllegalArgumentException(STR + state.stateUUID() + STR + state.version() + "]"); } if (failedContext.committed() == false) { throw new IllegalArgumentException(STR + state); } ArrayList<ClusterStateContext> statesToRemove = new ArrayList<>(); for (int index = 0; index < pendingStates.size(); index++) { final ClusterStateContext pendingContext = pendingStates.get(index); if (pendingContext.committed() == false) { continue; } final ClusterState pendingState = pendingContext.state; if (pendingContext.equals(failedContext)) { statesToRemove.add(pendingContext); pendingContext.listener.onNewClusterStateFailed(reason); } else if (state.supersedes(pendingState)) { statesToRemove.add(pendingContext); logger.debug(STR, pendingContext, failedContext); pendingContext.listener.onNewClusterStateFailed(reason); } } pendingStates.removeAll(statesToRemove); assert findState(state.stateUUID()) == null : STR + state; }
/** * mark that the processing of the given state has failed. All committed states that are {@link ClusterState#supersedes(ClusterState)}-ed * by this failed state, will be failed as well */
mark that the processing of the given state has failed. All committed states that are <code>ClusterState#supersedes(ClusterState)</code>-ed by this failed state, will be failed as well
markAsFailed
{ "repo_name": "dpursehouse/elasticsearch", "path": "core/src/main/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueue.java", "license": "apache-2.0", "size": 14065 }
[ "java.util.ArrayList", "org.elasticsearch.cluster.ClusterState" ]
import java.util.ArrayList; import org.elasticsearch.cluster.ClusterState;
import java.util.*; import org.elasticsearch.cluster.*;
[ "java.util", "org.elasticsearch.cluster" ]
java.util; org.elasticsearch.cluster;
1,658,204
// ToDo: (Issue 219 - PackageRefactoring) - Better return Path of // Documents?? public List<Document> getEmbeddedDocuments() { List<Document> embeddedObjects = new ArrayList<Document>(); // ToDo: (Issue 219 - PackageRefactoring) - Algorithm enhancement: // Instead going through all the files for each mimetype, better // Check all files, which have a mimetype if it is one of the desired, // perhaps start with ODF prefix for (OdfMediaType mediaType : OdfMediaType.values()) { embeddedObjects.addAll(getEmbeddedDocuments(mediaType)); } return embeddedObjects; }
List<Document> function() { List<Document> embeddedObjects = new ArrayList<Document>(); for (OdfMediaType mediaType : OdfMediaType.values()) { embeddedObjects.addAll(getEmbeddedDocuments(mediaType)); } return embeddedObjects; }
/** * Method returns all embedded OdfPackageDocuments, which match a valid * OdfMediaType, of the root OdfPackageDocument. * * @return a list with all embedded documents of the root OdfPackageDocument */
Method returns all embedded OdfPackageDocuments, which match a valid OdfMediaType, of the root OdfPackageDocument
getEmbeddedDocuments
{ "repo_name": "jbjonesjr/geoproponis", "path": "external/simple-odf-0.8.1-incubating-sources/org/odftoolkit/simple/Document.java", "license": "gpl-2.0", "size": 100866 }
[ "java.util.ArrayList", "java.util.List" ]
import java.util.ArrayList; import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,129,574
@ServiceMethod(returns = ReturnType.COLLECTION) private PagedFlux<SubscriptionContractInner> listAsync( String resourceGroupName, String serviceName, String userId, String filter, Integer top, Integer skip, Context context) { return new PagedFlux<>( () -> listSinglePageAsync(resourceGroupName, serviceName, userId, filter, top, skip, context), nextLink -> listNextSinglePageAsync(nextLink, context)); }
@ServiceMethod(returns = ReturnType.COLLECTION) PagedFlux<SubscriptionContractInner> function( String resourceGroupName, String serviceName, String userId, String filter, Integer top, Integer skip, Context context) { return new PagedFlux<>( () -> listSinglePageAsync(resourceGroupName, serviceName, userId, filter, top, skip, context), nextLink -> listNextSinglePageAsync(nextLink, context)); }
/** * Lists the collection of subscriptions of the specified user. * * @param resourceGroupName The name of the resource group. * @param serviceName The name of the API Management service. * @param userId User identifier. Must be unique in the current API Management service instance. * @param filter | Field | Usage | Supported operators | Supported functions * |&lt;/br&gt;|-------------|------------------------|-----------------------------------|&lt;/br&gt;|name | * filter | ge, le, eq, ne, gt, lt | substringof, contains, startswith, endswith |&lt;/br&gt;|displayName | * filter | ge, le, eq, ne, gt, lt | substringof, contains, startswith, endswith |&lt;/br&gt;|stateComment | * filter | ge, le, eq, ne, gt, lt | substringof, contains, startswith, endswith |&lt;/br&gt;|ownerId | filter | * ge, le, eq, ne, gt, lt | substringof, contains, startswith, endswith |&lt;/br&gt;|scope | filter | ge, le, * eq, ne, gt, lt | substringof, contains, startswith, endswith |&lt;/br&gt;|userId | filter | ge, le, eq, ne, * gt, lt | substringof, contains, startswith, endswith |&lt;/br&gt;|productId | filter | ge, le, eq, ne, gt, lt * | substringof, contains, startswith, endswith |&lt;/br&gt;. * @param top Number of records to return. * @param skip Number of records to skip. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return paged Subscriptions list representation. */
Lists the collection of subscriptions of the specified user
listAsync
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/apimanagement/azure-resourcemanager-apimanagement/src/main/java/com/azure/resourcemanager/apimanagement/implementation/UserSubscriptionsClientImpl.java", "license": "mit", "size": 34692 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod", "com.azure.core.http.rest.PagedFlux", "com.azure.core.util.Context", "com.azure.resourcemanager.apimanagement.fluent.models.SubscriptionContractInner" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.PagedFlux; import com.azure.core.util.Context; import com.azure.resourcemanager.apimanagement.fluent.models.SubscriptionContractInner;
import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.core.util.*; import com.azure.resourcemanager.apimanagement.fluent.models.*;
[ "com.azure.core", "com.azure.resourcemanager" ]
com.azure.core; com.azure.resourcemanager;
418,432
boolean smoothSlideTo(float slideOffset, int velocity) { if (!mCanSlide) { // Nothing to do. return false; } final int topBound = getSlidingTop(); int y = mIsSlidingUp ? (int) (topBound + slideOffset * mSlideRange) : (int) (topBound - slideOffset * mSlideRange); if (mDragHelper.smoothSlideViewTo(mSlideableView, mSlideableView.getLeft(), y)) { setAllChildrenVisible(); ViewCompat.postInvalidateOnAnimation(this); return true; } return false; }
boolean smoothSlideTo(float slideOffset, int velocity) { if (!mCanSlide) { return false; } final int topBound = getSlidingTop(); int y = mIsSlidingUp ? (int) (topBound + slideOffset * mSlideRange) : (int) (topBound - slideOffset * mSlideRange); if (mDragHelper.smoothSlideViewTo(mSlideableView, mSlideableView.getLeft(), y)) { setAllChildrenVisible(); ViewCompat.postInvalidateOnAnimation(this); return true; } return false; }
/** * Smoothly animate mDraggingPane to the target X position within its range. * * @param slideOffset position to animate to * @param velocity initial velocity in case of fling, or 0. */
Smoothly animate mDraggingPane to the target X position within its range
smoothSlideTo
{ "repo_name": "felipecsl/AndroidSlidingUpPanel", "path": "library/src/com/sothree/slidinguppanel/SlidingUpPanelLayout.java", "license": "apache-2.0", "size": 39248 }
[ "android.support.v4.view.ViewCompat" ]
import android.support.v4.view.ViewCompat;
import android.support.v4.view.*;
[ "android.support" ]
android.support;
535,738
protected int dp2px(int dpVal) { return (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, dpVal, getResources().getDisplayMetrics()); }
int function(int dpVal) { return (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, dpVal, getResources().getDisplayMetrics()); }
/** * dp 2 px * * @param dpVal */
dp 2 px
dp2px
{ "repo_name": "kivensolo/UiUsingListView", "path": "library/ui/src/main/java/com/module/views/progress/SpiralProgressView.java", "license": "gpl-2.0", "size": 9912 }
[ "android.util.TypedValue" ]
import android.util.TypedValue;
import android.util.*;
[ "android.util" ]
android.util;
2,616,944
public void movetoAbs(float x, float y) throws ParseException { listHandler.item(new SVGPathSegMovetoLinetoItem (SVGPathSeg.PATHSEG_MOVETO_ABS,PATHSEG_MOVETO_ABS_LETTER, x,y)); lastAbs.setX(x); lastAbs.setY(y); lastAbs.setPathSegType(SVGPathSeg.PATHSEG_MOVETO_ABS); }
void function(float x, float y) throws ParseException { listHandler.item(new SVGPathSegMovetoLinetoItem (SVGPathSeg.PATHSEG_MOVETO_ABS,PATHSEG_MOVETO_ABS_LETTER, x,y)); lastAbs.setX(x); lastAbs.setY(y); lastAbs.setPathSegType(SVGPathSeg.PATHSEG_MOVETO_ABS); }
/** * Implements {@link org.apache.batik.parser.PathHandler#movetoAbs(float,float)}. */
Implements <code>org.apache.batik.parser.PathHandler#movetoAbs(float,float)</code>
movetoAbs
{ "repo_name": "Squeegee/batik", "path": "sources/org/apache/batik/dom/svg/AbstractSVGNormPathSegList.java", "license": "apache-2.0", "size": 14568 }
[ "org.apache.batik.parser.ParseException", "org.w3c.dom.svg.SVGPathSeg" ]
import org.apache.batik.parser.ParseException; import org.w3c.dom.svg.SVGPathSeg;
import org.apache.batik.parser.*; import org.w3c.dom.svg.*;
[ "org.apache.batik", "org.w3c.dom" ]
org.apache.batik; org.w3c.dom;
2,763,757