Column        Type           Range / size
Unnamed: 0    int64          0 to 6.45k
func          string         lengths 37 to 143k
target        class label    2 classes (0 = true, 1 = no label)
project       string         lengths 33 to 157
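Each row below is one record of this table: an integer index (Unnamed: 0), a flattened Java function or class body (func), a binary target, and the path of the source file it was taken from (project). The lines that follow are a minimal sketch of inspecting such a table with pandas; the file name functions.csv is a hypothetical placeholder, and only the column names and ranges are taken from the schema above.

# Minimal sketch: inspect a table with the columns described above.
# Assumptions: the rows are stored as CSV under the hypothetical name
# "functions.csv"; only the column names/ranges come from the schema.
import pandas as pd

df = pd.read_csv("functions.csv")            # Unnamed: 0, func, target, project

print(df["target"].value_counts())           # 2 classes: 0 (true), 1 (no label)
print(df["func"].str.len().describe())       # lengths roughly 37 to 143k
print(df["project"].str.len().describe())    # lengths roughly 33 to 157

# Look at one record: the source path and the start of the function body.
row = df.iloc[0]
print(row["project"])
print(row["func"][:200])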
633
public class IndicesStatusAction extends IndicesAction<IndicesStatusRequest, IndicesStatusResponse, IndicesStatusRequestBuilder> { public static final IndicesStatusAction INSTANCE = new IndicesStatusAction(); public static final String NAME = "indices/status"; private IndicesStatusAction() { super(NAME); } @Override public IndicesStatusResponse newResponse() { return new IndicesStatusResponse(); } @Override public IndicesStatusRequestBuilder newRequestBuilder(IndicesAdminClient client) { return new IndicesStatusRequestBuilder(client); } }
0 (true)
src_main_java_org_elasticsearch_action_admin_indices_status_IndicesStatusAction.java
230
PostingsHighlighter highlighter = new PostingsHighlighter() { @Override protected PassageFormatter getFormatter(String field) { return new DefaultPassageFormatter("<b>", "</b>", "... ", true); } };
0 (true)
src_test_java_org_apache_lucene_search_postingshighlight_XPostingsHighlighterTests.java
115
static final class EmptyTask extends ForkJoinTask<Void> { private static final long serialVersionUID = -7721805057305804111L; EmptyTask() { status = ForkJoinTask.NORMAL; } // force done public final Void getRawResult() { return null; } public final void setRawResult(Void x) {} public final boolean exec() { return true; } }
0 (true)
src_main_java_jsr166e_ForkJoinPool.java
3,989
public class FunctionScoreQueryParser implements QueryParser { public static final String NAME = "function_score"; ScoreFunctionParserMapper funtionParserMapper; @Inject public FunctionScoreQueryParser(ScoreFunctionParserMapper funtionParserMapper) { this.funtionParserMapper = funtionParserMapper; } @Override public String[] names() { return new String[] { NAME, Strings.toCamelCase(NAME) }; } private static final ImmutableMap<String, CombineFunction> combineFunctionsMap; static { CombineFunction[] values = CombineFunction.values(); Builder<String, CombineFunction> combineFunctionMapBuilder = ImmutableMap.<String, CombineFunction>builder(); for (CombineFunction combineFunction : values) { combineFunctionMapBuilder.put(combineFunction.getName(), combineFunction); } combineFunctionsMap = combineFunctionMapBuilder.build(); } @Override public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException { XContentParser parser = parseContext.parser(); Query query = null; float boost = 1.0f; FiltersFunctionScoreQuery.ScoreMode scoreMode = FiltersFunctionScoreQuery.ScoreMode.Multiply; ArrayList<FiltersFunctionScoreQuery.FilterFunction> filterFunctions = new ArrayList<FiltersFunctionScoreQuery.FilterFunction>(); float maxBoost = Float.MAX_VALUE; String currentFieldName = null; XContentParser.Token token; CombineFunction combineFunction = CombineFunction.MULT; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if ("query".equals(currentFieldName)) { query = parseContext.parseInnerQuery(); } else if ("filter".equals(currentFieldName)) { query = new XConstantScoreQuery(parseContext.parseInnerFilter()); } else if ("score_mode".equals(currentFieldName) || "scoreMode".equals(currentFieldName)) { scoreMode = parseScoreMode(parseContext, parser); } else if ("boost_mode".equals(currentFieldName) || "boostMode".equals(currentFieldName)) { combineFunction = parseBoostMode(parseContext, parser); } else if ("max_boost".equals(currentFieldName) || "maxBoost".equals(currentFieldName)) { maxBoost = parser.floatValue(); } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else if ("functions".equals(currentFieldName)) { currentFieldName = parseFiltersAndFunctions(parseContext, parser, filterFunctions, currentFieldName); } else { // we tru to parse a score function. If there is no score // function for the current field name, // funtionParserMapper.get() will throw an Exception. filterFunctions.add(new FiltersFunctionScoreQuery.FilterFunction(null, funtionParserMapper.get(parseContext.index(), currentFieldName).parse(parseContext, parser))); } } if (query == null) { query = Queries.newMatchAllQuery(); } // if all filter elements returned null, just use the query if (filterFunctions.isEmpty()) { return query; } // handle cases where only one score function and no filter was // provided. In this case we create a FunctionScoreQuery. if (filterFunctions.size() == 1 && filterFunctions.get(0).filter == null) { FunctionScoreQuery theQuery = new FunctionScoreQuery(query, filterFunctions.get(0).function); if (combineFunction != null) { theQuery.setCombineFunction(combineFunction); } theQuery.setBoost(boost); theQuery.setMaxBoost(maxBoost); return theQuery; // in all other cases we create a FiltersFunctionScoreQuery. 
} else { FiltersFunctionScoreQuery functionScoreQuery = new FiltersFunctionScoreQuery(query, scoreMode, filterFunctions.toArray(new FiltersFunctionScoreQuery.FilterFunction[filterFunctions.size()]), maxBoost); if (combineFunction != null) { functionScoreQuery.setCombineFunction(combineFunction); } functionScoreQuery.setBoost(boost); return functionScoreQuery; } } private String parseFiltersAndFunctions(QueryParseContext parseContext, XContentParser parser, ArrayList<FiltersFunctionScoreQuery.FilterFunction> filterFunctions, String currentFieldName) throws IOException { XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { Filter filter = null; ScoreFunction scoreFunction = null; if (token != XContentParser.Token.START_OBJECT) { throw new QueryParsingException(parseContext.index(), NAME + ": malformed query, expected a " + XContentParser.Token.START_OBJECT + " while parsing functions but got a " + token); } else { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else { if ("filter".equals(currentFieldName)) { filter = parseContext.parseInnerFilter(); } else { // do not need to check null here, // funtionParserMapper throws exception if parser // non-existent ScoreFunctionParser functionParser = funtionParserMapper.get(parseContext.index(), currentFieldName); scoreFunction = functionParser.parse(parseContext, parser); } } } } if (filter == null) { filter = Queries.MATCH_ALL_FILTER; } filterFunctions.add(new FiltersFunctionScoreQuery.FilterFunction(filter, scoreFunction)); } return currentFieldName; } private FiltersFunctionScoreQuery.ScoreMode parseScoreMode(QueryParseContext parseContext, XContentParser parser) throws IOException { String scoreMode = parser.text(); if ("avg".equals(scoreMode)) { return FiltersFunctionScoreQuery.ScoreMode.Avg; } else if ("max".equals(scoreMode)) { return FiltersFunctionScoreQuery.ScoreMode.Max; } else if ("min".equals(scoreMode)) { return FiltersFunctionScoreQuery.ScoreMode.Min; } else if ("sum".equals(scoreMode)) { return FiltersFunctionScoreQuery.ScoreMode.Sum; } else if ("multiply".equals(scoreMode)) { return FiltersFunctionScoreQuery.ScoreMode.Multiply; } else if ("first".equals(scoreMode)) { return FiltersFunctionScoreQuery.ScoreMode.First; } else { throw new QueryParsingException(parseContext.index(), NAME + " illegal score_mode [" + scoreMode + "]"); } } private CombineFunction parseBoostMode(QueryParseContext parseContext, XContentParser parser) throws IOException { String boostMode = parser.text(); CombineFunction cf = combineFunctionsMap.get(boostMode); if (cf == null) { throw new QueryParsingException(parseContext.index(), NAME + " illegal boost_mode [" + boostMode + "]"); } return cf; } }
1 (no label)
src_main_java_org_elasticsearch_index_query_functionscore_FunctionScoreQueryParser.java
3,711
private class ManagedThread extends Thread { public ManagedThread(Runnable target) { super(threadGroup, target, threadName); } @Override public void run() { try { super.run(); } catch (OutOfMemoryError e) { OutOfMemoryErrorDispatcher.onOutOfMemory(e); } } }
1 (no label)
hazelcast_src_main_java_com_hazelcast_util_executor_SingleExecutorThreadFactory.java
776
@Deprecated public class AvailabilityStatusType implements Serializable, BroadleafEnumerationType { private static final long serialVersionUID = 1L; private static final Map<String, AvailabilityStatusType> TYPES = new LinkedHashMap<String, AvailabilityStatusType>(); public static final AvailabilityStatusType AVAILABLE = new AvailabilityStatusType("AVAILABLE", "Available"); public static final AvailabilityStatusType UNAVAILABLE = new AvailabilityStatusType("UNAVAILABLE", "Unavailable"); public static final AvailabilityStatusType BACKORDERED = new AvailabilityStatusType("BACKORDERED", "Back Ordered"); public static AvailabilityStatusType getInstance(final String type) { return TYPES.get(type); } private String type; private String friendlyType; public AvailabilityStatusType() { //do nothing } public AvailabilityStatusType(final String type, final String friendlyType) { this.friendlyType = friendlyType; setType(type); } @Override public String getType() { return type; } @Override public String getFriendlyType() { return friendlyType; } private void setType(final String type) { this.type = type; if (!TYPES.containsKey(type)) { TYPES.put(type, this); } else { throw new RuntimeException("Cannot add the type: (" + type + "). It already exists as a type via " + getInstance(type).getClass().getName()); } } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((type == null) ? 0 : type.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; AvailabilityStatusType other = (AvailabilityStatusType) obj; if (type == null) { if (other.type != null) return false; } else if (!type.equals(other.type)) return false; return true; } }
1 (no label)
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_inventory_service_type_AvailabilityStatusType.java
511
public class HourOfDayType implements Serializable, BroadleafEnumerationType { private static final long serialVersionUID = 1L; private static final Map<String, HourOfDayType> TYPES = new LinkedHashMap<String, HourOfDayType>(); public static final HourOfDayType ZERO = new HourOfDayType("0", "00"); public static final HourOfDayType ONE = new HourOfDayType("1", "01"); public static final HourOfDayType TWO = new HourOfDayType("2", "02"); public static final HourOfDayType THREE = new HourOfDayType("3", "03"); public static final HourOfDayType FOUR = new HourOfDayType("4", "04"); public static final HourOfDayType FIVE = new HourOfDayType("5", "05"); public static final HourOfDayType SIX = new HourOfDayType("6", "06"); public static final HourOfDayType SEVEN = new HourOfDayType("7", "07"); public static final HourOfDayType EIGHT = new HourOfDayType("8", "08"); public static final HourOfDayType NINE = new HourOfDayType("9", "09"); public static final HourOfDayType TEN = new HourOfDayType("10", "10"); public static final HourOfDayType ELEVEN = new HourOfDayType("11", "11"); public static final HourOfDayType TWELVE = new HourOfDayType("12", "12"); public static final HourOfDayType THIRTEEN = new HourOfDayType("13", "13"); public static final HourOfDayType FOURTEEN = new HourOfDayType("14", "14"); public static final HourOfDayType FIFTEEN = new HourOfDayType("15", "15"); public static final HourOfDayType SIXTEEN = new HourOfDayType("16", "16"); public static final HourOfDayType SEVENTEEN = new HourOfDayType("17", "17"); public static final HourOfDayType EIGHTEEN = new HourOfDayType("18", "18"); public static final HourOfDayType NINETEEN = new HourOfDayType("19", "19"); public static final HourOfDayType TWENTY = new HourOfDayType("20", "20"); public static final HourOfDayType TWENTYONE = new HourOfDayType("21", "21"); public static final HourOfDayType TWNETYTWO = new HourOfDayType("22", "22"); public static final HourOfDayType TWENTYTHREE = new HourOfDayType("23", "23"); public static HourOfDayType getInstance(final String type) { return TYPES.get(type); } private String type; private String friendlyType; public HourOfDayType() { //do nothing } public HourOfDayType(final String type, final String friendlyType) { this.friendlyType = friendlyType; setType(type); } public String getType() { return type; } public String getFriendlyType() { return friendlyType; } private void setType(final String type) { this.type = type; if (!TYPES.containsKey(type)) { TYPES.put(type, this); } else { throw new RuntimeException("Cannot add the type: (" + type + "). It already exists as a type via " + getInstance(type).getClass().getName()); } } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((type == null) ? 0 : type.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; HourOfDayType other = (HourOfDayType) obj; if (type == null) { if (other.type != null) return false; } else if (!type.equals(other.type)) return false; return true; } }
1 (no label)
common_src_main_java_org_broadleafcommerce_common_time_HourOfDayType.java
1,496
public static class Map extends Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex> { private boolean drop; @Override public void setup(final Mapper.Context context) throws IOException, InterruptedException { this.drop = Tokens.Action.valueOf(context.getConfiguration().get(ACTION)).equals(Tokens.Action.DROP); } @Override public void map(final NullWritable key, final FaunusVertex value, final Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex>.Context context) throws IOException, InterruptedException { Iterator<Edge> itty = value.getEdges(Direction.IN).iterator(); long edgesKept = 0; long edgesDropped = 0; while (itty.hasNext()) { if (this.drop) { if ((((StandardFaunusEdge) itty.next()).hasPaths())) { itty.remove(); edgesDropped++; } else edgesKept++; } else { if (!(((StandardFaunusEdge) itty.next()).hasPaths())) { itty.remove(); edgesDropped++; } else edgesKept++; } } DEFAULT_COMPAT.incrementContextCounter(context, Counters.IN_EDGES_DROPPED, edgesDropped); DEFAULT_COMPAT.incrementContextCounter(context, Counters.IN_EDGES_KEPT, edgesKept); /////////////////// itty = value.getEdges(Direction.OUT).iterator(); edgesKept = 0; edgesDropped = 0; while (itty.hasNext()) { if (this.drop) { if ((((StandardFaunusEdge) itty.next()).hasPaths())) { itty.remove(); edgesDropped++; } else edgesKept++; } else { if (!(((StandardFaunusEdge) itty.next()).hasPaths())) { itty.remove(); edgesDropped++; } else edgesKept++; } } DEFAULT_COMPAT.incrementContextCounter(context, Counters.OUT_EDGES_DROPPED, edgesDropped); DEFAULT_COMPAT.incrementContextCounter(context, Counters.OUT_EDGES_KEPT, edgesKept); context.write(NullWritable.get(), value); } }
1 (no label)
titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_mapreduce_sideeffect_CommitEdgesMap.java
191
public class AIXCLibrary implements CLibrary { private static final Function memmove; private static final Function bcopy; static { Function memmoveFc; try { memmoveFc = Function.getFunction(Platform.C_LIBRARY_NAME, "memmove"); } catch (UnsatisfiedLinkError linkError) { memmoveFc = null; } Function bcopyFc; try { bcopyFc = Function.getFunction(Platform.C_LIBRARY_NAME, "bcopy"); } catch (UnsatisfiedLinkError linkError) { bcopyFc = null; } memmove = memmoveFc; bcopy = bcopyFc; OLogManager.instance().debug(CLibrary.class, "Following c library functions were found memmove : %s , bcopy : %s.", memmoveFc != null ? "yes" : "no", bcopyFc != null ? "yes" : "no"); } public void memoryMove(long src, long dest, long len) { final Pointer srcPointer = new Pointer(src); final Pointer destPointer = new Pointer(dest); if (memmove != null) memmove.invoke(Pointer.class, new Object[] { destPointer, srcPointer, new NativeLong(len) }); else if (bcopy != null) bcopy.invokeVoid(new Object[] { srcPointer, destPointer, new NativeLong(len) }); else { if (src > dest) for (long n = 0; n < len; n++) destPointer.setByte(n, srcPointer.getByte(n)); else for (long n = len - 1; n >= 0; n--) destPointer.setByte(n, srcPointer.getByte(n)); } } }
0 (true)
nativeos_src_main_java_com_orientechnologies_nio_AIXCLibrary.java
186
public class OPair<K extends Comparable<K>, V> implements Entry<K, V>, Comparable<OPair<K, V>> { public K key; public V value; public OPair() { } public OPair(final K iKey, final V iValue) { key = iKey; value = iValue; } public OPair(final Entry<K, V> iSource) { key = iSource.getKey(); value = iSource.getValue(); } public void init(final K iKey, final V iValue) { key = iKey; value = iValue; } public K getKey() { return key; } public V getValue() { return value; } public V setValue(final V iValue) { V oldValue = value; value = iValue; return oldValue; } @Override public String toString() { return key + ":" + value; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((key == null) ? 0 : key.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; OPair<?, ?> other = (OPair<?, ?>) obj; if (key == null) { if (other.key != null) return false; } else if (!key.equals(other.key)) return false; return true; } public int compareTo(final OPair<K, V> o) { return key.compareTo(o.key); } }
0 (true)
commons_src_main_java_com_orientechnologies_common_util_OPair.java
295
public abstract class OTraverseAbstractProcess<T> extends OCommandProcess<OTraverse, T, OIdentifiable> { public OTraverseAbstractProcess(final OTraverse iCommand, final T iTarget) { super(iCommand, iTarget); command.getContext().push(this); } public abstract String getStatus(); public OIdentifiable drop() { command.getContext().pop(); return null; } }
1 (no label)
core_src_main_java_com_orientechnologies_orient_core_command_traverse_OTraverseAbstractProcess.java
301
public class OTraverseRecordSetProcess extends OTraverseAbstractProcess<Iterator<OIdentifiable>> { protected OIdentifiable record; protected int index = -1; public OTraverseRecordSetProcess(final OTraverse iCommand, final Iterator<OIdentifiable> iTarget) { super(iCommand, iTarget); } @SuppressWarnings("unchecked") public OIdentifiable process() { while (target.hasNext()) { record = target.next(); index++; final ORecord<?> rec = record.getRecord(); if (rec instanceof ODocument) { ODocument doc = (ODocument) rec; if (!doc.getIdentity().isPersistent() && doc.fields() == 1) { // EXTRACT THE FIELD CONTEXT Object fieldvalue = doc.field(doc.fieldNames()[0]); if (fieldvalue instanceof Collection<?>) { final OTraverseRecordSetProcess subProcess = new OTraverseRecordSetProcess(command, ((Collection<OIdentifiable>) fieldvalue).iterator()); final OIdentifiable subValue = subProcess.process(); if (subValue != null) return subValue; } else if (fieldvalue instanceof ODocument) { final OTraverseRecordProcess subProcess = new OTraverseRecordProcess(command, (ODocument) rec); final OIdentifiable subValue = subProcess.process(); if (subValue != null) return subValue; } } else { final OTraverseRecordProcess subProcess = new OTraverseRecordProcess(command, (ODocument) rec); final OIdentifiable subValue = subProcess.process(); if (subValue != null) return subValue; } } } return drop(); } @Override public String getStatus() { return null; } @Override public String toString() { return target != null ? target.toString() : "-"; } }
1 (no label)
core_src_main_java_com_orientechnologies_orient_core_command_traverse_OTraverseRecordSetProcess.java
1,222
public abstract class OStorageAbstract extends OSharedContainerImpl implements OStorage { protected final String url; protected final String mode; protected OStorageConfiguration configuration; protected String name; protected AtomicLong version = new AtomicLong(); protected OLevel2RecordCache level2Cache; protected volatile STATUS status = STATUS.CLOSED; protected final OSharedResourceAdaptiveExternal lock; public OStorageAbstract(final String name, final String URL, final String mode, final int timeout, final OCacheLevelTwoLocator cacheLocator) { if (OStringSerializerHelper.contains(name, '/')) this.name = name.substring(name.lastIndexOf("/") + 1); else this.name = name; if (OStringSerializerHelper.contains(name, ',')) throw new IllegalArgumentException("Invalid character in storage name: " + this.name); level2Cache = new OLevel2RecordCache(this, cacheLocator); level2Cache.startup(); url = URL; this.mode = mode; lock = new OSharedResourceAdaptiveExternal(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean(), timeout, true); } public OStorage getUnderlying() { return this; } public OStorageConfiguration getConfiguration() { return configuration; } public boolean isClosed() { return status == STATUS.CLOSED; } public boolean checkForRecordValidity(final OPhysicalPosition ppos) { return ppos != null && !ppos.recordVersion.isTombstone() && ppos.dataSegmentId > -1; } public String getName() { return name; } /** * Returns the configured local Level-2 cache component. Cache component is always created even if not used. * * @return */ public OLevel2RecordCache getLevel2Cache() { return level2Cache; } public String getURL() { return url; } public void close() { close(false); } public void close(final boolean iForce) { if (!checkForClose(iForce)) return; lock.acquireExclusiveLock(); try { for (Object resource : sharedResources.values()) { if (resource instanceof OSharedResource) ((OSharedResource) resource).releaseExclusiveLock(); if (resource instanceof OCloseable) ((OCloseable) resource).close(); } sharedResources.clear(); Orient.instance().unregisterStorage(this); } finally { lock.releaseExclusiveLock(); } } /** * Returns current storage's version as serial. */ public long getVersion() { return version.get(); } public boolean dropCluster(final String iClusterName, final boolean iTruncate) { return dropCluster(getClusterIdByName(iClusterName), iTruncate); } protected boolean checkForClose(final boolean iForce) { lock.acquireSharedLock(); try { if (status == STATUS.CLOSED) return false; final int remainingUsers = getUsers() > 0 ? 
removeUser() : 0; return iForce || (!OGlobalConfiguration.STORAGE_KEEP_OPEN.getValueAsBoolean() && remainingUsers == 0); } finally { lock.releaseSharedLock(); } } public int getUsers() { return lock.getUsers(); } public int addUser() { return lock.addUser(); } public int removeUser() { return lock.removeUser(); } public OSharedResourceAdaptiveExternal getLock() { return lock; } public long countRecords() { long tot = 0; for (OCluster c : getClusterInstances()) if (c != null) tot += c.getEntries() - c.getTombstonesCount(); return tot; } public <V> V callInLock(final Callable<V> iCallable, final boolean iExclusiveLock) { if (iExclusiveLock) lock.acquireExclusiveLock(); else lock.acquireSharedLock(); try { return iCallable.call(); } catch (RuntimeException e) { throw e; } catch (Exception e) { throw new OException("Error on nested call in lock", e); } finally { if (iExclusiveLock) lock.releaseExclusiveLock(); else lock.releaseSharedLock(); } } @Override public String toString() { return url != null ? url : "?"; } public STATUS getStatus() { return status; } public void checkForClusterPermissions(final String iClusterName) { // CHECK FOR ORESTRICTED OMetadata metaData = ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata(); if (metaData != null) { final Set<OClass> classes = metaData.getSchema().getClassesRelyOnCluster(iClusterName); for (OClass c : classes) { if (c.isSubClassOf(OSecurityShared.RESTRICTED_CLASSNAME)) throw new OSecurityException("Class " + c.getName() + " cannot be truncated because has record level security enabled (extends " + OSecurityShared.RESTRICTED_CLASSNAME + ")"); } } } @Override public boolean isDistributed() { return false; } }
1 (no label)
core_src_main_java_com_orientechnologies_orient_core_storage_OStorageAbstract.java
696
public class BulkRequest extends ActionRequest<BulkRequest> { private static final int REQUEST_OVERHEAD = 50; final List<ActionRequest> requests = Lists.newArrayList(); List<Object> payloads = null; protected TimeValue timeout = BulkShardRequest.DEFAULT_TIMEOUT; private ReplicationType replicationType = ReplicationType.DEFAULT; private WriteConsistencyLevel consistencyLevel = WriteConsistencyLevel.DEFAULT; private boolean refresh = false; private long sizeInBytes = 0; /** * Adds a list of requests to be executed. Either index or delete requests. */ public BulkRequest add(ActionRequest... requests) { for (ActionRequest request : requests) { add(request, null); } return this; } public BulkRequest add(ActionRequest request) { return add(request, null); } public BulkRequest add(ActionRequest request, @Nullable Object payload) { if (request instanceof IndexRequest) { add((IndexRequest) request, payload); } else if (request instanceof DeleteRequest) { add((DeleteRequest) request, payload); } else if (request instanceof UpdateRequest) { add((UpdateRequest) request, payload); } else { throw new ElasticsearchIllegalArgumentException("No support for request [" + request + "]"); } return this; } /** * Adds a list of requests to be executed. Either index or delete requests. */ public BulkRequest add(Iterable<ActionRequest> requests) { for (ActionRequest request : requests) { if (request instanceof IndexRequest) { add((IndexRequest) request); } else if (request instanceof DeleteRequest) { add((DeleteRequest) request); } else { throw new ElasticsearchIllegalArgumentException("No support for request [" + request + "]"); } } return this; } /** * Adds an {@link IndexRequest} to the list of actions to execute. Follows the same behavior of {@link IndexRequest} * (for example, if no id is provided, one will be generated, or usage of the create flag). */ public BulkRequest add(IndexRequest request) { request.beforeLocalFork(); return internalAdd(request, null); } public BulkRequest add(IndexRequest request, @Nullable Object payload) { request.beforeLocalFork(); return internalAdd(request, payload); } BulkRequest internalAdd(IndexRequest request, @Nullable Object payload) { requests.add(request); addPayload(payload); sizeInBytes += request.source().length() + REQUEST_OVERHEAD; return this; } /** * Adds an {@link UpdateRequest} to the list of actions to execute. */ public BulkRequest add(UpdateRequest request) { request.beforeLocalFork(); return internalAdd(request, null); } public BulkRequest add(UpdateRequest request, @Nullable Object payload) { request.beforeLocalFork(); return internalAdd(request, payload); } BulkRequest internalAdd(UpdateRequest request, @Nullable Object payload) { requests.add(request); addPayload(payload); if (request.doc() != null) { sizeInBytes += request.doc().source().length(); } if (request.upsertRequest() != null) { sizeInBytes += request.upsertRequest().source().length(); } if (request.script() != null) { sizeInBytes += request.script().length() * 2; } return this; } /** * Adds an {@link DeleteRequest} to the list of actions to execute. 
*/ public BulkRequest add(DeleteRequest request) { return add(request, null); } public BulkRequest add(DeleteRequest request, @Nullable Object payload) { requests.add(request); addPayload(payload); sizeInBytes += REQUEST_OVERHEAD; return this; } private void addPayload(Object payload) { if (payloads == null) { if (payload == null) { return; } payloads = new ArrayList<Object>(requests.size() + 10); // add requests#size-1 elements to the payloads if it null (we add for an *existing* request) for (int i = 1; i < requests.size(); i++) { payloads.add(null); } } payloads.add(payload); } /** * The list of requests in this bulk request. */ public List<ActionRequest> requests() { return this.requests; } /** * The list of optional payloads associated with requests in the same order as the requests. Note, elements within * it might be null if no payload has been provided. * <p/> * Note, if no payloads have been provided, this method will return null (as to conserve memory overhead). */ @Nullable public List<Object> payloads() { return this.payloads; } /** * The number of actions in the bulk request. */ public int numberOfActions() { return requests.size(); } /** * The estimated size in bytes of the bulk request. */ public long estimatedSizeInBytes() { return sizeInBytes; } /** * Adds a framed data in binary format */ public BulkRequest add(byte[] data, int from, int length, boolean contentUnsafe) throws Exception { return add(data, from, length, contentUnsafe, null, null); } /** * Adds a framed data in binary format */ public BulkRequest add(byte[] data, int from, int length, boolean contentUnsafe, @Nullable String defaultIndex, @Nullable String defaultType) throws Exception { return add(new BytesArray(data, from, length), contentUnsafe, defaultIndex, defaultType); } /** * Adds a framed data in binary format */ public BulkRequest add(BytesReference data, boolean contentUnsafe, @Nullable String defaultIndex, @Nullable String defaultType) throws Exception { return add(data, contentUnsafe, defaultIndex, defaultType, null, null, true); } /** * Adds a framed data in binary format */ public BulkRequest add(BytesReference data, boolean contentUnsafe, @Nullable String defaultIndex, @Nullable String defaultType, boolean allowExplicitIndex) throws Exception { return add(data, contentUnsafe, defaultIndex, defaultType, null, null, allowExplicitIndex); } public BulkRequest add(BytesReference data, boolean contentUnsafe, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String defaultRouting, @Nullable Object payload, boolean allowExplicitIndex) throws Exception { XContent xContent = XContentFactory.xContent(data); int from = 0; int length = data.length(); byte marker = xContent.streamSeparator(); while (true) { int nextMarker = findNextMarker(marker, from, data, length); if (nextMarker == -1) { break; } // now parse the action XContentParser parser = xContent.createParser(data.slice(from, nextMarker - from)); try { // move pointers from = nextMarker + 1; // Move to START_OBJECT XContentParser.Token token = parser.nextToken(); if (token == null) { continue; } assert token == XContentParser.Token.START_OBJECT; // Move to FIELD_NAME, that's the action token = parser.nextToken(); assert token == XContentParser.Token.FIELD_NAME; String action = parser.currentName(); String index = defaultIndex; String type = defaultType; String id = null; String routing = defaultRouting; String parent = null; String timestamp = null; Long ttl = null; String opType = null; long version = Versions.MATCH_ANY; 
VersionType versionType = VersionType.INTERNAL; int retryOnConflict = 0; // at this stage, next token can either be END_OBJECT (and use default index and type, with auto generated id) // or START_OBJECT which will have another set of parameters String currentFieldName = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token.isValue()) { if ("_index".equals(currentFieldName)) { if (!allowExplicitIndex) { throw new ElasticsearchIllegalArgumentException("explicit index in bulk is not allowed"); } index = parser.text(); } else if ("_type".equals(currentFieldName)) { type = parser.text(); } else if ("_id".equals(currentFieldName)) { id = parser.text(); } else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) { routing = parser.text(); } else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) { parent = parser.text(); } else if ("_timestamp".equals(currentFieldName) || "timestamp".equals(currentFieldName)) { timestamp = parser.text(); } else if ("_ttl".equals(currentFieldName) || "ttl".equals(currentFieldName)) { if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { ttl = TimeValue.parseTimeValue(parser.text(), null).millis(); } else { ttl = parser.longValue(); } } else if ("op_type".equals(currentFieldName) || "opType".equals(currentFieldName)) { opType = parser.text(); } else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) { version = parser.longValue(); } else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) { versionType = VersionType.fromString(parser.text()); } else if ("_retry_on_conflict".equals(currentFieldName) || "_retryOnConflict".equals(currentFieldName)) { retryOnConflict = parser.intValue(); } } } if ("delete".equals(action)) { add(new DeleteRequest(index, type, id).routing(routing).parent(parent).version(version).versionType(versionType), payload); } else { nextMarker = findNextMarker(marker, from, data, length); if (nextMarker == -1) { break; } // order is important, we set parent after routing, so routing will be set to parent if not set explicitly // we use internalAdd so we don't fork here, this allows us not to copy over the big byte array to small chunks // of index request. All index requests are still unsafe if applicable. 
if ("index".equals(action)) { if (opType == null) { internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType) .source(data.slice(from, nextMarker - from), contentUnsafe), payload); } else { internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType) .create("create".equals(opType)) .source(data.slice(from, nextMarker - from), contentUnsafe), payload); } } else if ("create".equals(action)) { internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType) .create(true) .source(data.slice(from, nextMarker - from), contentUnsafe), payload); } else if ("update".equals(action)) { UpdateRequest updateRequest = new UpdateRequest(index, type, id).routing(routing).parent(parent).retryOnConflict(retryOnConflict) .version(version).versionType(versionType) .source(data.slice(from, nextMarker - from)); IndexRequest upsertRequest = updateRequest.upsertRequest(); if (upsertRequest != null) { upsertRequest.routing(routing); upsertRequest.parent(parent); // order is important, set it after routing, so it will set the routing upsertRequest.timestamp(timestamp); upsertRequest.ttl(ttl); upsertRequest.version(version); upsertRequest.versionType(versionType); } IndexRequest doc = updateRequest.doc(); if (doc != null) { doc.routing(routing); doc.parent(parent); // order is important, set it after routing, so it will set the routing doc.timestamp(timestamp); doc.ttl(ttl); doc.version(version); doc.versionType(versionType); } internalAdd(updateRequest, payload); } // move pointers from = nextMarker + 1; } } finally { parser.close(); } } return this; } /** * Sets the consistency level of write. Defaults to {@link org.elasticsearch.action.WriteConsistencyLevel#DEFAULT} */ public BulkRequest consistencyLevel(WriteConsistencyLevel consistencyLevel) { this.consistencyLevel = consistencyLevel; return this; } public WriteConsistencyLevel consistencyLevel() { return this.consistencyLevel; } /** * Should a refresh be executed post this bulk operation causing the operations to * be searchable. Note, heavy indexing should not set this to <tt>true</tt>. Defaults * to <tt>false</tt>. */ public BulkRequest refresh(boolean refresh) { this.refresh = refresh; return this; } public boolean refresh() { return this.refresh; } /** * Set the replication type for this operation. */ public BulkRequest replicationType(ReplicationType replicationType) { this.replicationType = replicationType; return this; } public ReplicationType replicationType() { return this.replicationType; } /** * A timeout to wait if the index operation can't be performed immediately. Defaults to <tt>1m</tt>. */ public final BulkRequest timeout(TimeValue timeout) { this.timeout = timeout; return this; } /** * A timeout to wait if the index operation can't be performed immediately. Defaults to <tt>1m</tt>. 
*/ public final BulkRequest timeout(String timeout) { return timeout(TimeValue.parseTimeValue(timeout, null)); } public TimeValue timeout() { return timeout; } private int findNextMarker(byte marker, int from, BytesReference data, int length) { for (int i = from; i < length; i++) { if (data.get(i) == marker) { return i; } } return -1; } @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (requests.isEmpty()) { validationException = addValidationError("no requests added", validationException); } for (int i = 0; i < requests.size(); i++) { ActionRequestValidationException ex = requests.get(i).validate(); if (ex != null) { if (validationException == null) { validationException = new ActionRequestValidationException(); } validationException.addValidationErrors(ex.validationErrors()); } } return validationException; } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); replicationType = ReplicationType.fromId(in.readByte()); consistencyLevel = WriteConsistencyLevel.fromId(in.readByte()); int size = in.readVInt(); for (int i = 0; i < size; i++) { byte type = in.readByte(); if (type == 0) { IndexRequest request = new IndexRequest(); request.readFrom(in); requests.add(request); } else if (type == 1) { DeleteRequest request = new DeleteRequest(); request.readFrom(in); requests.add(request); } else if (type == 2) { UpdateRequest request = new UpdateRequest(); request.readFrom(in); requests.add(request); } } refresh = in.readBoolean(); timeout = TimeValue.readTimeValue(in); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeByte(replicationType.id()); out.writeByte(consistencyLevel.id()); out.writeVInt(requests.size()); for (ActionRequest request : requests) { if (request instanceof IndexRequest) { out.writeByte((byte) 0); } else if (request instanceof DeleteRequest) { out.writeByte((byte) 1); } else if (request instanceof UpdateRequest) { out.writeByte((byte) 2); } request.writeTo(out); } out.writeBoolean(refresh); timeout.writeTo(out); } }
1 (no label)
src_main_java_org_elasticsearch_action_bulk_BulkRequest.java
2,614
private static class FastStringCreator implements UTFEncoderDecoder.StringCreator { private final Constructor<String> constructor; private final boolean useOldStringConstructor; public FastStringCreator(Constructor<String> constructor) { this.constructor = constructor; this.useOldStringConstructor = constructor.getParameterTypes().length == 3; } @Override public String buildString(char[] chars) { try { if (useOldStringConstructor) { return constructor.newInstance(0, chars.length, chars); } else { return constructor.newInstance(chars, Boolean.TRUE); } } catch (Exception e) { throw new RuntimeException(e); } } }
1 (no label)
hazelcast_src_main_java_com_hazelcast_nio_UTFEncoderDecoder.java
1,516
public class ScriptMap { public static final String CLASS = Tokens.makeNamespace(ScriptMap.class) + ".class"; public static final String SCRIPT_PATH = Tokens.makeNamespace(ScriptMap.class) + ".scriptPath"; public static final String SCRIPT_ARGS = Tokens.makeNamespace(ScriptMap.class) + ".scriptArgs"; private static final String ARGS = "args"; private static final String V = "v"; private static final String SETUP_ARGS = "setup(args)"; private static final String MAP_V_ARGS = "map(v,args)"; private static final String CLEANUP_ARGS = "cleanup(args)"; public static Configuration createConfiguration(final String scriptUri, final String... args) { Configuration configuration = new EmptyConfiguration(); configuration.set(SCRIPT_PATH, scriptUri); configuration.setStrings(SCRIPT_ARGS, args); return configuration; } public static class Map extends Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex> { private final ScriptEngine engine = new FaunusGremlinScriptEngine(); private SafeMapperOutputs outputs; private Text textWritable = new Text(); @Override public void setup(final Mapper.Context context) throws IOException, InterruptedException { final FileSystem fs = FileSystem.get(context.getConfiguration()); try { this.engine.eval(new InputStreamReader(fs.open(new Path(context.getConfiguration().get(SCRIPT_PATH))))); this.engine.put(ARGS, context.getConfiguration().getStrings(SCRIPT_ARGS)); this.engine.eval(SETUP_ARGS); } catch (Exception e) { throw new InterruptedException(e.getMessage()); } this.outputs = new SafeMapperOutputs(context); } @Override public void map(final NullWritable key, final FaunusVertex value, final Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex>.Context context) throws IOException, InterruptedException { if (value.hasPaths()) { final Object result; try { this.engine.put(V, value); result = engine.eval(MAP_V_ARGS); } catch (Exception e) { throw new InterruptedException(e.getMessage()); } this.textWritable.set((null == result) ? Tokens.NULL : result.toString()); this.outputs.write(Tokens.SIDEEFFECT, NullWritable.get(), this.textWritable); } this.outputs.write(Tokens.GRAPH, NullWritable.get(), value); } @Override public void cleanup(final Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex>.Context context) throws IOException, InterruptedException { try { this.engine.eval(CLEANUP_ARGS); } catch (Exception e) { throw new InterruptedException(e.getMessage()); } this.outputs.close(); } } }
1 (no label)
titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_mapreduce_sideeffect_ScriptMap.java
813
@SuppressWarnings("unchecked") public class OSchemaShared extends ODocumentWrapperNoClass implements OSchema, OCloseable { private static final long serialVersionUID = 1L; public static final int CURRENT_VERSION_NUMBER = 4; private static final String DROP_INDEX_QUERY = "drop index "; protected Map<String, OClass> classes = new HashMap<String, OClass>(); public OSchemaShared(final int schemaClusterId) { super(new ODocument()); } public int countClasses() { getDatabase().checkSecurity(ODatabaseSecurityResources.SCHEMA, ORole.PERMISSION_READ); return getDatabase().getStorage().callInLock(new Callable<Integer>() { @Override public Integer call() throws Exception { return classes.size(); } }, false); } public OClass createClass(final Class<?> iClass) { final Class<?> superClass = iClass.getSuperclass(); final OClass cls; if (superClass != null && superClass != Object.class && existsClass(superClass.getSimpleName())) cls = getClass(superClass.getSimpleName()); else cls = null; return createClass(iClass.getSimpleName(), cls, OStorage.CLUSTER_TYPE.PHYSICAL); } public OClass createClass(final Class<?> iClass, final int iDefaultClusterId) { final Class<?> superClass = iClass.getSuperclass(); final OClass cls; if (superClass != null && superClass != Object.class && existsClass(superClass.getSimpleName())) cls = getClass(superClass.getSimpleName()); else cls = null; return createClass(iClass.getSimpleName(), cls, iDefaultClusterId); } public OClass createClass(final String iClassName) { return createClass(iClassName, null, OStorage.CLUSTER_TYPE.PHYSICAL); } public OClass createClass(final String iClassName, final OClass iSuperClass) { return createClass(iClassName, iSuperClass, OStorage.CLUSTER_TYPE.PHYSICAL); } public OClass createClass(final String iClassName, final OClass iSuperClass, final OStorage.CLUSTER_TYPE iType) { if (getDatabase().getTransaction().isActive()) throw new IllegalStateException("Cannot create class " + iClassName + " inside a transaction"); int clusterId = getDatabase().getClusterIdByName(iClassName); if (clusterId == -1) // CREATE A NEW CLUSTER clusterId = createCluster(iType.toString(), iClassName); return createClass(iClassName, iSuperClass, clusterId); } public OClass createClass(final String iClassName, final int iDefaultClusterId) { return createClass(iClassName, null, new int[] { iDefaultClusterId }); } public OClass createClass(final String iClassName, final OClass iSuperClass, final int iDefaultClusterId) { return createClass(iClassName, iSuperClass, new int[] { iDefaultClusterId }); } public OClass getOrCreateClass(final String iClassName) { return getOrCreateClass(iClassName, null); } public OClass getOrCreateClass(final String iClassName, final OClass iSuperClass) { return getDatabase().getStorage().callInLock(new Callable<OClass>() { @Override public OClass call() throws Exception { OClass cls = classes.get(iClassName.toLowerCase()); if (cls == null) cls = createClass(iClassName, iSuperClass); else if (iSuperClass != null && !cls.isSubClassOf(iSuperClass)) throw new IllegalArgumentException("Class '" + iClassName + "' is not an instance of " + iSuperClass.getShortName()); return cls; } }, true); } @Override public OClass createAbstractClass(final Class<?> iClass) { final Class<?> superClass = iClass.getSuperclass(); final OClass cls; if (superClass != null && superClass != Object.class && existsClass(superClass.getSimpleName())) cls = getClass(superClass.getSimpleName()); else cls = null; return createClass(iClass.getSimpleName(), cls, -1); } @Override public 
OClass createAbstractClass(final String iClassName) { return createClass(iClassName, null, -1); } @Override public OClass createAbstractClass(final String iClassName, final OClass iSuperClass) { return createClass(iClassName, iSuperClass, -1); } private int createCluster(String iType, String iClassName) { return getDatabase().command(new OCommandSQL("create cluster " + iClassName + " " + iType)).<Integer> execute(); } public OClass createClass(final String iClassName, final OClass iSuperClass, final int[] iClusterIds) { getDatabase().checkSecurity(ODatabaseSecurityResources.SCHEMA, ORole.PERMISSION_CREATE); final String key = iClassName.toLowerCase(); return getDatabase().getStorage().callInLock(new Callable<OClass>() { @Override public OClass call() throws Exception { if (classes.containsKey(key)) throw new OSchemaException("Class " + iClassName + " already exists in current database"); final StringBuilder cmd = new StringBuilder("create class "); cmd.append(iClassName); if (iSuperClass != null) { cmd.append(" extends "); cmd.append(iSuperClass.getName()); } if (iClusterIds != null) { if (iClusterIds.length == 1 && iClusterIds[0] == -1) cmd.append(" abstract"); else { cmd.append(" cluster "); for (int i = 0; i < iClusterIds.length; ++i) { if (i > 0) cmd.append(','); else cmd.append(' '); cmd.append(iClusterIds[i]); } } } getDatabase().command(new OCommandSQL(cmd.toString())).execute(); if (!(getDatabase().getStorage() instanceof OStorageEmbedded)) getDatabase().reload(); if (classes.containsKey(key)) return classes.get(key); else // ADD IT LOCALLY AVOIDING TO RELOAD THE ENTIRE SCHEMA createClassInternal(iClassName, iSuperClass, iClusterIds); return classes.get(key); } }, true); } public OClass createClassInternal(final String iClassName, final OClass superClass, final int[] iClusterIds) { if (iClassName == null || iClassName.length() == 0) throw new OSchemaException("Found class name null"); final Character wrongCharacter = checkNameIfValid(iClassName); if (wrongCharacter != null) throw new OSchemaException("Found invalid class name. 
Character '" + wrongCharacter + "' cannot be used in class name."); final ODatabaseRecord database = getDatabase(); final int[] clusterIds; if (iClusterIds == null || iClusterIds.length == 0) // CREATE A NEW CLUSTER clusterIds = new int[] { database.addCluster(CLUSTER_TYPE.PHYSICAL.toString(), iClassName, null, null) }; else clusterIds = iClusterIds; database.checkSecurity(ODatabaseSecurityResources.SCHEMA, ORole.PERMISSION_CREATE); final String key = iClassName.toLowerCase(); final OSchemaShared me = this; return getDatabase().getStorage().callInLock(new Callable<OClass>() { @Override public OClass call() throws Exception { if (classes.containsKey(key)) throw new OSchemaException("Class " + iClassName + " already exists in current database"); final OClassImpl cls = new OClassImpl(me, iClassName, clusterIds); classes.put(key, cls); if (cls.getShortName() != null) // BIND SHORT NAME TOO classes.put(cls.getShortName().toLowerCase(), cls); if (superClass != null) { cls.setSuperClassInternal(superClass); // UPDATE INDEXES final int[] clustersToIndex = superClass.getPolymorphicClusterIds(); final String[] clusterNames = new String[clustersToIndex.length]; for (int i = 0; i < clustersToIndex.length; i++) clusterNames[i] = database.getClusterNameById(clustersToIndex[i]); for (OIndex<?> index : superClass.getIndexes()) for (String clusterName : clusterNames) if (clusterName != null) database.getMetadata().getIndexManager().addClusterToIndex(clusterName, index.getName()); } return cls; } }, true); } public static Character checkNameIfValid(String iName) { if (iName == null) throw new IllegalArgumentException("Name is null"); iName = iName.trim(); final int nameSize = iName.length(); if (nameSize == 0) throw new IllegalArgumentException("Name is empty"); for (int i = 0; i < nameSize; ++i) { final char c = iName.charAt(i); if (c == ':' || c == ',' || c == ' ') // INVALID CHARACTER return c; } // for (char c : iName.toCharArray()) // if (!Character.isJavaIdentifierPart(c)) // return c; return null; } /* * (non-Javadoc) * * @see com.orientechnologies.orient.core.metadata.schema.OSchema#dropClass(java.lang.String) */ public void dropClass(final String iClassName) { if (getDatabase().getTransaction().isActive()) throw new IllegalStateException("Cannot drop a class inside a transaction"); if (iClassName == null) throw new IllegalArgumentException("Class name is null"); getDatabase().checkSecurity(ODatabaseSecurityResources.SCHEMA, ORole.PERMISSION_DELETE); final String key = iClassName.toLowerCase(); getDatabase().getStorage().callInLock(new Callable<Object>() { @Override public Object call() throws Exception { final OClass cls = classes.get(key); if (cls == null) throw new OSchemaException("Class " + iClassName + " was not found in current database"); if (cls.getBaseClasses().hasNext()) throw new OSchemaException("Class " + iClassName + " cannot be dropped because it has sub classes. 
Remove the dependencies before trying to drop it again"); final StringBuilder cmd = new StringBuilder("drop class "); cmd.append(iClassName); Object result = getDatabase().command(new OCommandSQL(cmd.toString())).execute(); if (result instanceof Boolean && (Boolean) result) { classes.remove(key); } getDatabase().reload(); reload(); return null; } }, true); } public void dropClassInternal(final String iClassName) { if (getDatabase().getTransaction().isActive()) throw new IllegalStateException("Cannot drop a class inside a transaction"); if (iClassName == null) throw new IllegalArgumentException("Class name is null"); getDatabase().checkSecurity(ODatabaseSecurityResources.SCHEMA, ORole.PERMISSION_DELETE); final String key = iClassName.toLowerCase(); getDatabase().getStorage().callInLock(new Callable<Object>() { @Override public Object call() throws Exception { final OClass cls = classes.get(key); if (cls == null) throw new OSchemaException("Class " + iClassName + " was not found in current database"); if (cls.getBaseClasses().hasNext()) throw new OSchemaException("Class " + iClassName + " cannot be dropped because it has sub classes. Remove the dependencies before trying to drop it again"); if (cls.getSuperClass() != null) { // REMOVE DEPENDENCY FROM SUPERCLASS ((OClassImpl) cls.getSuperClass()).removeBaseClassInternal(cls); } dropClassIndexes(cls); classes.remove(key); if (cls.getShortName() != null) // REMOVE THE ALIAS TOO classes.remove(cls.getShortName().toLowerCase()); return null; } }, true); } private void dropClassIndexes(final OClass cls) { for (final OIndex<?> index : getDatabase().getMetadata().getIndexManager().getClassIndexes(cls.getName())) { getDatabase().command(new OCommandSQL(DROP_INDEX_QUERY + index.getName())); } } /** * Reloads the schema inside a storage's shared lock. 
*/ @Override public <RET extends ODocumentWrapper> RET reload() { getDatabase().getStorage().callInLock(new Callable<Object>() { @Override public Object call() throws Exception { reload(null); return null; } }, true); return (RET) this; } public boolean existsClass(final String iClassName) { return getDatabase().getStorage().callInLock(new Callable<Boolean>() { @Override public Boolean call() throws Exception { return classes.containsKey(iClassName.toLowerCase()); } }, false); } /* * (non-Javadoc) * * @see com.orientechnologies.orient.core.metadata.schema.OSchema#getClass(java.lang.Class) */ public OClass getClass(final Class<?> iClass) { return getClass(iClass.getSimpleName()); } /* * (non-Javadoc) * * @see com.orientechnologies.orient.core.metadata.schema.OSchema#getClass(java.lang.String) */ public OClass getClass(final String iClassName) { if (iClassName == null) return null; OClass cls; cls = getDatabase().getStorage().callInLock(new Callable<OClass>() { @Override public OClass call() throws Exception { return classes.get(iClassName.toLowerCase()); } }, false); if (cls == null && getDatabase().getDatabaseOwner() instanceof ODatabaseObject) { cls = getDatabase().getStorage().callInLock(new Callable<OClass>() { @Override public OClass call() throws Exception { OClass cls = classes.get(iClassName.toLowerCase()); if (cls == null) { // CHECK IF CAN AUTO-CREATE IT final ODatabase ownerDb = getDatabase().getDatabaseOwner(); if (ownerDb instanceof ODatabaseObject) { final Class<?> javaClass = ((ODatabaseObject) ownerDb).getEntityManager().getEntityClass(iClassName); if (javaClass != null) { // AUTO REGISTER THE CLASS AT FIRST USE cls = cascadeCreate(javaClass); } } } return cls; } }, true); } return cls; } public void changeClassName(final String iOldName, final String iNewName) { getDatabase().getStorage().callInLock(new Callable<Object>() { @Override public Object call() throws Exception { final OClass clazz = classes.remove(iOldName.toLowerCase()); classes.put(iNewName.toLowerCase(), clazz); return null; } }, true); } /** * Binds ODocument to POJO. */ @Override public void fromStream() { final OSchemaShared me = this; getDatabase().getStorage().callInLock(new Callable<Object>() { @Override public Object call() throws Exception { // READ CURRENT SCHEMA VERSION final Integer schemaVersion = (Integer) document.field("schemaVersion"); if (schemaVersion == null) { OLogManager .instance() .error( this, "Database's schema is empty! Recreating the system classes and allow the opening of the database but double check the integrity of the database"); return null; } else if (schemaVersion.intValue() != CURRENT_VERSION_NUMBER) { // HANDLE SCHEMA UPGRADE throw new OConfigurationException( "Database schema is different. 
Please export your old database with the previous version of OrientDB and reimport it using the current one."); } // REGISTER ALL THE CLASSES classes.clear(); OClassImpl cls; Collection<ODocument> storedClasses = document.field("classes"); for (ODocument c : storedClasses) { cls = new OClassImpl(me, c); cls.fromStream(); classes.put(cls.getName().toLowerCase(), cls); if (cls.getShortName() != null) classes.put(cls.getShortName().toLowerCase(), cls); } // REBUILD THE INHERITANCE TREE String superClassName; OClass superClass; for (ODocument c : storedClasses) { superClassName = c.field("superClass"); if (superClassName != null) { // HAS A SUPER CLASS cls = (OClassImpl) classes.get(((String) c.field("name")).toLowerCase()); superClass = classes.get(superClassName.toLowerCase()); if (superClass == null) throw new OConfigurationException("Super class '" + superClassName + "' was declared in class '" + cls.getName() + "' but was not found in schema. Remove the dependency or create the class to continue."); cls.setSuperClassInternal(superClass); } } return null; } }, true); } /** * Binds POJO to ODocument. */ @Override @OBeforeSerialization public ODocument toStream() { return getDatabase().getStorage().callInLock(new Callable<ODocument>() { @Override public ODocument call() throws Exception { document.setInternalStatus(ORecordElement.STATUS.UNMARSHALLING); try { document.field("schemaVersion", CURRENT_VERSION_NUMBER); Set<ODocument> cc = new HashSet<ODocument>(); for (OClass c : classes.values()) cc.add(((OClassImpl) c).toStream()); document.field("classes", cc, OType.EMBEDDEDSET); } finally { document.setInternalStatus(ORecordElement.STATUS.LOADED); } return document; } }, false); } public Collection<OClass> getClasses() { getDatabase().checkSecurity(ODatabaseSecurityResources.SCHEMA, ORole.PERMISSION_READ); return getDatabase().getStorage().callInLock(new Callable<Collection<OClass>>() { @Override public HashSet<OClass> call() throws Exception { return new HashSet<OClass>(classes.values()); } }, false); } @Override public Set<OClass> getClassesRelyOnCluster(final String iClusterName) { getDatabase().checkSecurity(ODatabaseSecurityResources.SCHEMA, ORole.PERMISSION_READ); return getDatabase().getStorage().callInLock(new Callable<Set<OClass>>() { @Override public Set<OClass> call() throws Exception { final int clusterId = getDatabase().getClusterIdByName(iClusterName); final Set<OClass> result = new HashSet<OClass>(); for (OClass c : classes.values()) { if (OArrays.contains(c.getPolymorphicClusterIds(), clusterId)) result.add(c); } return result; } }, false); } @Override public OSchemaShared load() { getDatabase().getStorage().callInLock(new Callable<Object>() { @Override public Object call() throws Exception { getDatabase(); ((ORecordId) document.getIdentity()).fromString(getDatabase().getStorage().getConfiguration().schemaRecordId); reload("*:-1 index:0"); return null; } }, true); return this; } public void create() { final ODatabaseRecord db = getDatabase(); super.save(OMetadataDefault.CLUSTER_INTERNAL_NAME); db.getStorage().getConfiguration().schemaRecordId = document.getIdentity().toString(); db.getStorage().getConfiguration().update(); } public void close() { classes.clear(); document.clear(); } public void saveInternal() { final ODatabaseRecord db = getDatabase(); if (db.getTransaction().isActive()) throw new OSchemaException("Cannot change the schema while a transaction is active. 
Schema changes are not transactional"); db.getStorage().callInLock(new Callable<Object>() { @Override public Object call() throws Exception { saveInternal(OMetadataDefault.CLUSTER_INTERNAL_NAME); return null; } }, true); } @Deprecated public int getVersion() { return getDatabase().getStorage().callInLock(new Callable<Integer>() { @Override public Integer call() throws Exception { return document.getRecordVersion().getCounter(); } }, false); } public ORID getIdentity() { return document.getIdentity(); } /** * Avoid to handle this by user API. */ @Override public <RET extends ODocumentWrapper> RET save() { return (RET) this; } /** * Avoid to handle this by user API. */ @Override public <RET extends ODocumentWrapper> RET save(final String iClusterName) { return (RET) this; } public OSchemaShared setDirty() { document.setDirty(); return this; } private OClass cascadeCreate(final Class<?> javaClass) { final OClassImpl cls = (OClassImpl) createClass(javaClass.getSimpleName()); final Class<?> javaSuperClass = javaClass.getSuperclass(); if (javaSuperClass != null && !javaSuperClass.getName().equals("java.lang.Object") && !javaSuperClass.getName().startsWith("com.orientechnologies")) { OClass superClass = classes.get(javaSuperClass.getSimpleName().toLowerCase()); if (superClass == null) superClass = cascadeCreate(javaSuperClass); cls.setSuperClass(superClass); } return cls; } private ODatabaseRecord getDatabase() { return ODatabaseRecordThreadLocal.INSTANCE.get(); } private void saveInternal(final String iClusterName) { document.setDirty(); for (int retry = 0; retry < 10; retry++) try { super.save(OMetadataDefault.CLUSTER_INTERNAL_NAME); break; } catch (OConcurrentModificationException e) { reload(null, true); } super.save(OMetadataDefault.CLUSTER_INTERNAL_NAME); } }
1no label
core_src_main_java_com_orientechnologies_orient_core_metadata_schema_OSchemaShared.java
1,320
public class FieldType implements Serializable, BroadleafEnumerationType { private static final long serialVersionUID = 1L; private static final Map<String, FieldType> TYPES = new LinkedHashMap<String, FieldType>(); public static final FieldType ID = new FieldType("id", "ID"); public static final FieldType CATEGORY = new FieldType("category", "Category"); public static final FieldType INT = new FieldType("i", "Integer"); public static final FieldType INTS = new FieldType("is", "Integer (Multi)"); public static final FieldType STRING = new FieldType("s", "String"); public static final FieldType STRINGS = new FieldType("ss", "String (Multi)"); public static final FieldType LONG = new FieldType("l", "Long"); public static final FieldType LONGS = new FieldType("ls", "Long (Multi)"); public static final FieldType TEXT = new FieldType("t", "Text"); public static final FieldType TEXTS = new FieldType("txt", "Text (Multi)"); public static final FieldType BOOLEAN = new FieldType("b", "Boolean"); public static final FieldType BOOLEANS = new FieldType("bs", "Boolean (Multi)"); public static final FieldType DOUBLE = new FieldType("d", "Double"); public static final FieldType DOUBLES = new FieldType("ds", "Double (Multi)"); public static final FieldType PRICE = new FieldType("p", "Price"); public static final FieldType DATE = new FieldType("dt", "Date"); public static final FieldType DATES = new FieldType("dts", "Date (Multi)"); public static final FieldType TRIEINT = new FieldType("ti", "Trie Integer"); public static final FieldType TRIELONG = new FieldType("tl", "Trie Long"); public static final FieldType TRIEDOUBLE = new FieldType("td", "Trie Double"); public static final FieldType TRIEDATE = new FieldType("tdt", "Trie Date"); public static FieldType getInstance(final String type) { return TYPES.get(type); } private String type; private String friendlyType; public FieldType() { //do nothing } public FieldType(final String type, final String friendlyType) { this.friendlyType = friendlyType; setType(type); } @Override public String getType() { return type; } @Override public String getFriendlyType() { return friendlyType; } private void setType(final String type) { this.type = type; if (!TYPES.containsKey(type)) { TYPES.put(type, this); } } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((type == null) ? 0 : type.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; FieldType other = (FieldType) obj; if (type == null) { if (other.type != null) return false; } else if (!type.equals(other.type)) return false; return true; } }
1no label
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_search_domain_solr_FieldType.java
2,519
public class SmileXContent implements XContent { public static XContentBuilder contentBuilder() throws IOException { return XContentBuilder.builder(smileXContent); } final static SmileFactory smileFactory; public final static SmileXContent smileXContent; static { smileFactory = new SmileFactory(); smileFactory.configure(SmileGenerator.Feature.ENCODE_BINARY_AS_7BIT, false); // for now, this is an overhead, might make sense for web sockets smileXContent = new SmileXContent(); } private SmileXContent() { } @Override public XContentType type() { return XContentType.SMILE; } @Override public byte streamSeparator() { return (byte) 0xFF; } @Override public XContentGenerator createGenerator(OutputStream os) throws IOException { return new SmileXContentGenerator(smileFactory.createGenerator(os, JsonEncoding.UTF8)); } @Override public XContentGenerator createGenerator(Writer writer) throws IOException { return new SmileXContentGenerator(smileFactory.createGenerator(writer)); } @Override public XContentParser createParser(String content) throws IOException { return new SmileXContentParser(smileFactory.createParser(new FastStringReader(content))); } @Override public XContentParser createParser(InputStream is) throws IOException { return new SmileXContentParser(smileFactory.createParser(is)); } @Override public XContentParser createParser(byte[] data) throws IOException { return new SmileXContentParser(smileFactory.createParser(data)); } @Override public XContentParser createParser(byte[] data, int offset, int length) throws IOException { return new SmileXContentParser(smileFactory.createParser(data, offset, length)); } @Override public XContentParser createParser(BytesReference bytes) throws IOException { if (bytes.hasArray()) { return createParser(bytes.array(), bytes.arrayOffset(), bytes.length()); } return createParser(bytes.streamInput()); } @Override public XContentParser createParser(Reader reader) throws IOException { return new JsonXContentParser(smileFactory.createParser(reader)); } }
1no label
src_main_java_org_elasticsearch_common_xcontent_smile_SmileXContent.java
199
public interface ClientConnectionManager { void shutdown(); void start(); ClientConnection tryToConnect(Address address) throws Exception; ClientConnection ownerConnection(Address address) throws Exception; boolean removeEventHandler(Integer callId); }
0true
hazelcast-client_src_main_java_com_hazelcast_client_connection_ClientConnectionManager.java
1,121
public class OSQLFunctionDecode extends OSQLFunctionAbstract { public static final String NAME = "decode"; /** * Get the date at construction to have the same date for all the iteration. */ public OSQLFunctionDecode() { super(NAME, 2, 2); } @Override public Object execute(OIdentifiable iCurrentRecord, Object iCurrentResult, final Object[] iParameters, OCommandContext iContext) { final String candidate = iParameters[0].toString(); final String format = iParameters[1].toString(); if(OSQLFunctionEncode.FORMAT_BASE64.equalsIgnoreCase(format)){ return OBase64Utils.decode(candidate); }else{ throw new OException("unknowned format :"+format); } } @Override public String getSyntax() { return "Syntax error: decode(<binaryfield>, <format>)"; } }
1no label
core_src_main_java_com_orientechnologies_orient_core_sql_functions_misc_OSQLFunctionDecode.java
209
public class QueryParserSettings { public static final boolean DEFAULT_ALLOW_LEADING_WILDCARD = true; public static final boolean DEFAULT_ANALYZE_WILDCARD = false; public static final float DEFAULT_BOOST = 1.f; private String queryString; private String defaultField; private float boost = DEFAULT_BOOST; private MapperQueryParser.Operator defaultOperator = QueryParser.Operator.OR; private boolean autoGeneratePhraseQueries = false; private boolean allowLeadingWildcard = DEFAULT_ALLOW_LEADING_WILDCARD; private boolean lowercaseExpandedTerms = true; private boolean enablePositionIncrements = true; private int phraseSlop = 0; private float fuzzyMinSim = FuzzyQuery.defaultMinSimilarity; private int fuzzyPrefixLength = FuzzyQuery.defaultPrefixLength; private int fuzzyMaxExpansions = FuzzyQuery.defaultMaxExpansions; private MultiTermQuery.RewriteMethod fuzzyRewriteMethod = null; private boolean analyzeWildcard = DEFAULT_ANALYZE_WILDCARD; private boolean escape = false; private Analyzer defaultAnalyzer = null; private Analyzer defaultQuoteAnalyzer = null; private Analyzer forcedAnalyzer = null; private Analyzer forcedQuoteAnalyzer = null; private String quoteFieldSuffix = null; private MultiTermQuery.RewriteMethod rewriteMethod = MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT; private String minimumShouldMatch; private boolean lenient; List<String> fields = null; Collection<String> queryTypes = null; ObjectFloatOpenHashMap<String> boosts = null; float tieBreaker = 0.0f; boolean useDisMax = true; public boolean isCacheable() { // a hack for now :) to determine if a query string is cacheable return !queryString.contains("now"); } public String queryString() { return queryString; } public void queryString(String queryString) { this.queryString = queryString; } public String defaultField() { return defaultField; } public void defaultField(String defaultField) { this.defaultField = defaultField; } public float boost() { return boost; } public void boost(float boost) { this.boost = boost; } public QueryParser.Operator defaultOperator() { return defaultOperator; } public void defaultOperator(QueryParser.Operator defaultOperator) { this.defaultOperator = defaultOperator; } public boolean autoGeneratePhraseQueries() { return autoGeneratePhraseQueries; } public void autoGeneratePhraseQueries(boolean autoGeneratePhraseQueries) { this.autoGeneratePhraseQueries = autoGeneratePhraseQueries; } public boolean allowLeadingWildcard() { return allowLeadingWildcard; } public void allowLeadingWildcard(boolean allowLeadingWildcard) { this.allowLeadingWildcard = allowLeadingWildcard; } public boolean lowercaseExpandedTerms() { return lowercaseExpandedTerms; } public void lowercaseExpandedTerms(boolean lowercaseExpandedTerms) { this.lowercaseExpandedTerms = lowercaseExpandedTerms; } public boolean enablePositionIncrements() { return enablePositionIncrements; } public void enablePositionIncrements(boolean enablePositionIncrements) { this.enablePositionIncrements = enablePositionIncrements; } public int phraseSlop() { return phraseSlop; } public void phraseSlop(int phraseSlop) { this.phraseSlop = phraseSlop; } public float fuzzyMinSim() { return fuzzyMinSim; } public void fuzzyMinSim(float fuzzyMinSim) { this.fuzzyMinSim = fuzzyMinSim; } public int fuzzyPrefixLength() { return fuzzyPrefixLength; } public void fuzzyPrefixLength(int fuzzyPrefixLength) { this.fuzzyPrefixLength = fuzzyPrefixLength; } public int fuzzyMaxExpansions() { return fuzzyMaxExpansions; } public void fuzzyMaxExpansions(int fuzzyMaxExpansions) { 
this.fuzzyMaxExpansions = fuzzyMaxExpansions; } public MultiTermQuery.RewriteMethod fuzzyRewriteMethod() { return fuzzyRewriteMethod; } public void fuzzyRewriteMethod(MultiTermQuery.RewriteMethod fuzzyRewriteMethod) { this.fuzzyRewriteMethod = fuzzyRewriteMethod; } public boolean escape() { return escape; } public void escape(boolean escape) { this.escape = escape; } public Analyzer defaultAnalyzer() { return defaultAnalyzer; } public void defaultAnalyzer(Analyzer defaultAnalyzer) { this.defaultAnalyzer = defaultAnalyzer; } public Analyzer defaultQuoteAnalyzer() { return defaultQuoteAnalyzer; } public void defaultQuoteAnalyzer(Analyzer defaultAnalyzer) { this.defaultQuoteAnalyzer = defaultAnalyzer; } public Analyzer forcedAnalyzer() { return forcedAnalyzer; } public void forcedAnalyzer(Analyzer forcedAnalyzer) { this.forcedAnalyzer = forcedAnalyzer; } public Analyzer forcedQuoteAnalyzer() { return forcedQuoteAnalyzer; } public void forcedQuoteAnalyzer(Analyzer forcedAnalyzer) { this.forcedQuoteAnalyzer = forcedAnalyzer; } public boolean analyzeWildcard() { return this.analyzeWildcard; } public void analyzeWildcard(boolean analyzeWildcard) { this.analyzeWildcard = analyzeWildcard; } public MultiTermQuery.RewriteMethod rewriteMethod() { return this.rewriteMethod; } public void rewriteMethod(MultiTermQuery.RewriteMethod rewriteMethod) { this.rewriteMethod = rewriteMethod; } public String minimumShouldMatch() { return this.minimumShouldMatch; } public void minimumShouldMatch(String minimumShouldMatch) { this.minimumShouldMatch = minimumShouldMatch; } public void quoteFieldSuffix(String quoteFieldSuffix) { this.quoteFieldSuffix = quoteFieldSuffix; } public String quoteFieldSuffix() { return this.quoteFieldSuffix; } public void lenient(boolean lenient) { this.lenient = lenient; } public boolean lenient() { return this.lenient; } public List<String> fields() { return fields; } public void fields(List<String> fields) { this.fields = fields; } public Collection<String> queryTypes() { return queryTypes; } public void queryTypes(Collection<String> queryTypes) { this.queryTypes = queryTypes; } public ObjectFloatOpenHashMap<String> boosts() { return boosts; } public void boosts(ObjectFloatOpenHashMap<String> boosts) { this.boosts = boosts; } public float tieBreaker() { return tieBreaker; } public void tieBreaker(float tieBreaker) { this.tieBreaker = tieBreaker; } public boolean useDisMax() { return useDisMax; } public void useDisMax(boolean useDisMax) { this.useDisMax = useDisMax; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; QueryParserSettings that = (QueryParserSettings) o; if (autoGeneratePhraseQueries != that.autoGeneratePhraseQueries()) return false; if (allowLeadingWildcard != that.allowLeadingWildcard) return false; if (Float.compare(that.boost, boost) != 0) return false; if (enablePositionIncrements != that.enablePositionIncrements) return false; if (escape != that.escape) return false; if (analyzeWildcard != that.analyzeWildcard) return false; if (Float.compare(that.fuzzyMinSim, fuzzyMinSim) != 0) return false; if (fuzzyPrefixLength != that.fuzzyPrefixLength) return false; if (fuzzyMaxExpansions != that.fuzzyMaxExpansions) return false; if (fuzzyRewriteMethod != null ? 
!fuzzyRewriteMethod.equals(that.fuzzyRewriteMethod) : that.fuzzyRewriteMethod != null) return false; if (lowercaseExpandedTerms != that.lowercaseExpandedTerms) return false; if (phraseSlop != that.phraseSlop) return false; if (defaultAnalyzer != null ? !defaultAnalyzer.equals(that.defaultAnalyzer) : that.defaultAnalyzer != null) return false; if (defaultQuoteAnalyzer != null ? !defaultQuoteAnalyzer.equals(that.defaultQuoteAnalyzer) : that.defaultQuoteAnalyzer != null) return false; if (forcedAnalyzer != null ? !forcedAnalyzer.equals(that.forcedAnalyzer) : that.forcedAnalyzer != null) return false; if (forcedQuoteAnalyzer != null ? !forcedQuoteAnalyzer.equals(that.forcedQuoteAnalyzer) : that.forcedQuoteAnalyzer != null) return false; if (defaultField != null ? !defaultField.equals(that.defaultField) : that.defaultField != null) return false; if (defaultOperator != that.defaultOperator) return false; if (queryString != null ? !queryString.equals(that.queryString) : that.queryString != null) return false; if (rewriteMethod != null ? !rewriteMethod.equals(that.rewriteMethod) : that.rewriteMethod != null) return false; if (minimumShouldMatch != null ? !minimumShouldMatch.equals(that.minimumShouldMatch) : that.minimumShouldMatch != null) return false; if (quoteFieldSuffix != null ? !quoteFieldSuffix.equals(that.quoteFieldSuffix) : that.quoteFieldSuffix != null) return false; if (lenient != that.lenient) { return false; } if (Float.compare(that.tieBreaker, tieBreaker) != 0) return false; if (useDisMax != that.useDisMax) return false; if (boosts != null ? !boosts.equals(that.boosts) : that.boosts != null) return false; if (fields != null ? !fields.equals(that.fields) : that.fields != null) return false; if (queryTypes != null ? !queryTypes.equals(that.queryTypes) : that.queryTypes != null) return false; return true; } @Override public int hashCode() { int result = queryString != null ? queryString.hashCode() : 0; result = 31 * result + (defaultField != null ? defaultField.hashCode() : 0); result = 31 * result + (boost != +0.0f ? Float.floatToIntBits(boost) : 0); result = 31 * result + (defaultOperator != null ? defaultOperator.hashCode() : 0); result = 31 * result + (autoGeneratePhraseQueries ? 1 : 0); result = 31 * result + (allowLeadingWildcard ? 1 : 0); result = 31 * result + (lowercaseExpandedTerms ? 1 : 0); result = 31 * result + (enablePositionIncrements ? 1 : 0); result = 31 * result + phraseSlop; result = 31 * result + (fuzzyMinSim != +0.0f ? Float.floatToIntBits(fuzzyMinSim) : 0); result = 31 * result + fuzzyPrefixLength; result = 31 * result + (escape ? 1 : 0); result = 31 * result + (defaultAnalyzer != null ? defaultAnalyzer.hashCode() : 0); result = 31 * result + (defaultQuoteAnalyzer != null ? defaultQuoteAnalyzer.hashCode() : 0); result = 31 * result + (forcedAnalyzer != null ? forcedAnalyzer.hashCode() : 0); result = 31 * result + (forcedQuoteAnalyzer != null ? forcedQuoteAnalyzer.hashCode() : 0); result = 31 * result + (analyzeWildcard ? 1 : 0); result = 31 * result + (fields != null ? fields.hashCode() : 0); result = 31 * result + (queryTypes != null ? queryTypes.hashCode() : 0); result = 31 * result + (boosts != null ? boosts.hashCode() : 0); result = 31 * result + (tieBreaker != +0.0f ? Float.floatToIntBits(tieBreaker) : 0); result = 31 * result + (useDisMax ? 1 : 0); return result; } }
0true
src_main_java_org_apache_lucene_queryparser_classic_QueryParserSettings.java
1,500
public class GroupProperties { public static final String PROP_HOSTED_MANAGEMENT_ENABLED = "hazelcast.hosted.management.enabled"; public static final String PROP_HOSTED_MANAGEMENT_URL = "hazelcast.hosted.management.url"; public static final String PROP_HEALTH_MONITORING_LEVEL = "hazelcast.health.monitoring.level"; public static final String PROP_HEALTH_MONITORING_DELAY_SECONDS = "hazelcast.health.monitoring.delay.seconds"; public static final String PROP_VERSION_CHECK_ENABLED = "hazelcast.version.check.enabled"; public static final String PROP_PREFER_IPv4_STACK = "hazelcast.prefer.ipv4.stack"; public static final String PROP_IO_THREAD_COUNT = "hazelcast.io.thread.count"; /** * The number of partition threads per Member. If this is less than the number of partitions on a Member, then * partition operations will queue behind other operations of different partitions. The default is 4. */ public static final String PROP_PARTITION_OPERATION_THREAD_COUNT = "hazelcast.operation.thread.count"; public static final String PROP_GENERIC_OPERATION_THREAD_COUNT = "hazelcast.operation.generic.thread.count"; public static final String PROP_EVENT_THREAD_COUNT = "hazelcast.event.thread.count"; public static final String PROP_EVENT_QUEUE_CAPACITY = "hazelcast.event.queue.capacity"; public static final String PROP_EVENT_QUEUE_TIMEOUT_MILLIS = "hazelcast.event.queue.timeout.millis"; public static final String PROP_CONNECT_ALL_WAIT_SECONDS = "hazelcast.connect.all.wait.seconds"; public static final String PROP_MEMCACHE_ENABLED = "hazelcast.memcache.enabled"; public static final String PROP_REST_ENABLED = "hazelcast.rest.enabled"; public static final String PROP_MAP_LOAD_CHUNK_SIZE = "hazelcast.map.load.chunk.size"; public static final String PROP_MERGE_FIRST_RUN_DELAY_SECONDS = "hazelcast.merge.first.run.delay.seconds"; public static final String PROP_MERGE_NEXT_RUN_DELAY_SECONDS = "hazelcast.merge.next.run.delay.seconds"; public static final String PROP_OPERATION_CALL_TIMEOUT_MILLIS = "hazelcast.operation.call.timeout.millis"; public static final String PROP_SOCKET_BIND_ANY = "hazelcast.socket.bind.any"; public static final String PROP_SOCKET_SERVER_BIND_ANY = "hazelcast.socket.server.bind.any"; public static final String PROP_SOCKET_CLIENT_BIND_ANY = "hazelcast.socket.client.bind.any"; public static final String PROP_SOCKET_CLIENT_BIND = "hazelcast.socket.client.bind"; public static final String PROP_SOCKET_RECEIVE_BUFFER_SIZE = "hazelcast.socket.receive.buffer.size"; public static final String PROP_SOCKET_SEND_BUFFER_SIZE = "hazelcast.socket.send.buffer.size"; public static final String PROP_SOCKET_LINGER_SECONDS = "hazelcast.socket.linger.seconds"; public static final String PROP_SOCKET_KEEP_ALIVE = "hazelcast.socket.keep.alive"; public static final String PROP_SOCKET_NO_DELAY = "hazelcast.socket.no.delay"; public static final String PROP_SHUTDOWNHOOK_ENABLED = "hazelcast.shutdownhook.enabled"; public static final String PROP_WAIT_SECONDS_BEFORE_JOIN = "hazelcast.wait.seconds.before.join"; public static final String PROP_MAX_WAIT_SECONDS_BEFORE_JOIN = "hazelcast.max.wait.seconds.before.join"; public static final String PROP_MAX_JOIN_SECONDS = "hazelcast.max.join.seconds"; public static final String PROP_MAX_JOIN_MERGE_TARGET_SECONDS = "hazelcast.max.join.merge.target.seconds"; public static final String PROP_HEARTBEAT_INTERVAL_SECONDS = "hazelcast.heartbeat.interval.seconds"; public static final String PROP_MAX_NO_HEARTBEAT_SECONDS = "hazelcast.max.no.heartbeat.seconds"; public static final String 
PROP_MAX_NO_MASTER_CONFIRMATION_SECONDS = "hazelcast.max.no.master.confirmation.seconds"; public static final String PROP_MASTER_CONFIRMATION_INTERVAL_SECONDS = "hazelcast.master.confirmation.interval.seconds"; public static final String PROP_MEMBER_LIST_PUBLISH_INTERVAL_SECONDS = "hazelcast.member.list.publish.interval.seconds"; public static final String PROP_ICMP_ENABLED = "hazelcast.icmp.enabled"; public static final String PROP_ICMP_TIMEOUT = "hazelcast.icmp.timeout"; public static final String PROP_ICMP_TTL = "hazelcast.icmp.ttl"; public static final String PROP_INITIAL_MIN_CLUSTER_SIZE = "hazelcast.initial.min.cluster.size"; public static final String PROP_INITIAL_WAIT_SECONDS = "hazelcast.initial.wait.seconds"; public static final String PROP_MAP_REPLICA_WAIT_SECONDS_FOR_SCHEDULED_OPERATIONS = "hazelcast.map.replica.wait.seconds.for.scheduled.tasks"; public static final String PROP_PARTITION_COUNT = "hazelcast.partition.count"; public static final String PROP_LOGGING_TYPE = "hazelcast.logging.type"; public static final String PROP_ENABLE_JMX = "hazelcast.jmx"; public static final String PROP_ENABLE_JMX_DETAILED = "hazelcast.jmx.detailed"; public static final String PROP_MC_MAX_VISIBLE_INSTANCE_COUNT = "hazelcast.mc.max.visible.instance.count"; public static final String PROP_MC_URL_CHANGE_ENABLED = "hazelcast.mc.url.change.enabled"; public static final String PROP_CONNECTION_MONITOR_INTERVAL = "hazelcast.connection.monitor.interval"; public static final String PROP_CONNECTION_MONITOR_MAX_FAULTS = "hazelcast.connection.monitor.max.faults"; public static final String PROP_PARTITION_MIGRATION_INTERVAL = "hazelcast.partition.migration.interval"; public static final String PROP_PARTITION_MIGRATION_TIMEOUT = "hazelcast.partition.migration.timeout"; public static final String PROP_PARTITION_MIGRATION_ZIP_ENABLED = "hazelcast.partition.migration.zip.enabled"; public static final String PROP_PARTITION_TABLE_SEND_INTERVAL = "hazelcast.partition.table.send.interval"; public static final String PROP_PARTITION_BACKUP_SYNC_INTERVAL = "hazelcast.partition.backup.sync.interval"; public static final String PROP_PARTITIONING_STRATEGY_CLASS = "hazelcast.partitioning.strategy.class"; public static final String PROP_GRACEFUL_SHUTDOWN_MAX_WAIT = "hazelcast.graceful.shutdown.max.wait"; public static final String PROP_SYSTEM_LOG_ENABLED = "hazelcast.system.log.enabled"; public static final String PROP_ELASTIC_MEMORY_ENABLED = "hazelcast.elastic.memory.enabled"; public static final String PROP_ELASTIC_MEMORY_TOTAL_SIZE = "hazelcast.elastic.memory.total.size"; public static final String PROP_ELASTIC_MEMORY_CHUNK_SIZE = "hazelcast.elastic.memory.chunk.size"; public static final String PROP_ELASTIC_MEMORY_SHARED_STORAGE = "hazelcast.elastic.memory.shared.storage"; public static final String PROP_ELASTIC_MEMORY_UNSAFE_ENABLED = "hazelcast.elastic.memory.unsafe.enabled"; public static final String PROP_ENTERPRISE_LICENSE_KEY = "hazelcast.enterprise.license.key"; /** * This property will only be used temporary until we have exposed the hosted management center to the public. * So it will be disabled by default. 
*/ public final GroupProperty HOSTED_MANAGEMENT_ENABLED; public final GroupProperty HOSTED_MANAGEMENT_URL; public final GroupProperty PARTITION_OPERATION_THREAD_COUNT; public final GroupProperty GENERIC_OPERATION_THREAD_COUNT; public final GroupProperty EVENT_THREAD_COUNT; public final GroupProperty HEALTH_MONITORING_LEVEL; public final GroupProperty HEALTH_MONITORING_DELAY_SECONDS; public final GroupProperty IO_THREAD_COUNT; public final GroupProperty EVENT_QUEUE_CAPACITY; public final GroupProperty EVENT_QUEUE_TIMEOUT_MILLIS; public final GroupProperty PREFER_IPv4_STACK; public final GroupProperty CONNECT_ALL_WAIT_SECONDS; public final GroupProperty VERSION_CHECK_ENABLED; public final GroupProperty MEMCACHE_ENABLED; public final GroupProperty REST_ENABLED; public final GroupProperty MAP_LOAD_CHUNK_SIZE; public final GroupProperty MERGE_FIRST_RUN_DELAY_SECONDS; public final GroupProperty MERGE_NEXT_RUN_DELAY_SECONDS; public final GroupProperty OPERATION_CALL_TIMEOUT_MILLIS; public final GroupProperty SOCKET_SERVER_BIND_ANY; public final GroupProperty SOCKET_CLIENT_BIND_ANY; public final GroupProperty SOCKET_CLIENT_BIND; // number of kilobytes public final GroupProperty SOCKET_RECEIVE_BUFFER_SIZE; // number of kilobytes public final GroupProperty SOCKET_SEND_BUFFER_SIZE; public final GroupProperty SOCKET_LINGER_SECONDS; public final GroupProperty SOCKET_KEEP_ALIVE; public final GroupProperty SOCKET_NO_DELAY; public final GroupProperty SHUTDOWNHOOK_ENABLED; public final GroupProperty WAIT_SECONDS_BEFORE_JOIN; public final GroupProperty MAX_WAIT_SECONDS_BEFORE_JOIN; public final GroupProperty MAX_JOIN_SECONDS; public final GroupProperty MAX_JOIN_MERGE_TARGET_SECONDS; public final GroupProperty MAX_NO_HEARTBEAT_SECONDS; public final GroupProperty HEARTBEAT_INTERVAL_SECONDS; public final GroupProperty MASTER_CONFIRMATION_INTERVAL_SECONDS; public final GroupProperty MAX_NO_MASTER_CONFIRMATION_SECONDS; public final GroupProperty MEMBER_LIST_PUBLISH_INTERVAL_SECONDS; public final GroupProperty ICMP_ENABLED; public final GroupProperty ICMP_TIMEOUT; public final GroupProperty ICMP_TTL; public final GroupProperty INITIAL_WAIT_SECONDS; public final GroupProperty INITIAL_MIN_CLUSTER_SIZE; public final GroupProperty MAP_REPLICA_WAIT_SECONDS_FOR_SCHEDULED_TASKS; public final GroupProperty PARTITION_COUNT; public final GroupProperty LOGGING_TYPE; public final GroupProperty ENABLE_JMX; public final GroupProperty ENABLE_JMX_DETAILED; public final GroupProperty MC_MAX_INSTANCE_COUNT; public final GroupProperty MC_URL_CHANGE_ENABLED; public final GroupProperty CONNECTION_MONITOR_INTERVAL; public final GroupProperty CONNECTION_MONITOR_MAX_FAULTS; public final GroupProperty PARTITION_MIGRATION_INTERVAL; public final GroupProperty PARTITION_MIGRATION_TIMEOUT; public final GroupProperty PARTITION_MIGRATION_ZIP_ENABLED; public final GroupProperty PARTITION_TABLE_SEND_INTERVAL; public final GroupProperty PARTITION_BACKUP_SYNC_INTERVAL; public final GroupProperty PARTITIONING_STRATEGY_CLASS; public final GroupProperty GRACEFUL_SHUTDOWN_MAX_WAIT; public final GroupProperty SYSTEM_LOG_ENABLED; public final GroupProperty ELASTIC_MEMORY_ENABLED; public final GroupProperty ELASTIC_MEMORY_TOTAL_SIZE; public final GroupProperty ELASTIC_MEMORY_CHUNK_SIZE; public final GroupProperty ELASTIC_MEMORY_SHARED_STORAGE; public final GroupProperty ELASTIC_MEMORY_UNSAFE_ENABLED; public final GroupProperty ENTERPRISE_LICENSE_KEY; /** * * @param config */ public GroupProperties(Config config) { HOSTED_MANAGEMENT_ENABLED = new 
GroupProperty(config, PROP_HOSTED_MANAGEMENT_ENABLED, "false"); //todo: we need to pull out the version. HOSTED_MANAGEMENT_URL = new GroupProperty(config, PROP_HOSTED_MANAGEMENT_URL, "http://manage.hazelcast.com/3.2"); HEALTH_MONITORING_LEVEL = new GroupProperty(config, PROP_HEALTH_MONITORING_LEVEL, HealthMonitorLevel.SILENT.toString()); HEALTH_MONITORING_DELAY_SECONDS = new GroupProperty(config, PROP_HEALTH_MONITORING_DELAY_SECONDS, "30"); VERSION_CHECK_ENABLED = new GroupProperty(config, PROP_VERSION_CHECK_ENABLED, "true"); PREFER_IPv4_STACK = new GroupProperty(config, PROP_PREFER_IPv4_STACK, "true"); IO_THREAD_COUNT = new GroupProperty(config, PROP_IO_THREAD_COUNT, "3"); //-1 means that the value is worked out dynamically. PARTITION_OPERATION_THREAD_COUNT = new GroupProperty(config, PROP_PARTITION_OPERATION_THREAD_COUNT, "-1"); GENERIC_OPERATION_THREAD_COUNT = new GroupProperty(config, PROP_GENERIC_OPERATION_THREAD_COUNT, "-1"); EVENT_THREAD_COUNT = new GroupProperty(config, PROP_EVENT_THREAD_COUNT, "5"); EVENT_QUEUE_CAPACITY = new GroupProperty(config, PROP_EVENT_QUEUE_CAPACITY, "1000000"); EVENT_QUEUE_TIMEOUT_MILLIS = new GroupProperty(config, PROP_EVENT_QUEUE_TIMEOUT_MILLIS, "250"); CONNECT_ALL_WAIT_SECONDS = new GroupProperty(config, PROP_CONNECT_ALL_WAIT_SECONDS, "120"); MEMCACHE_ENABLED = new GroupProperty(config, PROP_MEMCACHE_ENABLED, "true"); REST_ENABLED = new GroupProperty(config, PROP_REST_ENABLED, "true"); MAP_LOAD_CHUNK_SIZE = new GroupProperty(config, PROP_MAP_LOAD_CHUNK_SIZE, "1000"); MERGE_FIRST_RUN_DELAY_SECONDS = new GroupProperty(config, PROP_MERGE_FIRST_RUN_DELAY_SECONDS, "300"); MERGE_NEXT_RUN_DELAY_SECONDS = new GroupProperty(config, PROP_MERGE_NEXT_RUN_DELAY_SECONDS, "120"); OPERATION_CALL_TIMEOUT_MILLIS = new GroupProperty(config, PROP_OPERATION_CALL_TIMEOUT_MILLIS, "60000"); final GroupProperty SOCKET_BIND_ANY = new GroupProperty(config, PROP_SOCKET_BIND_ANY, "true"); SOCKET_SERVER_BIND_ANY = new GroupProperty(config, PROP_SOCKET_SERVER_BIND_ANY, SOCKET_BIND_ANY); SOCKET_CLIENT_BIND_ANY = new GroupProperty(config, PROP_SOCKET_CLIENT_BIND_ANY, SOCKET_BIND_ANY); SOCKET_CLIENT_BIND = new GroupProperty(config, PROP_SOCKET_CLIENT_BIND, "true"); SOCKET_RECEIVE_BUFFER_SIZE = new GroupProperty(config, PROP_SOCKET_RECEIVE_BUFFER_SIZE, "32"); SOCKET_SEND_BUFFER_SIZE = new GroupProperty(config, PROP_SOCKET_SEND_BUFFER_SIZE, "32"); SOCKET_LINGER_SECONDS = new GroupProperty(config, PROP_SOCKET_LINGER_SECONDS, "0"); SOCKET_KEEP_ALIVE = new GroupProperty(config, PROP_SOCKET_KEEP_ALIVE, "true"); SOCKET_NO_DELAY = new GroupProperty(config, PROP_SOCKET_NO_DELAY, "true"); SHUTDOWNHOOK_ENABLED = new GroupProperty(config, PROP_SHUTDOWNHOOK_ENABLED, "true"); WAIT_SECONDS_BEFORE_JOIN = new GroupProperty(config, PROP_WAIT_SECONDS_BEFORE_JOIN, "5"); MAX_WAIT_SECONDS_BEFORE_JOIN = new GroupProperty(config, PROP_MAX_WAIT_SECONDS_BEFORE_JOIN, "20"); MAX_JOIN_SECONDS = new GroupProperty(config, PROP_MAX_JOIN_SECONDS, "300"); MAX_JOIN_MERGE_TARGET_SECONDS = new GroupProperty(config, PROP_MAX_JOIN_MERGE_TARGET_SECONDS, "20"); HEARTBEAT_INTERVAL_SECONDS = new GroupProperty(config, PROP_HEARTBEAT_INTERVAL_SECONDS, "1"); MAX_NO_HEARTBEAT_SECONDS = new GroupProperty(config, PROP_MAX_NO_HEARTBEAT_SECONDS, "300"); MASTER_CONFIRMATION_INTERVAL_SECONDS = new GroupProperty(config, PROP_MASTER_CONFIRMATION_INTERVAL_SECONDS, "30"); MAX_NO_MASTER_CONFIRMATION_SECONDS = new GroupProperty(config, PROP_MAX_NO_MASTER_CONFIRMATION_SECONDS, "300"); MEMBER_LIST_PUBLISH_INTERVAL_SECONDS = new 
GroupProperty(config, PROP_MEMBER_LIST_PUBLISH_INTERVAL_SECONDS, "300"); ICMP_ENABLED = new GroupProperty(config, PROP_ICMP_ENABLED, "false"); ICMP_TIMEOUT = new GroupProperty(config, PROP_ICMP_TIMEOUT, "1000"); ICMP_TTL = new GroupProperty(config, PROP_ICMP_TTL, "0"); INITIAL_MIN_CLUSTER_SIZE = new GroupProperty(config, PROP_INITIAL_MIN_CLUSTER_SIZE, "0"); INITIAL_WAIT_SECONDS = new GroupProperty(config, PROP_INITIAL_WAIT_SECONDS, "0"); MAP_REPLICA_WAIT_SECONDS_FOR_SCHEDULED_TASKS = new GroupProperty(config, PROP_MAP_REPLICA_WAIT_SECONDS_FOR_SCHEDULED_OPERATIONS, "10"); PARTITION_COUNT = new GroupProperty(config, PROP_PARTITION_COUNT, "271"); LOGGING_TYPE = new GroupProperty(config, PROP_LOGGING_TYPE, "jdk"); ENABLE_JMX = new GroupProperty(config, PROP_ENABLE_JMX, "false"); ENABLE_JMX_DETAILED = new GroupProperty(config, PROP_ENABLE_JMX_DETAILED, "false"); MC_MAX_INSTANCE_COUNT = new GroupProperty(config, PROP_MC_MAX_VISIBLE_INSTANCE_COUNT, "100"); MC_URL_CHANGE_ENABLED = new GroupProperty(config, PROP_MC_URL_CHANGE_ENABLED, "true"); CONNECTION_MONITOR_INTERVAL = new GroupProperty(config, PROP_CONNECTION_MONITOR_INTERVAL, "100"); CONNECTION_MONITOR_MAX_FAULTS = new GroupProperty(config, PROP_CONNECTION_MONITOR_MAX_FAULTS, "3"); PARTITION_MIGRATION_INTERVAL = new GroupProperty(config, PROP_PARTITION_MIGRATION_INTERVAL, "0"); PARTITION_MIGRATION_TIMEOUT = new GroupProperty(config, PROP_PARTITION_MIGRATION_TIMEOUT, "300"); PARTITION_MIGRATION_ZIP_ENABLED = new GroupProperty(config, PROP_PARTITION_MIGRATION_ZIP_ENABLED, "true"); PARTITION_TABLE_SEND_INTERVAL = new GroupProperty(config, PROP_PARTITION_TABLE_SEND_INTERVAL, "15"); PARTITION_BACKUP_SYNC_INTERVAL = new GroupProperty(config, PROP_PARTITION_BACKUP_SYNC_INTERVAL, "30"); PARTITIONING_STRATEGY_CLASS = new GroupProperty(config, PROP_PARTITIONING_STRATEGY_CLASS, ""); GRACEFUL_SHUTDOWN_MAX_WAIT = new GroupProperty(config, PROP_GRACEFUL_SHUTDOWN_MAX_WAIT, "600"); SYSTEM_LOG_ENABLED = new GroupProperty(config, PROP_SYSTEM_LOG_ENABLED, "true"); ELASTIC_MEMORY_ENABLED = new GroupProperty(config, PROP_ELASTIC_MEMORY_ENABLED, "false"); ELASTIC_MEMORY_TOTAL_SIZE = new GroupProperty(config, PROP_ELASTIC_MEMORY_TOTAL_SIZE, "128M"); ELASTIC_MEMORY_CHUNK_SIZE = new GroupProperty(config, PROP_ELASTIC_MEMORY_CHUNK_SIZE, "1K"); ELASTIC_MEMORY_SHARED_STORAGE = new GroupProperty(config, PROP_ELASTIC_MEMORY_SHARED_STORAGE, "false"); ELASTIC_MEMORY_UNSAFE_ENABLED = new GroupProperty(config, PROP_ELASTIC_MEMORY_UNSAFE_ENABLED, "false"); ENTERPRISE_LICENSE_KEY = new GroupProperty(config, PROP_ENTERPRISE_LICENSE_KEY); } public static class GroupProperty { private final String name; private final String value; GroupProperty(Config config, String name) { this(config, name, (String) null); } GroupProperty(Config config, String name, GroupProperty defaultValue) { this(config, name, defaultValue != null ? defaultValue.getString() : null); } GroupProperty(Config config, String name, String defaultValue) { this.name = name; String configValue = (config != null) ? 
config.getProperty(name) : null; if (configValue != null) { value = configValue; } else if (System.getProperty(name) != null) { value = System.getProperty(name); } else { value = defaultValue; } } public String getName() { return this.name; } public String getValue() { return value; } public int getInteger() { return Integer.parseInt(this.value); } public byte getByte() { return Byte.parseByte(this.value); } public boolean getBoolean() { return Boolean.valueOf(this.value); } public String getString() { return value; } public long getLong() { return Long.parseLong(this.value); } @Override public String toString() { return "GroupProperty [name=" + this.name + ", value=" + this.value + "]"; } } }
1no label
hazelcast_src_main_java_com_hazelcast_instance_GroupProperties.java
1,271
@Deprecated public class ShippingServiceType implements Serializable, BroadleafEnumerationType { private static final long serialVersionUID = 1L; private static final Map<String, ShippingServiceType> TYPES = new LinkedHashMap<String, ShippingServiceType>(); public static final ShippingServiceType BANDED_SHIPPING = new ShippingServiceType("BANDED_SHIPPING", "Banded Shipping"); public static final ShippingServiceType USPS = new ShippingServiceType("USPS", "United States Postal Service"); public static final ShippingServiceType FED_EX = new ShippingServiceType("FED_EX", "Federal Express"); public static final ShippingServiceType UPS = new ShippingServiceType("UPS", "United Parcel Service"); public static final ShippingServiceType DHL = new ShippingServiceType("DHL", "DHL"); public static ShippingServiceType getInstance(final String type) { return TYPES.get(type); } private String type; private String friendlyType; public ShippingServiceType() { //do nothing } public ShippingServiceType(final String type, final String friendlyType) { this.friendlyType = friendlyType; setType(type); } @Override public String getType() { return type; } @Override public String getFriendlyType() { return friendlyType; } private void setType(final String type) { this.type = type; if (!TYPES.containsKey(type)) { TYPES.put(type, this); } } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((type == null) ? 0 : type.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; ShippingServiceType other = (ShippingServiceType) obj; if (type == null) { if (other.type != null) return false; } else if (!type.equals(other.type)) return false; return true; } }
1no label
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_pricing_service_workflow_type_ShippingServiceType.java
558
typeIntersection = Collections2.filter(indexService.mapperService().types(), new Predicate<String>() { @Override public boolean apply(String type) { return Regex.simpleMatch(types, type); } });
0true
src_main_java_org_elasticsearch_action_admin_indices_mapping_get_TransportGetFieldMappingsAction.java
2,612
private class SendPingRequest implements Runnable { private final DiscoveryNode node; private SendPingRequest(DiscoveryNode node) { this.node = node; } @Override public void run() { if (!running) { return; } transportService.sendRequest(node, PingRequestHandler.ACTION, new PingRequest(node.id()), options().withType(TransportRequestOptions.Type.PING).withTimeout(pingRetryTimeout), new BaseTransportResponseHandler<PingResponse>() { @Override public PingResponse newInstance() { return new PingResponse(); } @Override public void handleResponse(PingResponse response) { if (!running) { return; } NodeFD nodeFD = nodesFD.get(node); if (nodeFD != null) { if (!nodeFD.running) { return; } nodeFD.retryCount = 0; threadPool.schedule(pingInterval, ThreadPool.Names.SAME, SendPingRequest.this); } } @Override public void handleException(TransportException exp) { // check if the master node did not get switched on us... if (!running) { return; } if (exp instanceof ConnectTransportException) { // ignore this one, we already handle it by registering a connection listener return; } NodeFD nodeFD = nodesFD.get(node); if (nodeFD != null) { if (!nodeFD.running) { return; } int retryCount = ++nodeFD.retryCount; logger.trace("[node ] failed to ping [{}], retry [{}] out of [{}]", exp, node, retryCount, pingRetryCount); if (retryCount >= pingRetryCount) { logger.debug("[node ] failed to ping [{}], tried [{}] times, each with maximum [{}] timeout", node, pingRetryCount, pingRetryTimeout); // not good, failure if (nodesFD.remove(node) != null) { notifyNodeFailure(node, "failed to ping, tried [" + pingRetryCount + "] times, each with maximum [" + pingRetryTimeout + "] timeout"); } } else { // resend the request, not reschedule, rely on send timeout transportService.sendRequest(node, PingRequestHandler.ACTION, new PingRequest(node.id()), options().withType(TransportRequestOptions.Type.PING).withTimeout(pingRetryTimeout), this); } } } @Override public String executor() { return ThreadPool.Names.SAME; } }); } }
1no label
src_main_java_org_elasticsearch_discovery_zen_fd_NodesFaultDetection.java
236
.registerHookValue(profilerPrefix + "current", "Number of entries in cache", METRIC_TYPE.SIZE, new OProfilerHookValue() { public Object getValue() { return getSize(); } }, profilerMetadataPrefix + "current");
0true
core_src_main_java_com_orientechnologies_orient_core_cache_OAbstractRecordCache.java
1,461
public class OGraphCommandExecutorSQLFactory implements OCommandExecutorSQLFactory { private static final Map<String, Class<? extends OCommandExecutorSQLAbstract>> COMMANDS; static { // COMMANDS final Map<String, Class<? extends OCommandExecutorSQLAbstract>> commands = new HashMap<String, Class<? extends OCommandExecutorSQLAbstract>>(); commands.put(OCommandExecutorSQLCreateEdge.NAME, OCommandExecutorSQLCreateEdge.class); commands.put(OCommandExecutorSQLDeleteEdge.NAME, OCommandExecutorSQLDeleteEdge.class); commands.put(OCommandExecutorSQLCreateVertex.NAME, OCommandExecutorSQLCreateVertex.class); commands.put(OCommandExecutorSQLDeleteVertex.NAME, OCommandExecutorSQLDeleteVertex.class); COMMANDS = Collections.unmodifiableMap(commands); } /** * {@inheritDoc} */ public Set<String> getCommandNames() { return COMMANDS.keySet(); } /** * {@inheritDoc} */ public OCommandExecutorSQLAbstract createCommand(final String name) throws OCommandExecutionException { final Class<? extends OCommandExecutorSQLAbstract> clazz = COMMANDS.get(name); if (clazz == null) { throw new OCommandExecutionException("Unknowned command name :" + name); } try { return clazz.newInstance(); } catch (Exception e) { throw new OCommandExecutionException("Error in creation of command " + name + "(). Probably there is not an empty constructor or the constructor generates errors", e); } } /** * Returns a OrientBaseGraph implementation from the current database in thread local. * * @return */ public static OrientBaseGraph getGraph() { ODatabaseRecord database = ODatabaseRecordThreadLocal.INSTANCE.get(); if (!(database instanceof ODatabaseDocumentTx)) database = new ODatabaseDocumentTx((ODatabaseRecordTx) database); return new OrientGraphNoTx((ODatabaseDocumentTx) database); } }
1no label
graphdb_src_main_java_com_orientechnologies_orient_graph_sql_OGraphCommandExecutorSQLFactory.java
1,512
public class Node { private final ILogger logger; private final AtomicBoolean joined = new AtomicBoolean(false); private volatile boolean active; private volatile boolean completelyShutdown; private final Set<Address> failedConnections = Collections.newSetFromMap(new ConcurrentHashMap<Address, Boolean>()); private final NodeShutdownHookThread shutdownHookThread = new NodeShutdownHookThread("hz.ShutdownThread"); private final SerializationServiceImpl serializationService; public final NodeEngineImpl nodeEngine; public final ClientEngineImpl clientEngine; public final InternalPartitionService partitionService; public final ClusterServiceImpl clusterService; public final MulticastService multicastService; public final ConnectionManager connectionManager; public final TextCommandServiceImpl textCommandService; public final Config config; public final GroupProperties groupProperties; public final Address address; public final MemberImpl localMember; private volatile Address masterAddress = null; public final HazelcastInstanceImpl hazelcastInstance; public final LoggingServiceImpl loggingService; private final SystemLogService systemLogService; private final Joiner joiner; public final NodeInitializer initializer; private ManagementCenterService managementCenterService; public final SecurityContext securityContext; public final ThreadGroup threadGroup; private final ClassLoader configClassLoader; private final BuildInfo buildInfo; public Node(HazelcastInstanceImpl hazelcastInstance, Config config, NodeContext nodeContext) { this.hazelcastInstance = hazelcastInstance; this.threadGroup = hazelcastInstance.threadGroup; this.config = config; configClassLoader = config.getClassLoader(); this.groupProperties = new GroupProperties(config); SerializationService ss; try { String partitioningStrategyClassName = groupProperties.PARTITIONING_STRATEGY_CLASS.getString(); final PartitioningStrategy partitioningStrategy; if (partitioningStrategyClassName != null && partitioningStrategyClassName.length() > 0) { partitioningStrategy = ClassLoaderUtil.newInstance(configClassLoader, partitioningStrategyClassName); } else { partitioningStrategy = new DefaultPartitioningStrategy(); } ss = new SerializationServiceBuilder() .setClassLoader(configClassLoader) .setConfig(config.getSerializationConfig() != null ? 
config.getSerializationConfig() : new SerializationConfig()) .setManagedContext(hazelcastInstance.managedContext) .setPartitioningStrategy(partitioningStrategy) .setHazelcastInstance(hazelcastInstance) .build(); } catch (Exception e) { throw ExceptionUtil.rethrow(e); } buildInfo = BuildInfoProvider.getBuildInfo(); serializationService = (SerializationServiceImpl) ss; systemLogService = new SystemLogService(groupProperties.SYSTEM_LOG_ENABLED.getBoolean()); String loggingType = groupProperties.LOGGING_TYPE.getString(); loggingService = new LoggingServiceImpl(systemLogService, config.getGroupConfig().getName(), loggingType, buildInfo); final AddressPicker addressPicker = nodeContext.createAddressPicker(this); try { addressPicker.pickAddress(); } catch (Throwable e) { throw ExceptionUtil.rethrow(e); } final ServerSocketChannel serverSocketChannel = addressPicker.getServerSocketChannel(); address = addressPicker.getPublicAddress(); final Map<String, Object> memberAttributes = findMemberAttributes(config.getMemberAttributeConfig().asReadOnly()); localMember = new MemberImpl(address, true, UuidUtil.createMemberUuid(address), hazelcastInstance, memberAttributes); loggingService.setThisMember(localMember); logger = loggingService.getLogger(Node.class.getName()); initializer = NodeInitializerFactory.create(configClassLoader); try { initializer.beforeInitialize(this); } catch (Throwable e) { try { serverSocketChannel.close(); } catch (Throwable ignored) { } throw ExceptionUtil.rethrow(e); } securityContext = config.getSecurityConfig().isEnabled() ? initializer.getSecurityContext() : null; nodeEngine = new NodeEngineImpl(this); clientEngine = new ClientEngineImpl(this); connectionManager = nodeContext.createConnectionManager(this, serverSocketChannel); partitionService = new InternalPartitionServiceImpl(this); clusterService = new ClusterServiceImpl(this); textCommandService = new TextCommandServiceImpl(this); initializer.printNodeInfo(this); VersionCheck.check(this, getBuildInfo().getBuild(), getBuildInfo().getVersion()); JoinConfig join = config.getNetworkConfig().getJoin(); MulticastService mcService = null; try { if (join.getMulticastConfig().isEnabled()) { MulticastConfig multicastConfig = join.getMulticastConfig(); MulticastSocket multicastSocket = new MulticastSocket(null); multicastSocket.setReuseAddress(true); // bind to receive interface multicastSocket.bind(new InetSocketAddress(multicastConfig.getMulticastPort())); multicastSocket.setTimeToLive(multicastConfig.getMulticastTimeToLive()); try { // set the send interface final Address bindAddress = addressPicker.getBindAddress(); // http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4417033 // http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6402758 if (!bindAddress.getInetAddress().isLoopbackAddress()) { multicastSocket.setInterface(bindAddress.getInetAddress()); } } catch (Exception e) { logger.warning(e); } multicastSocket.setReceiveBufferSize(64 * 1024); multicastSocket.setSendBufferSize(64 * 1024); String multicastGroup = System.getProperty("hazelcast.multicast.group"); if (multicastGroup == null) { multicastGroup = multicastConfig.getMulticastGroup(); } multicastConfig.setMulticastGroup(multicastGroup); multicastSocket.joinGroup(InetAddress.getByName(multicastGroup)); multicastSocket.setSoTimeout(1000); mcService = new MulticastService(this, multicastSocket); mcService.addMulticastListener(new NodeMulticastListener(this)); } } catch (Exception e) { logger.severe(e); } this.multicastService = mcService; initializeListeners(config); 
joiner = nodeContext.createJoiner(this); } private void initializeListeners(Config config) { for (final ListenerConfig listenerCfg : config.getListenerConfigs()) { Object listener = listenerCfg.getImplementation(); if (listener == null) { try { listener = ClassLoaderUtil.newInstance(configClassLoader, listenerCfg.getClassName()); } catch (Exception e) { logger.severe(e); } } if (listener instanceof HazelcastInstanceAware) { ((HazelcastInstanceAware) listener).setHazelcastInstance(hazelcastInstance); } boolean known = false; if (listener instanceof DistributedObjectListener) { final ProxyServiceImpl proxyService = (ProxyServiceImpl) nodeEngine.getProxyService(); proxyService.addProxyListener((DistributedObjectListener) listener); known = true; } if (listener instanceof MembershipListener) { clusterService.addMembershipListener((MembershipListener) listener); known = true; } if (listener instanceof MigrationListener) { partitionService.addMigrationListener((MigrationListener) listener); known = true; } if (listener instanceof LifecycleListener) { hazelcastInstance.lifecycleService.addLifecycleListener((LifecycleListener) listener); known = true; } if (listener != null && !known) { final String error = "Unknown listener type: " + listener.getClass(); Throwable t = new IllegalArgumentException(error); logger.warning(error, t); } } } public ManagementCenterService getManagementCenterService() { return managementCenterService; } public SystemLogService getSystemLogService() { return systemLogService; } public void failedConnection(Address address) { if (logger.isFinestEnabled()) { logger.finest(getThisAddress() + " failed connecting to " + address); } failedConnections.add(address); } public SerializationService getSerializationService() { return serializationService; } public ClusterServiceImpl getClusterService() { return clusterService; } public InternalPartitionService getPartitionService() { return partitionService; } public Address getMasterAddress() { return masterAddress; } public Address getThisAddress() { return address; } public MemberImpl getLocalMember() { return localMember; } public String getName() { return hazelcastInstance.getName(); } public String getThreadNamePrefix(String name) { return "hz." + getName() + "." 
+ name; } public String getThreadPoolNamePrefix(String poolName) { return getThreadNamePrefix(poolName) + ".thread-"; } public boolean joined() { return joined.get(); } public boolean isMaster() { return address != null && address.equals(masterAddress); } public void setMasterAddress(final Address master) { if (master != null) { if (logger.isFinestEnabled()) { logger.finest("** setting master address to " + master); } } masterAddress = master; } public void start() { if (logger.isFinestEnabled()) { logger.finest("We are asked to start and completelyShutdown is " + String.valueOf(completelyShutdown)); } if (completelyShutdown) return; nodeEngine.start(); connectionManager.start(); if (config.getNetworkConfig().getJoin().getMulticastConfig().isEnabled()) { final Thread multicastServiceThread = new Thread(hazelcastInstance.threadGroup, multicastService, getThreadNamePrefix("MulticastThread")); multicastServiceThread.start(); } setActive(true); if (!completelyShutdown) { logger.finest("Adding ShutdownHook"); Runtime.getRuntime().addShutdownHook(shutdownHookThread); } logger.finest("finished starting threads, calling join"); join(); int clusterSize = clusterService.getSize(); if (config.getNetworkConfig().isPortAutoIncrement() && address.getPort() >= config.getNetworkConfig().getPort() + clusterSize) { StringBuilder sb = new StringBuilder("Config seed port is "); sb.append(config.getNetworkConfig().getPort()); sb.append(" and cluster size is "); sb.append(clusterSize); sb.append(". Some of the ports seem occupied!"); logger.warning(sb.toString()); } try { managementCenterService = new ManagementCenterService(hazelcastInstance); } catch (Exception e) { logger.warning("ManagementCenterService could not be constructed!", e); } initializer.afterInitialize(this); } public void shutdown(final boolean terminate) { long start = Clock.currentTimeMillis(); if (logger.isFinestEnabled()) { logger.finest("** we are being asked to shutdown when active = " + String.valueOf(active)); } if (!terminate && isActive()) { final int maxWaitSeconds = groupProperties.GRACEFUL_SHUTDOWN_MAX_WAIT.getInteger(); if (!partitionService.prepareToSafeShutdown(maxWaitSeconds, TimeUnit.SECONDS)) { logger.warning("Graceful shutdown could not be completed in " + maxWaitSeconds + " seconds!"); } } if (isActive()) { if (!terminate) { clusterService.sendShutdownMessage(); } // set the joined=false first so that // threads do not process unnecessary // events, such as remove address joined.set(false); setActive(false); setMasterAddress(null); try { Runtime.getRuntime().removeShutdownHook(shutdownHookThread); } catch (Throwable ignored) { } if (managementCenterService != null) { managementCenterService.shutdown(); } logger.finest("Shutting down node engine"); nodeEngine.shutdown(terminate); if (multicastService != null) { logger.finest("Shutting down multicast service"); multicastService.stop(); } logger.finest("Shutting down connection manager"); connectionManager.shutdown(); textCommandService.stop(); masterAddress = null; if (securityContext != null) { securityContext.destroy(); } initializer.destroy(); serializationService.destroy(); int numThreads = threadGroup.activeCount(); Thread[] threads = new Thread[numThreads * 2]; numThreads = threadGroup.enumerate(threads, false); for (int i = 0; i < numThreads; i++) { Thread thread = threads[i]; if (thread.isAlive()) { if (logger.isFinestEnabled()) { logger.finest("Shutting down thread " + thread.getName()); } thread.interrupt(); } } failedConnections.clear(); 
systemLogService.shutdown(); logger.info("Hazelcast Shutdown is completed in " + (Clock.currentTimeMillis() - start) + " ms."); } } public void onRestart() { joined.set(false); joiner.reset(); final String uuid = UuidUtil.createMemberUuid(address); if (logger.isFinestEnabled()) { logger.finest("Generated new UUID for local member: " + uuid); } localMember.setUuid(uuid); } public ILogger getLogger(String name) { return loggingService.getLogger(name); } public ILogger getLogger(Class clazz) { return loggingService.getLogger(clazz); } public GroupProperties getGroupProperties() { return groupProperties; } public TextCommandService getTextCommandService() { return textCommandService; } public ConnectionManager getConnectionManager() { return connectionManager; } public void inactivate() { joined.set(false); setActive(false); } public Set<Address> getFailedConnections() { return failedConnections; } public ClassLoader getConfigClassLoader() { return configClassLoader; } public class NodeShutdownHookThread extends Thread { NodeShutdownHookThread(String name) { super(name); } @Override public void run() { try { if (isActive() && !completelyShutdown) { completelyShutdown = true; if (groupProperties.SHUTDOWNHOOK_ENABLED.getBoolean()) { shutdown(true); } } else { logger.finest("shutdown hook - we are not --> active and not completely down so we are not calling shutdown"); } } catch (Exception e) { logger.warning(e); } } } public void setJoined() { joined.set(true); systemLogService.logJoin("setJoined() master: " + masterAddress); } public JoinRequest createJoinRequest() { return createJoinRequest(false); } public JoinRequest createJoinRequest(boolean withCredentials) { final Credentials credentials = (withCredentials && securityContext != null) ? securityContext.getCredentialsFactory().newCredentials() : null; return new JoinRequest(Packet.VERSION, buildInfo.getBuildNumber(), address, localMember.getUuid(), createConfigCheck(), credentials, clusterService.getSize(), 0, config.getMemberAttributeConfig().getAttributes()); } public ConfigCheck createConfigCheck() { final ConfigCheck configCheck = new ConfigCheck(); final GroupConfig groupConfig = config.getGroupConfig(); final PartitionGroupConfig partitionGroupConfig = config.getPartitionGroupConfig(); final boolean partitionGroupEnabled = partitionGroupConfig != null && partitionGroupConfig.isEnabled(); PartitionGroupConfig.MemberGroupType memberGroupType = partitionGroupEnabled ? partitionGroupConfig.getGroupType() : PartitionGroupConfig.MemberGroupType.PER_MEMBER; configCheck.setGroupName(groupConfig.getName()).setGroupPassword(groupConfig.getPassword()) .setJoinerType(joiner != null ? joiner.getType() : "") .setPartitionGroupEnabled(partitionGroupEnabled) .setMemberGroupType(memberGroupType); return configCheck; } public void rejoin() { prepareForRejoin(); join(); } private void prepareForRejoin() { systemLogService.logJoin("Rejoining!"); masterAddress = null; joined.set(false); clusterService.reset(); failedConnections.clear(); } public void join() { if (joiner == null) { logger.warning("No join method is enabled! Starting standalone."); setAsMaster(); return; } final long maxJoinMillis = getGroupProperties().MAX_JOIN_SECONDS.getInteger() * 1000; //This method used to be recursive. The problem is that eventually you can get a stackoverflow if //there are enough retries. With an iterative approach you don't suffer from this problem. 
int rejoinCount = 0; for (; ; ) { final long joinStartTime = joiner.getStartTime(); try { joiner.join(joined); return; } catch (Exception e) { rejoinCount++; if (Clock.currentTimeMillis() - joinStartTime < maxJoinMillis) { logger.warning("Trying to rejoin for the " + rejoinCount + " time: " + e.getMessage()); prepareForRejoin(); } else { logger.severe("Could not join cluster after " + rejoinCount + " attempts, shutting down!", e); shutdown(true); return; } } } } public Joiner getJoiner() { return joiner; } Joiner createJoiner() { JoinConfig join = config.getNetworkConfig().getJoin(); if (join.getMulticastConfig().isEnabled() && multicastService != null) { logger.info("Creating MulticastJoiner"); systemLogService.logJoin("Creating MulticastJoiner"); return new MulticastJoiner(this); } else if (join.getTcpIpConfig().isEnabled()) { logger.info("Creating TcpIpJoiner"); systemLogService.logJoin("Creating TcpIpJoiner"); return new TcpIpJoiner(this); } else if (join.getAwsConfig().isEnabled()) { Class clazz; try { logger.info("Creating AWSJoiner"); clazz = Class.forName("com.hazelcast.cluster.TcpIpJoinerOverAWS"); Constructor constructor = clazz.getConstructor(Node.class); systemLogService.logJoin("Creating AWSJoiner"); return (Joiner) constructor.newInstance(this); } catch (Exception e) { logger.severe("Error while creating AWSJoiner!", e); } } return null; } public void setAsMaster() { logger.finest("This node is being set as the master"); systemLogService.logJoin("No master node found! Setting this node as the master."); masterAddress = address; setJoined(); } public Config getConfig() { return config; } /** * @param active the active to set */ public void setActive(boolean active) { this.active = active; } /** * @return the active */ public boolean isActive() { return active; } public String toString() { return "Node[" + getName() + "]"; } public BuildInfo getBuildInfo() { return buildInfo; } private Map<String, Object> findMemberAttributes(MemberAttributeConfig attributeConfig) { Map<String, Object> attributes = new HashMap<String, Object>(attributeConfig.getAttributes()); Properties properties = System.getProperties(); for (String key : properties.stringPropertyNames()) { if (key.startsWith("hazelcast.member.attribute.")) { String shortKey = key.substring("hazelcast.member.attribute.".length()); String value = properties.getProperty(key); attributes.put(shortKey, value); } } return attributes; } }
1no label
hazelcast_src_main_java_com_hazelcast_instance_Node.java
1,054
public class JoinConfig { private MulticastConfig multicastConfig = new MulticastConfig(); private TcpIpConfig tcpIpConfig = new TcpIpConfig(); private AwsConfig awsConfig = new AwsConfig(); /** * @return the multicastConfig */ public MulticastConfig getMulticastConfig() { return multicastConfig; } /** * @param multicastConfig the multicastConfig to set * @throws IllegalArgumentException if multicastConfig is null. */ public JoinConfig setMulticastConfig(final MulticastConfig multicastConfig) { this.multicastConfig = isNotNull(multicastConfig, "multicastConfig"); return this; } /** * @return the tcpIpConfig */ public TcpIpConfig getTcpIpConfig() { return tcpIpConfig; } /** * @param tcpIpConfig the tcpIpConfig to set * @throws IllegalArgumentException if tcpIpConfig is null. */ public JoinConfig setTcpIpConfig(final TcpIpConfig tcpIpConfig) { this.tcpIpConfig = isNotNull(tcpIpConfig,"tcpIpConfig"); return this; } /** * @return the awsConfig */ public AwsConfig getAwsConfig() { return awsConfig; } /** * @param awsConfig the AwsConfig to set * @throws IllegalArgumentException if awsConfig is null. */ public JoinConfig setAwsConfig(final AwsConfig awsConfig) { this.awsConfig = isNotNull(awsConfig,"awsConfig"); return this; } @Override public String toString() { final StringBuilder sb = new StringBuilder("JoinConfig{"); sb.append("multicastConfig=").append(multicastConfig); sb.append(", tcpIpConfig=").append(tcpIpConfig); sb.append(", awsConfig=").append(awsConfig); sb.append('}'); return sb.toString(); } }
1no label
hazelcast_src_main_java_com_hazelcast_config_JoinConfig.java
1,773
public class OMailPlugin extends OServerPluginAbstract implements OScriptInjection { private static final String CONFIG_PROFILE_PREFIX = "profile."; private static final String CONFIG_MAIL_PREFIX = "mail."; private Map<String, OMailProfile> profiles = new HashMap<String, OMailProfile>(); public OMailPlugin() { Orient.instance().getScriptManager().registerInjection(this); } @Override public void config(final OServer oServer, final OServerParameterConfiguration[] iParams) { for (OServerParameterConfiguration param : iParams) { if (param.name.equalsIgnoreCase("enabled")) { if (!Boolean.parseBoolean(param.value)) // DISABLE IT return; } else if (param.name.startsWith(CONFIG_PROFILE_PREFIX)) { final String parts = param.name.substring(CONFIG_PROFILE_PREFIX.length()); int pos = parts.indexOf('.'); if (pos == -1) continue; final String profileName = parts.substring(0, pos); final String profileParam = parts.substring(pos + 1); OMailProfile profile = profiles.get(profileName); if (profile == null) { profile = new OMailProfile(); profiles.put(profileName, profile); } if (profileParam.startsWith(CONFIG_MAIL_PREFIX)) { profile.put("mail." + profileParam.substring(CONFIG_MAIL_PREFIX.length()), param.value); } } } OLogManager.instance().info(this, "Mail plugin installed and active. Loaded %d profile(s): %s", profiles.size(), profiles.keySet()); } /** * Sends an email. Supports the following configuration: subject, message, to, cc, bcc, date, attachments * * @param iMessage * Configuration as Map<String,Object> * @throws AddressException * @throws MessagingException * @throws ParseException */ public void send(final Map<String, Object> iMessage) throws AddressException, MessagingException, ParseException { final String profileName = (String) iMessage.get("profile"); final OMailProfile profile = profiles.get(profileName); if (profile == null) throw new IllegalArgumentException("Mail profile '" + profileName + "' is not configured on server"); // creates a new session with an authenticator Authenticator auth = new OSMTPAuthenticator((String) profile.getProperty("mail.smtp.user"), (String) profile.getProperty("mail.smtp.password")); Session session = Session.getInstance(profile, auth); // creates a new e-mail message MimeMessage msg = new MimeMessage(session); msg.setFrom(new InternetAddress((String) iMessage.get("from"))); InternetAddress[] toAddresses = { new InternetAddress( (String) iMessage.get("to")) }; msg.setRecipients(Message.RecipientType.TO, toAddresses); String cc = (String) iMessage.get("cc"); if (cc != null && !cc.isEmpty()) { InternetAddress[] ccAddresses = { new InternetAddress(cc) }; msg.setRecipients(Message.RecipientType.CC, ccAddresses); } String bcc = (String) iMessage.get("bcc"); if (bcc != null && !bcc.isEmpty()) { InternetAddress[] bccAddresses = { new InternetAddress(bcc) }; msg.setRecipients(Message.RecipientType.BCC, bccAddresses); } msg.setSubject((String) iMessage.get("subject")); // DATE Object date = iMessage.get("date"); final Date sendDate; if (date == null) // NOT SPECIFIED = NOW sendDate = new Date(); else if (date instanceof Date) // PASSED sendDate = (Date) date; else { // FORMAT IT String dateFormat = (String) profile.getProperty("mail.date.format"); if (dateFormat == null) dateFormat = "yyyy-MM-dd HH:mm:ss"; sendDate = new SimpleDateFormat(dateFormat).parse(date.toString()); } msg.setSentDate(sendDate); // creates message part MimeBodyPart messageBodyPart = new MimeBodyPart(); messageBodyPart.setContent(iMessage.get("message"), "text/html"); // creates multi-part 
Multipart multipart = new MimeMultipart(); multipart.addBodyPart(messageBodyPart); final String[] attachments = (String[]) iMessage.get("attachments"); // adds attachments if (attachments != null && attachments.length > 0) { for (String filePath : attachments) { addAttachment(multipart, filePath); } } // sets the multi-part as e-mail's content msg.setContent(multipart); // sends the e-mail Transport.send(msg); } /** * Adds a file as an attachment to the email's content * * @param multipart * @param filePath * @throws MessagingException */ private void addAttachment(final Multipart multipart, final String filePath) throws MessagingException { MimeBodyPart attachPart = new MimeBodyPart(); DataSource source = new FileDataSource(filePath); attachPart.setDataHandler(new DataHandler(source)); attachPart.setFileName(new File(filePath).getName()); multipart.addBodyPart(attachPart); } @Override public void bind(Bindings binding) { binding.put("mail", this); } @Override public void unbind(Bindings binding) { binding.remove("mail"); } @Override public String getName() { return "mail"; } public Set<String> getProfileNames() { return profiles.keySet(); } public OMailProfile getProfile(final String iName) { return profiles.get(iName); } public OMailPlugin registerProfile(final String iName, final OMailProfile iProfile) { profiles.put(iName, iProfile); return this; } }
1no label
server_src_main_java_com_orientechnologies_orient_server_plugin_mail_OMailPlugin.java
3,608
final class TransactionContextImpl implements TransactionContext { private final NodeEngineImpl nodeEngine; private final TransactionImpl transaction; private final TransactionManagerServiceImpl transactionManager; private final Map<TransactionalObjectKey, TransactionalObject> txnObjectMap = new HashMap<TransactionalObjectKey, TransactionalObject>(2); private XAResourceImpl xaResource; TransactionContextImpl(TransactionManagerServiceImpl transactionManagerService, NodeEngineImpl nodeEngine, TransactionOptions options, String ownerUuid) { this.transactionManager = transactionManagerService; this.nodeEngine = nodeEngine; this.transaction = new TransactionImpl(transactionManagerService, nodeEngine, options, ownerUuid); } @Override public XAResourceImpl getXaResource() { if (xaResource == null) { xaResource = new XAResourceImpl(transactionManager, this, nodeEngine); } return xaResource; } @Override public boolean isXAManaged() { return transaction.getXid() != null; } @Override public String getTxnId() { return transaction.getTxnId(); } @Override public void beginTransaction() { transaction.begin(); } @Override public void commitTransaction() throws TransactionException { if (transaction.getTransactionType().equals(TransactionOptions.TransactionType.TWO_PHASE)) { transaction.prepare(); } transaction.commit(); } @Override public void rollbackTransaction() { transaction.rollback(); } @SuppressWarnings("unchecked") @Override public <K, V> TransactionalMap<K, V> getMap(String name) { return (TransactionalMap<K, V>) getTransactionalObject(MapService.SERVICE_NAME, name); } @SuppressWarnings("unchecked") @Override public <E> TransactionalQueue<E> getQueue(String name) { return (TransactionalQueue<E>) getTransactionalObject(QueueService.SERVICE_NAME, name); } @SuppressWarnings("unchecked") @Override public <K, V> TransactionalMultiMap<K, V> getMultiMap(String name) { return (TransactionalMultiMap<K, V>) getTransactionalObject(MultiMapService.SERVICE_NAME, name); } @SuppressWarnings("unchecked") @Override public <E> TransactionalList<E> getList(String name) { return (TransactionalList<E>) getTransactionalObject(ListService.SERVICE_NAME, name); } @SuppressWarnings("unchecked") @Override public <E> TransactionalSet<E> getSet(String name) { return (TransactionalSet<E>) getTransactionalObject(SetService.SERVICE_NAME, name); } @SuppressWarnings("unchecked") @Override public TransactionalObject getTransactionalObject(String serviceName, String name) { if (transaction.getState() != Transaction.State.ACTIVE) { throw new TransactionNotActiveException("No transaction is found while accessing " + "transactional object -> " + serviceName + "[" + name + "]!"); } TransactionalObjectKey key = new TransactionalObjectKey(serviceName, name); TransactionalObject obj = txnObjectMap.get(key); if (obj != null) { return obj; } final Object service = nodeEngine.getService(serviceName); if (service instanceof TransactionalService) { nodeEngine.getProxyService().initializeDistributedObject(serviceName, name); obj = ((TransactionalService) service).createTransactionalObject(name, transaction); txnObjectMap.put(key, obj); } else { if (service == null) { if (!nodeEngine.isActive()) { throw new HazelcastInstanceNotActiveException(); } throw new IllegalArgumentException("Unknown Service[" + serviceName + "]!"); } throw new IllegalArgumentException("Service[" + serviceName + "] is not transactional!"); } return obj; } Transaction getTransaction() { return transaction; } private static class TransactionalObjectKey { private final 
String serviceName; private final String name; TransactionalObjectKey(String serviceName, String name) { this.serviceName = serviceName; this.name = name; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (!(o instanceof TransactionalObjectKey)) { return false; } TransactionalObjectKey that = (TransactionalObjectKey) o; if (!name.equals(that.name)) { return false; } if (!serviceName.equals(that.serviceName)) { return false; } return true; } @Override public int hashCode() { int result = serviceName.hashCode(); result = 31 * result + name.hashCode(); return result; } } }
1no label
hazelcast_src_main_java_com_hazelcast_transaction_impl_TransactionContextImpl.java
3,207
public class ReplicatedMapDataSerializerHook implements DataSerializerHook { public static final int F_ID = FactoryIdHelper.getFactoryId(FactoryIdHelper.REPLICATED_MAP_DS_FACTORY, -22); public static final int VECTOR = 0; public static final int RECORD = 1; public static final int REPL_UPDATE_MESSAGE = 2; public static final int REPL_CLEAR_MESSAGE = 3; public static final int REPL_MULTI_UPDATE_MESSAGE = 4; public static final int OP_INIT_CHUNK = 5; public static final int OP_POST_JOIN = 6; public static final int OP_CLEAR = 7; public static final int MAP_STATS = 8; private static final int LEN = MAP_STATS + 1; @Override public int getFactoryId() { return F_ID; } @Override public DataSerializableFactory createFactory() { ConstructorFunction<Integer, IdentifiedDataSerializable>[] constructors = new ConstructorFunction[LEN]; constructors[VECTOR] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() { public IdentifiedDataSerializable createNew(Integer arg) { return new VectorClock(); } }; constructors[RECORD] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() { public IdentifiedDataSerializable createNew(Integer arg) { return new ReplicatedRecord(); } }; constructors[REPL_UPDATE_MESSAGE] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() { public IdentifiedDataSerializable createNew(Integer arg) { return new ReplicationMessage(); } }; constructors[REPL_CLEAR_MESSAGE] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() { public IdentifiedDataSerializable createNew(Integer arg) { return new VectorClock(); } }; constructors[REPL_MULTI_UPDATE_MESSAGE] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() { @Override public IdentifiedDataSerializable createNew(Integer arg) { return new MultiReplicationMessage(); } }; constructors[OP_INIT_CHUNK] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() { @Override public IdentifiedDataSerializable createNew(Integer arg) { return new ReplicatedMapInitChunkOperation(); } }; constructors[OP_POST_JOIN] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() { @Override public IdentifiedDataSerializable createNew(Integer arg) { return new ReplicatedMapPostJoinOperation(); } }; constructors[OP_CLEAR] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() { @Override public IdentifiedDataSerializable createNew(Integer arg) { return new ReplicatedMapClearOperation(); } }; constructors[MAP_STATS] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() { @Override public IdentifiedDataSerializable createNew(Integer arg) { return new LocalReplicatedMapStatsImpl(); } }; return new ArrayDataSerializableFactory(constructors); } }
1no label
hazelcast_src_main_java_com_hazelcast_replicatedmap_operation_ReplicatedMapDataSerializerHook.java
130
public class OBinaryConverterFactory { private static final boolean unsafeWasDetected; static { boolean unsafeDetected = false; try { Class<?> sunClass = Class.forName("sun.misc.Unsafe"); unsafeDetected = sunClass != null; } catch (ClassNotFoundException cnfe) { // Ignore } unsafeWasDetected = unsafeDetected; } public static OBinaryConverter getConverter() { boolean useUnsafe = Boolean.valueOf(System.getProperty("memory.useUnsafe")); if (useUnsafe && unsafeWasDetected) return OUnsafeBinaryConverter.INSTANCE; return OSafeBinaryConverter.INSTANCE; } }
0true
commons_src_main_java_com_orientechnologies_common_serialization_OBinaryConverterFactory.java
356
public class DirectCopyClassTransformer implements BroadleafClassTransformer { protected SupportLogger logger; protected String moduleName; protected Map<String, String> xformTemplates = new HashMap<String, String>(); protected static List<String> transformedMethods = new ArrayList<String>(); public DirectCopyClassTransformer(String moduleName) { this.moduleName = moduleName; logger = SupportLogManager.getLogger(moduleName, this.getClass()); } @Override public void compileJPAProperties(Properties props, Object key) throws Exception { // When simply copying properties over for Java class files, JPA properties do not need modification } @Override public byte[] transform(ClassLoader loader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) throws IllegalClassFormatException { String convertedClassName = className.replace('/', '.'); if (xformTemplates.containsKey(convertedClassName)) { String xformKey = convertedClassName; String[] xformVals = xformTemplates.get(xformKey).split(","); logger.lifecycle(LifeCycleEvent.START, String.format("Transform - Copying into [%s] from [%s]", xformKey, StringUtils.join(xformVals, ","))); try { // Load the destination class and defrost it so it is eligible for modifications ClassPool classPool = ClassPool.getDefault(); CtClass clazz = classPool.makeClass(new ByteArrayInputStream(classfileBuffer), false); clazz.defrost(); for (String xformVal : xformVals) { // Load the source class String trimmed = xformVal.trim(); classPool.appendClassPath(new LoaderClassPath(Class.forName(trimmed).getClassLoader())); CtClass template = classPool.get(trimmed); // Add in extra interfaces CtClass[] interfacesToCopy = template.getInterfaces(); for (CtClass i : interfacesToCopy) { logger.debug(String.format("Adding interface [%s]", i.getName())); clazz.addInterface(i); } // Copy over all declared fields from the template class // Note that we do not copy over fields with the @NonCopiedField annotation CtField[] fieldsToCopy = template.getDeclaredFields(); for (CtField field : fieldsToCopy) { if (field.hasAnnotation(NonCopied.class)) { logger.debug(String.format("Not adding field [%s]", field.getName())); } else { logger.debug(String.format("Adding field [%s]", field.getName())); CtField copiedField = new CtField(field, clazz); boolean defaultConstructorFound = false; String implClass = getImplementationType(field.getType().getName()); // Look through all of the constructors in the implClass to see // if there is one that takes zero parameters try { CtConstructor[] implConstructors = classPool.get(implClass).getConstructors(); if (implConstructors != null) { for (CtConstructor cons : implConstructors) { if (cons.getParameterTypes().length == 0) { defaultConstructorFound = true; break; } } } } catch (NotFoundException e) { // Do nothing -- if we don't find this implementation, it's probably because it's // an array. In this case, we will not initialize the field. 
} if (defaultConstructorFound) { clazz.addField(copiedField, "new " + implClass + "()"); } else { clazz.addField(copiedField); } } } // Copy over all declared methods from the template class CtMethod[] methodsToCopy = template.getDeclaredMethods(); for (CtMethod method : methodsToCopy) { if (method.hasAnnotation(NonCopied.class)) { logger.debug(String.format("Not adding method [%s]", method.getName())); } else { try { CtClass[] paramTypes = method.getParameterTypes(); CtMethod originalMethod = clazz.getDeclaredMethod(method.getName(), paramTypes); if (transformedMethods.contains(methodDescription(originalMethod))) { throw new RuntimeException("Method already replaced " + methodDescription(originalMethod)); } else { logger.debug(String.format("Marking as replaced [%s]", methodDescription(originalMethod))); transformedMethods.add(methodDescription(originalMethod)); } logger.debug(String.format("Removing method [%s]", method.getName())); clazz.removeMethod(originalMethod); } catch (NotFoundException e) { // Do nothing -- we don't need to remove a method because it doesn't exist } logger.debug(String.format("Adding method [%s]", method.getName())); CtMethod copiedMethod = new CtMethod(method, clazz, null); clazz.addMethod(copiedMethod); } } } logger.lifecycle(LifeCycleEvent.END, String.format("Transform - Copying into [%s] from [%s]", xformKey, StringUtils.join(xformVals, ","))); return clazz.toBytecode(); } catch (Exception e) { throw new RuntimeException("Unable to transform class", e); } } return null; } /** * This method will do its best to return an implementation type for a given classname. This will allow weaving * template classes to have initialized values. * * We provide default implementations for List, Map, and Set, and will attempt to utilize a default constructor for * other classes. * * If the className contains an '[', we will return null. */ protected String getImplementationType(String className) { if (className.equals("java.util.List")) { return "java.util.ArrayList"; } else if (className.equals("java.util.Map")) { return "java.util.HashMap"; } else if (className.equals("java.util.Set")) { return "java.util.HashSet"; } else if (className.contains("[")) { return null; } return className; } protected String methodDescription(CtMethod method) { return method.getDeclaringClass().getName() + "|" + method.getName() + "|" + method.getSignature(); } public Map<String, String> getXformTemplates() { return xformTemplates; } public void setXformTemplates(Map<String, String> xformTemplates) { this.xformTemplates = xformTemplates; } }
1no label
common_src_main_java_org_broadleafcommerce_common_extensibility_jpa_copy_DirectCopyClassTransformer.java
89
public static class DecimalSerializer extends AbstractDecimalSerializer<Decimal> { public DecimalSerializer() { super(DECIMALS, Decimal.class); } @Override protected Decimal construct(long format, int decimals) { assert decimals==DECIMALS; return new Decimal(format); } }
0true
titan-core_src_main_java_com_thinkaurelius_titan_core_attribute_Decimal.java
1,219
return new V<T>() { @Override public boolean release() throws ElasticsearchException { if (t != Thread.currentThread()) { // Releasing from a different thread doesn't break anything but this is bad practice as pages should be acquired // as late as possible and released as soon as possible in a try/finally fashion throw new RuntimeException("Page was allocated in " + t + " but released in " + Thread.currentThread()); } final Throwable t = ACQUIRED_PAGES.remove(v); if (t == null) { throw new IllegalStateException("Releasing a page that has not been acquired"); } return v.release(); } @Override public T v() { return v.v(); } @Override public boolean isRecycled() { return v.isRecycled(); } };
1no label
src_test_java_org_elasticsearch_cache_recycler_MockPageCacheRecycler.java
565
public class PutMappingResponse extends AcknowledgedResponse { PutMappingResponse() { } PutMappingResponse(boolean acknowledged) { super(acknowledged); } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); readAcknowledged(in); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); writeAcknowledged(out); } }
0true
src_main_java_org_elasticsearch_action_admin_indices_mapping_put_PutMappingResponse.java
187
public interface OResettable { public void reset(); }
0true
commons_src_main_java_com_orientechnologies_common_util_OResettable.java
555
public class GetMappingsRequestBuilder extends ClusterInfoRequestBuilder<GetMappingsRequest, GetMappingsResponse, GetMappingsRequestBuilder> { public GetMappingsRequestBuilder(InternalGenericClient client, String... indices) { super(client, new GetMappingsRequest().indices(indices)); } @Override protected void doExecute(ActionListener<GetMappingsResponse> listener) { ((IndicesAdminClient) client).getMappings(request, listener); } }
0true
src_main_java_org_elasticsearch_action_admin_indices_mapping_get_GetMappingsRequestBuilder.java
679
public class PutWarmerAction extends IndicesAction<PutWarmerRequest, PutWarmerResponse, PutWarmerRequestBuilder> { public static final PutWarmerAction INSTANCE = new PutWarmerAction(); public static final String NAME = "indices/warmer/put"; private PutWarmerAction() { super(NAME); } @Override public PutWarmerResponse newResponse() { return new PutWarmerResponse(); } @Override public PutWarmerRequestBuilder newRequestBuilder(IndicesAdminClient client) { return new PutWarmerRequestBuilder(client); } }
0true
src_main_java_org_elasticsearch_action_admin_indices_warmer_put_PutWarmerAction.java
614
public static enum Flag { // Do not change the order of these flags we use // the ordinal for encoding! Only append to the end! Store("store"), Indexing("indexing"), Get("get"), Search("search"), Merge("merge"), Flush("flush"), Refresh("refresh"), FilterCache("filter_cache"), IdCache("id_cache"), FieldData("fielddata"), Docs("docs"), Warmer("warmer"), Percolate("percolate"), Completion("completion"), Segments("segments"), Translog("translog"); private final String restName; Flag(String restName) { this.restName = restName; } public String getRestName() { return restName; } }
0true
src_main_java_org_elasticsearch_action_admin_indices_stats_CommonStatsFlags.java
131
public class OSafeBinaryConverter implements OBinaryConverter { public static final OSafeBinaryConverter INSTANCE = new OSafeBinaryConverter(); public void putShort(byte[] buffer, int index, short value, ByteOrder byteOrder) { if (byteOrder.equals(ByteOrder.BIG_ENDIAN)) short2BytesBigEndian(value, buffer, index); else short2BytesLittleEndian(value, buffer, index); } public short getShort(byte[] buffer, int index, ByteOrder byteOrder) { if (byteOrder.equals(ByteOrder.BIG_ENDIAN)) return bytes2ShortBigEndian(buffer, index); return bytes2ShortLittleEndian(buffer, index); } public void putInt(byte[] buffer, int pointer, int value, ByteOrder byteOrder) { if (byteOrder.equals(ByteOrder.BIG_ENDIAN)) int2BytesBigEndian(value, buffer, pointer); else int2BytesLittleEndian(value, buffer, pointer); } public int getInt(byte[] buffer, int pointer, ByteOrder byteOrder) { if (byteOrder.equals(ByteOrder.BIG_ENDIAN)) return bytes2IntBigEndian(buffer, pointer); return bytes2IntLittleEndian(buffer, pointer); } public void putLong(byte[] buffer, int index, long value, ByteOrder byteOrder) { if (byteOrder.equals(ByteOrder.BIG_ENDIAN)) long2BytesBigEndian(value, buffer, index); else long2BytesLittleEndian(value, buffer, index); } public long getLong(byte[] buffer, int index, ByteOrder byteOrder) { if (byteOrder.equals(ByteOrder.BIG_ENDIAN)) return bytes2LongBigEndian(buffer, index); return bytes2LongLittleEndian(buffer, index); } public void putChar(byte[] buffer, int index, char character, ByteOrder byteOrder) { if (byteOrder.equals(ByteOrder.BIG_ENDIAN)) { buffer[index] = (byte) (character >>> 8); buffer[index + 1] = (byte) character; } else { buffer[index + 1] = (byte) (character >>> 8); buffer[index] = (byte) character; } } public char getChar(byte[] buffer, int index, ByteOrder byteOrder) { if (byteOrder.equals(ByteOrder.BIG_ENDIAN)) return (char) (((buffer[index] & 0xFF) << 8) + (buffer[index + 1] & 0xFF)); return (char) (((buffer[index + 1] & 0xFF) << 8) + (buffer[index] & 0xFF)); } public boolean nativeAccelerationUsed() { return false; } private static byte[] short2BytesBigEndian(final short value, final byte[] b, final int iBeginOffset) { b[iBeginOffset] = (byte) ((value >>> 8) & 0xFF); b[iBeginOffset + 1] = (byte) (value & 0xFF); return b; } private static byte[] short2BytesLittleEndian(final short value, final byte[] b, final int iBeginOffset) { b[iBeginOffset + 1] = (byte) ((value >>> 8) & 0xFF); b[iBeginOffset] = (byte) (value & 0xFF); return b; } private static short bytes2ShortBigEndian(final byte[] b, final int offset) { return (short) ((b[offset] << 8) | (b[offset + 1] & 0xff)); } private static short bytes2ShortLittleEndian(final byte[] b, final int offset) { return (short) ((b[offset + 1] << 8) | (b[offset] & 0xff)); } private static int bytes2IntBigEndian(final byte[] b, final int offset) { return (b[offset]) << 24 | (0xff & b[offset + 1]) << 16 | (0xff & b[offset + 2]) << 8 | ((0xff & b[offset + 3])); } private static int bytes2IntLittleEndian(final byte[] b, final int offset) { return (b[offset + 3]) << 24 | (0xff & b[offset + 2]) << 16 | (0xff & b[offset + 1]) << 8 | ((0xff & b[offset])); } private static byte[] int2BytesBigEndian(final int value, final byte[] b, final int iBeginOffset) { b[iBeginOffset] = (byte) ((value >>> 24) & 0xFF); b[iBeginOffset + 1] = (byte) ((value >>> 16) & 0xFF); b[iBeginOffset + 2] = (byte) ((value >>> 8) & 0xFF); b[iBeginOffset + 3] = (byte) (value & 0xFF); return b; } private static byte[] int2BytesLittleEndian(final int value, final byte[] b, final int 
iBeginOffset) { b[iBeginOffset + 3] = (byte) ((value >>> 24) & 0xFF); b[iBeginOffset + 2] = (byte) ((value >>> 16) & 0xFF); b[iBeginOffset + 1] = (byte) ((value >>> 8) & 0xFF); b[iBeginOffset] = (byte) (value & 0xFF); return b; } private static byte[] long2BytesBigEndian(final long value, final byte[] b, final int iBeginOffset) { b[iBeginOffset] = (byte) ((value >>> 56) & 0xFF); b[iBeginOffset + 1] = (byte) ((value >>> 48) & 0xFF); b[iBeginOffset + 2] = (byte) ((value >>> 40) & 0xFF); b[iBeginOffset + 3] = (byte) ((value >>> 32) & 0xFF); b[iBeginOffset + 4] = (byte) ((value >>> 24) & 0xFF); b[iBeginOffset + 5] = (byte) ((value >>> 16) & 0xFF); b[iBeginOffset + 6] = (byte) ((value >>> 8) & 0xFF); b[iBeginOffset + 7] = (byte) (value & 0xFF); return b; } private static byte[] long2BytesLittleEndian(final long value, final byte[] b, final int iBeginOffset) { b[iBeginOffset + 7] = (byte) ((value >>> 56) & 0xFF); b[iBeginOffset + 6] = (byte) ((value >>> 48) & 0xFF); b[iBeginOffset + 5] = (byte) ((value >>> 40) & 0xFF); b[iBeginOffset + 4] = (byte) ((value >>> 32) & 0xFF); b[iBeginOffset + 3] = (byte) ((value >>> 24) & 0xFF); b[iBeginOffset + 2] = (byte) ((value >>> 16) & 0xFF); b[iBeginOffset + 1] = (byte) ((value >>> 8) & 0xFF); b[iBeginOffset] = (byte) (value & 0xFF); return b; } private static long bytes2LongBigEndian(final byte[] b, final int offset) { return ((0xff & b[offset + 7]) | (0xff & b[offset + 6]) << 8 | (0xff & b[offset + 5]) << 16 | (long) (0xff & b[offset + 4]) << 24 | (long) (0xff & b[offset + 3]) << 32 | (long) (0xff & b[offset + 2]) << 40 | (long) (0xff & b[offset + 1]) << 48 | (long) (0xff & b[offset]) << 56); } private static long bytes2LongLittleEndian(final byte[] b, final int offset) { return ((0xff & b[offset]) | (0xff & b[offset + 1]) << 8 | (0xff & b[offset + 2]) << 16 | (long) (0xff & b[offset + 3]) << 24 | (long) (0xff & b[offset + 4]) << 32 | (long) (0xff & b[offset + 5]) << 40 | (long) (0xff & b[offset + 6]) << 48 | (long) (0xff & b[offset + 7]) << 56); } }
0true
commons_src_main_java_com_orientechnologies_common_serialization_OSafeBinaryConverter.java
3,748
return new DataSerializableFactory() { @Override public IdentifiedDataSerializable create(int typeId) { if (typeId == SESSION_ATTRIBUTE_ID) { return new SessionAttributePredicate(); } throw new IllegalArgumentException(); } };
1no label
hazelcast-wm_src_main_java_com_hazelcast_web_WebDataSerializerHook.java
171
Executors.newSingleThreadExecutor().submit(new Runnable() { public void run() { while (true) { try { Thread.sleep(STATS_SECONDS * 1000); System.out.println("cluster size:" + client.getCluster().getMembers().size()); Stats currentStats = stats.getAndReset(); System.out.println(currentStats); System.out.println("Operations per Second : " + currentStats.total() / STATS_SECONDS); } catch (Exception e) { e.printStackTrace(); } } } });
0true
hazelcast-client_src_test_java_com_hazelcast_client_SimpleMapTestFromClient.java
745
public class ExplainResponse extends ActionResponse { private boolean exists; private Explanation explanation; private GetResult getResult; ExplainResponse() { } public ExplainResponse(boolean exists) { this.exists = exists; } public ExplainResponse(boolean exists, Explanation explanation) { this.exists = exists; this.explanation = explanation; } public ExplainResponse(boolean exists, Explanation explanation, GetResult getResult) { this.exists = exists; this.explanation = explanation; this.getResult = getResult; } public Explanation getExplanation() { return explanation; } public boolean isMatch() { return explanation != null && explanation.isMatch(); } public boolean hasExplanation() { return explanation != null; } public boolean isExists() { return exists; } public GetResult getGetResult() { return getResult; } public void readFrom(StreamInput in) throws IOException { super.readFrom(in); exists = in.readBoolean(); if (in.readBoolean()) { explanation = readExplanation(in); } if (in.readBoolean()) { getResult = GetResult.readGetResult(in); } } public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeBoolean(exists); if (explanation == null) { out.writeBoolean(false); } else { out.writeBoolean(true); writeExplanation(out, explanation); } if (getResult == null) { out.writeBoolean(false); } else { out.writeBoolean(true); getResult.writeTo(out); } } }
0true
src_main_java_org_elasticsearch_action_explain_ExplainResponse.java
3,022
public class QueueContainer implements IdentifiedDataSerializable { private LinkedList<QueueItem> itemQueue; private HashMap<Long, QueueItem> backupMap; private final Map<Long, TxQueueItem> txMap = new HashMap<Long, TxQueueItem>(); private final HashMap<Long, Data> dataMap = new HashMap<Long, Data>(); private QueueConfig config; private QueueStoreWrapper store; private NodeEngine nodeEngine; private QueueService service; private ILogger logger; private long idGenerator; private final QueueWaitNotifyKey pollWaitNotifyKey; private final QueueWaitNotifyKey offerWaitNotifyKey; private String name; private long minAge = Long.MAX_VALUE; private long maxAge = Long.MIN_VALUE; private long totalAge; private long totalAgedCount; private boolean isEvictionScheduled; public QueueContainer(String name) { this.name = name; pollWaitNotifyKey = new QueueWaitNotifyKey(name, "poll"); offerWaitNotifyKey = new QueueWaitNotifyKey(name, "offer"); } public QueueContainer(String name, QueueConfig config, NodeEngine nodeEngine, QueueService service) throws Exception { this(name); setConfig(config, nodeEngine, service); } public void init(boolean fromBackup) { if (!fromBackup && store.isEnabled()) { Set<Long> keys = store.loadAllKeys(); if (keys != null) { long maxId = -1; for (Long key : keys) { QueueItem item = new QueueItem(this, key, null); getItemQueue().offer(item); maxId = Math.max(maxId, key); } idGenerator = maxId + 1; } } } //TX Methods public boolean txnEnsureReserve(long itemId) { if (txMap.get(itemId) == null) { throw new TransactionException("No reserve for itemId: " + itemId); } return true; } //TX Poll public QueueItem txnPollReserve(long reservedOfferId, String transactionId) { QueueItem item = getItemQueue().peek(); if (item == null) { TxQueueItem txItem = txMap.remove(reservedOfferId); if (txItem == null) { return null; } item = new QueueItem(this, txItem.getItemId(), txItem.getData()); return item; } if (store.isEnabled() && item.getData() == null) { try { load(item); } catch (Exception e) { throw new HazelcastException(e); } } getItemQueue().poll(); txMap.put(item.getItemId(), new TxQueueItem(item).setPollOperation(true).setTransactionId(transactionId)); return item; } public boolean txnPollBackupReserve(long itemId, String transactionId) { QueueItem item = getBackupMap().remove(itemId); if (item == null) { throw new TransactionException("Backup reserve failed: " + itemId); } txMap.put(itemId, new TxQueueItem(item).setPollOperation(true).setTransactionId(transactionId)); return true; } public Data txnCommitPoll(long itemId) { final Data result = txnCommitPollBackup(itemId); scheduleEvictionIfEmpty(); return result; } public Data txnCommitPollBackup(long itemId) { TxQueueItem item = txMap.remove(itemId); if (item == null) { logger.warning("txnCommitPoll operation-> No txn item for itemId: " + itemId); return null; } if (store.isEnabled()) { try { store.delete(item.getItemId()); } catch (Exception e) { logger.severe("Error during store delete: " + item.getItemId(), e); } } return item.getData(); } public boolean txnRollbackPoll(long itemId, boolean backup) { QueueItem item = txMap.remove(itemId); if (item == null) { return false; } if (!backup) { getItemQueue().offerFirst(item); } cancelEvictionIfExists(); return true; } //TX Offer public long txnOfferReserve(String transactionId) { TxQueueItem item = new TxQueueItem(this, nextId(), null).setTransactionId(transactionId).setPollOperation(false); txMap.put(item.getItemId(), item); return item.getItemId(); } public void txnOfferBackupReserve(long 
itemId, String transactionId) { QueueItem item = new QueueItem(this, itemId, null); Object o = txMap.put(itemId, new TxQueueItem(item).setPollOperation(false).setTransactionId(transactionId)); if (o != null) { logger.severe("txnOfferBackupReserve operation-> Item exists already at txMap for itemId: " + itemId); } } public boolean txnCommitOffer(long itemId, Data data, boolean backup) { QueueItem item = txMap.remove(itemId); if (item == null && !backup) { throw new TransactionException("No reserve :" + itemId); } else if (item == null) { item = new QueueItem(this, itemId, data); } item.setData(data); if (!backup) { getItemQueue().offer(item); cancelEvictionIfExists(); } else { getBackupMap().put(itemId, item); } if (store.isEnabled() && !backup) { try { store.store(item.getItemId(), data); } catch (Exception e) { logger.warning("Exception during store", e); } } return true; } public boolean txnRollbackOffer(long itemId) { final boolean result = txnRollbackOfferBackup(itemId); scheduleEvictionIfEmpty(); return result; } public boolean txnRollbackOfferBackup(long itemId) { QueueItem item = txMap.remove(itemId); if (item == null) { logger.warning("txnRollbackOffer operation-> No txn item for itemId: " + itemId); return false; } return true; } public QueueItem txnPeek(long offerId, String transactionId) { QueueItem item = getItemQueue().peek(); if (item == null) { if (offerId == -1) { return null; } TxQueueItem txItem = txMap.get(offerId); if (txItem == null) { return null; } item = new QueueItem(this, txItem.getItemId(), txItem.getData()); return item; } if (store.isEnabled() && item.getData() == null) { try { load(item); } catch (Exception e) { throw new HazelcastException(e); } } return item; } //TX Methods Ends public long offer(Data data) { QueueItem item = new QueueItem(this, nextId(), null); if (store.isEnabled()) { try { store.store(item.getItemId(), data); } catch (Exception e) { throw new HazelcastException(e); } } if (!store.isEnabled() || store.getMemoryLimit() > getItemQueue().size()) { item.setData(data); } getItemQueue().offer(item); cancelEvictionIfExists(); return item.getItemId(); } public void offerBackup(Data data, long itemId) { QueueItem item = new QueueItem(this, itemId, null); if (!store.isEnabled() || store.getMemoryLimit() > getItemQueue().size()) { item.setData(data); } getBackupMap().put(itemId, item); } public Map<Long, Data> addAll(Collection<Data> dataList) { Map<Long, Data> map = new HashMap<Long, Data>(dataList.size()); List<QueueItem> list = new ArrayList<QueueItem>(dataList.size()); for (Data data : dataList) { QueueItem item = new QueueItem(this, nextId(), null); if (!store.isEnabled() || store.getMemoryLimit() > getItemQueue().size()) { item.setData(data); } map.put(item.getItemId(), data); list.add(item); } if (store.isEnabled() && !map.isEmpty()) { try { store.storeAll(map); } catch (Exception e) { throw new HazelcastException(e); } } if (!list.isEmpty()) { getItemQueue().addAll(list); cancelEvictionIfExists(); } return map; } public void addAllBackup(Map<Long, Data> dataMap) { for (Map.Entry<Long, Data> entry : dataMap.entrySet()) { QueueItem item = new QueueItem(this, entry.getKey(), null); if (!store.isEnabled() || store.getMemoryLimit() > getItemQueue().size()) { item.setData(entry.getValue()); } getBackupMap().put(item.getItemId(), item); } } public QueueItem peek() { QueueItem item = getItemQueue().peek(); if (item == null) { return null; } if (store.isEnabled() && item.getData() == null) { try { load(item); } catch (Exception e) { throw new 
HazelcastException(e); } } return item; } public QueueItem poll() { QueueItem item = peek(); if (item == null) { return null; } if (store.isEnabled()) { try { store.delete(item.getItemId()); } catch (Exception e) { throw new HazelcastException(e); } } getItemQueue().poll(); age(item, Clock.currentTimeMillis()); scheduleEvictionIfEmpty(); return item; } public void pollBackup(long itemId) { QueueItem item = getBackupMap().remove(itemId); if (item != null) { //For Stats age(item, Clock.currentTimeMillis()); } } public Map<Long, Data> drain(int maxSize) { if (maxSize < 0 || maxSize > getItemQueue().size()) { maxSize = getItemQueue().size(); } LinkedHashMap<Long, Data> map = new LinkedHashMap<Long, Data>(maxSize); Iterator<QueueItem> iter = getItemQueue().iterator(); for (int i = 0; i < maxSize; i++) { QueueItem item = iter.next(); if (store.isEnabled() && item.getData() == null) { try { load(item); } catch (Exception e) { throw new HazelcastException(e); } } map.put(item.getItemId(), item.getData()); } if (store.isEnabled() && maxSize != 0) { try { store.deleteAll(map.keySet()); } catch (Exception e) { throw new HazelcastException(e); } } long current = Clock.currentTimeMillis(); for (int i = 0; i < maxSize; i++) { QueueItem item = getItemQueue().poll(); //For Stats age(item, current); } if (maxSize != 0) { scheduleEvictionIfEmpty(); } return map; } public void drainFromBackup(Set<Long> itemIdSet) { for (Long itemId : itemIdSet) { pollBackup(itemId); } dataMap.clear(); } public int size() { return Math.min(config.getMaxSize(), getItemQueue().size()); } public int backupSize() { return getBackupMap().size(); } public Map<Long, Data> clear() { long current = Clock.currentTimeMillis(); LinkedHashMap<Long, Data> map = new LinkedHashMap<Long, Data>(getItemQueue().size()); for (QueueItem item : getItemQueue()) { map.put(item.getItemId(), item.getData()); // For stats age(item, current); } if (store.isEnabled() && !map.isEmpty()) { try { store.deleteAll(map.keySet()); } catch (Exception e) { throw new HazelcastException(e); } } getItemQueue().clear(); dataMap.clear(); scheduleEvictionIfEmpty(); return map; } public void clearBackup(Set<Long> itemIdSet) { drainFromBackup(itemIdSet); } /** * iterates all items, checks equality with data * This method does not trigger store load. */ public long remove(Data data) { Iterator<QueueItem> iter = getItemQueue().iterator(); while (iter.hasNext()) { QueueItem item = iter.next(); if (data.equals(item.getData())) { if (store.isEnabled()) { try { store.delete(item.getItemId()); } catch (Exception e) { throw new HazelcastException(e); } } iter.remove(); //For Stats age(item, Clock.currentTimeMillis()); scheduleEvictionIfEmpty(); return item.getItemId(); } } return -1; } public void removeBackup(long itemId) { getBackupMap().remove(itemId); } /** * This method does not trigger store load. */ public boolean contains(Collection<Data> dataSet) { for (Data data : dataSet) { boolean contains = false; for (QueueItem item : getItemQueue()) { if (item.getData() != null && item.getData().equals(data)) { contains = true; break; } } if (!contains) { return false; } } return true; } /** * This method triggers store load. 
*/ public List<Data> getAsDataList() { List<Data> dataList = new ArrayList<Data>(getItemQueue().size()); for (QueueItem item : getItemQueue()) { if (store.isEnabled() && item.getData() == null) { try { load(item); } catch (Exception e) { throw new HazelcastException(e); } } dataList.add(item.getData()); } return dataList; } /** * This method triggers store load */ public Map<Long, Data> compareAndRemove(Collection<Data> dataList, boolean retain) { LinkedHashMap<Long, Data> map = new LinkedHashMap<Long, Data>(); for (QueueItem item : getItemQueue()) { if (item.getData() == null && store.isEnabled()) { try { load(item); } catch (Exception e) { throw new HazelcastException(e); } } boolean contains = dataList.contains(item.getData()); if ((retain && !contains) || (!retain && contains)) { map.put(item.getItemId(), item.getData()); } } if (map.size() > 0) { if (store.isEnabled()) { try { store.deleteAll(map.keySet()); } catch (Exception e) { throw new HazelcastException(e); } } Iterator<QueueItem> iter = getItemQueue().iterator(); while (iter.hasNext()) { QueueItem item = iter.next(); if (map.containsKey(item.getItemId())) { iter.remove(); //For Stats age(item, Clock.currentTimeMillis()); } } scheduleEvictionIfEmpty(); } return map; } public void compareAndRemoveBackup(Set<Long> itemIdSet) { drainFromBackup(itemIdSet); } private void load(QueueItem item) throws Exception { int bulkLoad = store.getBulkLoad(); bulkLoad = Math.min(getItemQueue().size(), bulkLoad); if (bulkLoad == 1) { item.setData(store.load(item.getItemId())); } else if (bulkLoad > 1) { ListIterator<QueueItem> iter = getItemQueue().listIterator(); HashSet<Long> keySet = new HashSet<Long>(bulkLoad); for (int i = 0; i < bulkLoad; i++) { keySet.add(iter.next().getItemId()); } Map<Long, Data> values = store.loadAll(keySet); dataMap.putAll(values); item.setData(getDataFromMap(item.getItemId())); } } public boolean hasEnoughCapacity() { return hasEnoughCapacity(1); } public boolean hasEnoughCapacity(int delta) { return (getItemQueue().size() + delta) <= config.getMaxSize(); } LinkedList<QueueItem> getItemQueue() { if (itemQueue == null) { itemQueue = new LinkedList<QueueItem>(); if (backupMap != null && !backupMap.isEmpty()) { List<QueueItem> values = new ArrayList<QueueItem>(backupMap.values()); Collections.sort(values); itemQueue.addAll(values); backupMap.clear(); backupMap = null; } } return itemQueue; } Map<Long, QueueItem> getBackupMap() { if (backupMap == null) { backupMap = new HashMap<Long, QueueItem>(); if (itemQueue != null) { for (QueueItem item : itemQueue) { backupMap.put(item.getItemId(), item); } itemQueue.clear(); itemQueue = null; } } return backupMap; } public Data getDataFromMap(long itemId) { return dataMap.remove(itemId); } public void setConfig(QueueConfig config, NodeEngine nodeEngine, QueueService service) { this.nodeEngine = nodeEngine; this.service = service; logger = nodeEngine.getLogger(QueueContainer.class); store = new QueueStoreWrapper(nodeEngine.getSerializationService()); this.config = new QueueConfig(config); QueueStoreConfig storeConfig = config.getQueueStoreConfig(); store.setConfig(storeConfig, name); } long nextId() { return idGenerator++; } void setId(long itemId) { idGenerator = Math.max(itemId + 1, idGenerator); } public QueueWaitNotifyKey getPollWaitNotifyKey() { return pollWaitNotifyKey; } public QueueWaitNotifyKey getOfferWaitNotifyKey() { return offerWaitNotifyKey; } public QueueConfig getConfig() { return config; } private void age(QueueItem item, long currentTime) { long elapsed = 
currentTime - item.getCreationTime(); if (elapsed <= 0) { //elapsed time can not be a negative value, a system clock problem maybe. ignored return; } totalAgedCount++; totalAge += elapsed; minAge = Math.min(minAge, elapsed); maxAge = Math.max(maxAge, elapsed); } public void setStats(LocalQueueStatsImpl stats) { stats.setMinAge(minAge); stats.setMaxAge(maxAge); long totalAgedCountVal = Math.max(totalAgedCount, 1); stats.setAveAge(totalAge / totalAgedCountVal); } private void scheduleEvictionIfEmpty() { final int emptyQueueTtl = config.getEmptyQueueTtl(); if (emptyQueueTtl < 0) { return; } if (getItemQueue().isEmpty() && txMap.isEmpty() && !isEvictionScheduled) { if (emptyQueueTtl == 0) { nodeEngine.getProxyService().destroyDistributedObject(QueueService.SERVICE_NAME, name); } else if (emptyQueueTtl > 0) { service.scheduleEviction(name, TimeUnit.SECONDS.toMillis(emptyQueueTtl)); isEvictionScheduled = true; } } } public void cancelEvictionIfExists() { if (isEvictionScheduled) { service.cancelEviction(name); isEvictionScheduled = false; } } public boolean isEvictable() { return getItemQueue().isEmpty() && txMap.isEmpty(); } public void rollbackTransaction(String transactionId) { final Iterator<TxQueueItem> iterator = txMap.values().iterator(); while (iterator.hasNext()) { final TxQueueItem item = iterator.next(); if (transactionId.equals(item.getTransactionId())) { iterator.remove(); if (item.isPollOperation()) { getItemQueue().offerFirst(item); cancelEvictionIfExists(); } } } } @Override public void writeData(ObjectDataOutput out) throws IOException { out.writeUTF(name); out.writeInt(getItemQueue().size()); for (QueueItem item : getItemQueue()) { out.writeObject(item); } out.writeInt(txMap.size()); for (TxQueueItem item : txMap.values()) { item.writeData(out); } } @Override public void readData(ObjectDataInput in) throws IOException { name = in.readUTF(); int size = in.readInt(); for (int j = 0; j < size; j++) { QueueItem item = in.readObject(); getItemQueue().offer(item); setId(item.getItemId()); } int txSize = in.readInt(); for (int j = 0; j < txSize; j++) { TxQueueItem item = new TxQueueItem(this, -1, null); item.readData(in); txMap.put(item.getItemId(), item); setId(item.getItemId()); } } public void destroy() { if (itemQueue != null) { itemQueue.clear(); } if (backupMap != null) { backupMap.clear(); } txMap.clear(); dataMap.clear(); } @Override public int getFactoryId() { return QueueDataSerializerHook.F_ID; } @Override public int getId() { return QueueDataSerializerHook.QUEUE_CONTAINER; } }
1no label
hazelcast_src_main_java_com_hazelcast_queue_QueueContainer.java
37
@SuppressWarnings("serial") public class OMVRBTreeMemory<K, V> extends OMVRBTree<K, V> { /** * The number of entries in the tree */ protected int size = 0; protected int defaultPageSize = 63; /** * Memory based MVRB-Tree implementation. Constructs a new, empty tree map, using the natural ordering of its keys. All keys * inserted into the map must implement the {@link Comparable} interface. Furthermore, all such keys must be <i>mutually * comparable</i>: <tt>k1.compareTo(k2)</tt> must not throw a <tt>ClassCastException</tt> for any keys <tt>k1</tt> and <tt>k2</tt> * in the map. If the user attempts to put a key into the map that violates this constraint (for example, the user attempts to put * a string key into a map whose keys are integers), the <tt>put(Object key, Object value)</tt> call will throw a * <tt>ClassCastException</tt>. */ public OMVRBTreeMemory() { runtimeCheckEnabled = false; } public OMVRBTreeMemory(final int iPageSize, final float iLoadFactor) { this(iPageSize, iLoadFactor, 1); } public OMVRBTreeMemory(final int iPageSize, final float iLoadFactor, final int keySize) { super(keySize); defaultPageSize = iPageSize; pageLoadFactor = iLoadFactor; } /** * Constructs a new, empty tree map, ordered according to the given comparator. All keys inserted into the map must be <i>mutually * comparable</i> by the given comparator: <tt>comparator.compare(k1, * k2)</tt> must not throw a <tt>ClassCastException</tt> for any keys <tt>k1</tt> and <tt>k2</tt> in the map. If the user attempts * to put a key into the map that violates this constraint, the <tt>put(Object * key, Object value)</tt> call will throw a <tt>ClassCastException</tt>. * * @param comparator * the comparator that will be used to order this map. If <tt>null</tt>, the {@linkplain Comparable natural ordering} of * the keys will be used. */ public OMVRBTreeMemory(final Comparator<? super K> comparator) { super(comparator); } /** * Constructs a new tree map containing the same mappings as the given map, ordered according to the <i>natural ordering</i> of * its keys. All keys inserted into the new map must implement the {@link Comparable} interface. Furthermore, all such keys must * be <i>mutually comparable</i>: <tt>k1.compareTo(k2)</tt> must not throw a <tt>ClassCastException</tt> for any keys <tt>k1</tt> * and <tt>k2</tt> in the map. This method runs in n*log(n) time. * * @param m * the map whose mappings are to be placed in this map * @throws ClassCastException * if the keys in m are not {@link Comparable}, or are not mutually comparable * @throws NullPointerException * if the specified map is null */ public OMVRBTreeMemory(final Map<? extends K, ? extends V> m) { super(m); } /** * Constructs a new tree map containing the same mappings and using the same ordering as the specified sorted map. This method * runs in linear time. * * @param m * the sorted map whose mappings are to be placed in this map, and whose comparator is to be used to sort this map * @throws NullPointerException * if the specified map is null */ public OMVRBTreeMemory(final SortedMap<K, ? 
extends V> m) { super(m); } @Override public int getTreeSize() { return size; } protected void setSize(int pSize) { size = pSize; } public int getDefaultPageSize() { return defaultPageSize; } @Override protected OMVRBTreeEntry<K, V> createEntry(final K key, final V value) { return new OMVRBTreeEntryMemory<K, V>(this, key, value, null); } @Override protected OMVRBTreeEntry<K, V> createEntry(final OMVRBTreeEntry<K, V> parent) { return new OMVRBTreeEntryMemory<K, V>((OMVRBTreeEntryMemory<K, V>) parent, parent.getPageSplitItems()); } }
0true
commons_src_main_java_com_orientechnologies_common_collection_OMVRBTreeMemory.java
104
static final class TreeNode<K,V> extends Node<K,V> { TreeNode<K,V> parent; // red-black tree links TreeNode<K,V> left; TreeNode<K,V> right; TreeNode<K,V> prev; // needed to unlink next upon deletion boolean red; TreeNode(int hash, K key, V val, Node<K,V> next, TreeNode<K,V> parent) { super(hash, key, val, next); this.parent = parent; } Node<K,V> find(int h, Object k) { return findTreeNode(h, k, null); } /** * Returns the TreeNode (or null if not found) for the given key * starting at given root. */ final TreeNode<K,V> findTreeNode(int h, Object k, Class<?> kc) { if (k != null) { TreeNode<K,V> p = this; do { int ph, dir; K pk; TreeNode<K,V> q; TreeNode<K,V> pl = p.left, pr = p.right; if ((ph = p.hash) > h) p = pl; else if (ph < h) p = pr; else if ((pk = p.key) == k || (pk != null && k.equals(pk))) return p; else if (pl == null) p = pr; else if (pr == null) p = pl; else if ((kc != null || (kc = comparableClassFor(k)) != null) && (dir = compareComparables(kc, k, pk)) != 0) p = (dir < 0) ? pl : pr; else if ((q = pr.findTreeNode(h, k, kc)) != null) return q; else p = pl; } while (p != null); } return null; } }
0true
src_main_java_jsr166e_ConcurrentHashMapV8.java
771
@Deprecated @Entity @Inheritance(strategy = InheritanceType.JOINED) @Table(name = "BLC_SKU_AVAILABILITY") @Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blInventoryElements") public class SkuAvailabilityImpl implements SkuAvailability { /** The Constant serialVersionUID. */ private static final long serialVersionUID = 1L; /** The id. */ @Id @GeneratedValue(generator = "SkuAvailabilityId") @GenericGenerator( name="SkuAvailabilityId", strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator", parameters = { @Parameter(name="segment_value", value="SkuAvailabilityImpl"), @Parameter(name="entity_name", value="org.broadleafcommerce.core.inventory.domain.SkuAvailabilityImpl") } ) @Column(name = "SKU_AVAILABILITY_ID") @AdminPresentation(friendlyName = "SkuAvailabilityImpl_Sku_Availability_ID", group = "SkuAvailabilityImpl_Primary_Key", visibility = VisibilityEnum.HIDDEN_ALL) protected Long id; /** The sale price. */ @Column(name = "SKU_ID") @Index(name="SKUAVAIL_SKU_INDEX", columnNames={"SKU_ID"}) @AdminPresentation(friendlyName = "SkuAvailabilityImpl_Sku_ID", visibility = VisibilityEnum.HIDDEN_ALL) protected Long skuId; /** The retail price. */ @Column(name = "LOCATION_ID") @Index(name="SKUAVAIL_LOCATION_INDEX", columnNames={"LOCATION_ID"}) @AdminPresentation(friendlyName = "SkuAvailabilityImpl_Location_ID", group = "SkuAvailabilityImpl_Description") protected Long locationId; /** The quantity on hand. */ @Column(name = "QTY_ON_HAND") @AdminPresentation(friendlyName = "SkuAvailabilityImpl_Quantity_On_Hand", group = "SkuAvailabilityImpl_Description") protected Integer quantityOnHand; /** The reserve quantity. */ @Column(name = "RESERVE_QTY") @AdminPresentation(friendlyName = "SkuAvailabilityImpl_Reserve_Quantity", group = "SkuAvailabilityImpl_Description") protected Integer reserveQuantity; /** The description. */ @Column(name = "AVAILABILITY_STATUS") @Index(name="SKUAVAIL_STATUS_INDEX", columnNames={"AVAILABILITY_STATUS"}) @AdminPresentation(friendlyName = "SkuAvailabilityImpl_Availability_Status", group = "SkuAvailabilityImpl_Description", fieldType= SupportedFieldType.BROADLEAF_ENUMERATION, broadleafEnumeration="org.broadleafcommerce.core.inventory.service.type.AvailabilityStatusType") protected String availabilityStatus; /** The date this product will be available. 
*/ @Column(name = "AVAILABILITY_DATE") @AdminPresentation(friendlyName = "SkuAvailabilityImpl_Available_Date", group = "SkuAvailabilityImpl_Description") protected Date availabilityDate; @Override public Long getId() { return id; } @Override public Long getLocationId() { return locationId; } @Override public Integer getQuantityOnHand() { return quantityOnHand; } @Override public Long getSkuId() { return skuId; } @Override public void setId(Long id) { this.id = id; } @Override public void setLocationId(Long locationId) { this.locationId = locationId; } @Override public void setQuantityOnHand(Integer qoh) { this.quantityOnHand = qoh; } @Override public void setSkuId(Long skuId) { this.skuId = skuId; } @Override public Date getAvailabilityDate() { return availabilityDate; } @Override public void setAvailabilityDate(Date availabilityDate) { this.availabilityDate = availabilityDate; } @Override public AvailabilityStatusType getAvailabilityStatus() { return AvailabilityStatusType.getInstance(availabilityStatus); } @Override public void setAvailabilityStatus(final AvailabilityStatusType availabilityStatus) { if (availabilityStatus != null) { this.availabilityStatus = availabilityStatus.getType(); } } /** * Returns the reserve quantity. Nulls will be treated the same as 0. * Implementations may want to manage a reserve quantity at each location so that the * available quantity for purchases is the quantityOnHand - reserveQuantity. */ @Override public Integer getReserveQuantity() { return reserveQuantity; } /** * Sets the reserve quantity. * Implementations may want to manage a reserve quantity at each location so that the * available quantity for purchases is the quantityOnHand - reserveQuantity. */ @Override public void setReserveQuantity(Integer reserveQuantity) { this.reserveQuantity = reserveQuantity; } /** * Returns the getQuantityOnHand() - getReserveQuantity(). * Preferred implementation is to return null if getQuantityOnHand() is null and to treat * a null in getReserveQuantity() as ZERO. */ @Override public Integer getAvailableQuantity() { if (getQuantityOnHand() == null || getReserveQuantity() == null) { return getQuantityOnHand(); } else { return getQuantityOnHand() - getReserveQuantity(); } } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((locationId == null) ? 0 : locationId.hashCode()); result = prime * result + ((skuId == null) ? 0 : skuId.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; SkuAvailabilityImpl other = (SkuAvailabilityImpl) obj; if (id != null && other.id != null) { return id.equals(other.id); } if (locationId == null) { if (other.locationId != null) return false; } else if (!locationId.equals(other.locationId)) return false; if (skuId == null) { if (other.skuId != null) return false; } else if (!skuId.equals(other.skuId)) return false; return true; } }
1 (no label)
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_inventory_domain_SkuAvailabilityImpl.java
25
abstract class EntrySetView extends AbstractSet<Map.Entry<K, V>> { private transient int size = -1, sizeModCount; @Override public int size() { if (fromStart && toEnd) return m.size(); if (size == -1 || sizeModCount != m.modCount) { sizeModCount = m.modCount; size = 0; Iterator<?> i = iterator(); while (i.hasNext()) { size++; i.next(); } } return size; } @Override public boolean isEmpty() { OMVRBTreeEntryPosition<K, V> n = absLowest(); return n == null || tooHigh(n.getKey()); } @Override public boolean contains(final Object o) { if (!(o instanceof OMVRBTreeEntry)) return false; final OMVRBTreeEntry<K, V> entry = (OMVRBTreeEntry<K, V>) o; final K key = entry.getKey(); if (!inRange(key)) return false; V nodeValue = m.get(key); return nodeValue != null && valEquals(nodeValue, entry.getValue()); } @Override public boolean remove(final Object o) { if (!(o instanceof OMVRBTreeEntry)) return false; final OMVRBTreeEntry<K, V> entry = (OMVRBTreeEntry<K, V>) o; final K key = entry.getKey(); if (!inRange(key)) return false; final OMVRBTreeEntry<K, V> node = m.getEntry(key, PartialSearchMode.NONE); if (node != null && valEquals(node.getValue(), entry.getValue())) { m.deleteEntry(node); return true; } return false; } }
0 (true)
commons_src_main_java_com_orientechnologies_common_collection_OMVRBTree.java
219
PriorityQueue<Passage> passageQueue = new PriorityQueue<Passage>(n, new Comparator<Passage>() { @Override public int compare(Passage left, Passage right) { if (left.score < right.score) { return -1; } else if (left.score > right.score) { return 1; } else { return left.startOffset - right.startOffset; } } });
0 (true)
src_main_java_org_apache_lucene_search_postingshighlight_XPostingsHighlighter.java
181
private static class AppendFunction implements IFunction<String,String> { private String add; private AppendFunction(String add) { this.add = add; } @Override public String apply(String input) { return input+add; } }
0 (true)
hazelcast-client_src_test_java_com_hazelcast_client_atomicreference_ClientAtomicReferenceTest.java
808
@Entity @Table(name = "BLC_OFFER_AUDIT") @Inheritance(strategy=InheritanceType.JOINED) public class OfferAuditImpl implements OfferAudit { public static final long serialVersionUID = 1L; protected static final Log LOG = LogFactory.getLog(OfferAuditImpl.class); @Id @GeneratedValue(generator = "OfferAuditId") @GenericGenerator( name="OfferAuditId", strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator", parameters = { @Parameter(name="segment_value", value="OfferAuditImpl"), @Parameter(name="entity_name", value="org.broadleafcommerce.core.offer.domain.OfferAuditImpl") } ) @Column(name = "OFFER_AUDIT_ID") protected Long id; @Column(name = "OFFER_ID") @Index(name="OFFERAUDIT_OFFER_INDEX", columnNames={"OFFER_ID"}) protected Long offerId; @Column(name = "CUSTOMER_ID") @Index(name="OFFERAUDIT_CUSTOMER_INDEX", columnNames={"CUSTOMER_ID"}) protected Long customerId; @Column(name = "ORDER_ID") @Index(name="OFFERAUDIT_ORDER_INDEX", columnNames={"ORDER_ID"}) protected Long orderId; @Column(name = "REDEEMED_DATE") protected Date redeemedDate; @Override public Long getId() { return id; } @Override public void setId(Long id) { this.id = id; } @Override public Long getOfferId() { return offerId; } @Override public void setOfferId(Long offerId) { this.offerId = offerId; } @Override public Long getOfferCodeId() { throw new UnsupportedOperationException(); } @Override public void setOfferCodeId(Long offerCodeId) { throw new UnsupportedOperationException(); } @Override public Long getCustomerId() { return customerId; } @Override public void setCustomerId(Long customerId) { this.customerId = customerId; } @Override public Long getOrderId() { return orderId; } @Override public void setOrderId(Long orderId) { this.orderId = orderId; } @Override public Date getRedeemedDate() { return redeemedDate; } @Override public void setRedeemedDate(Date redeemedDate) { this.redeemedDate = redeemedDate; } @Override public int hashCode() { try { return new HashCodeBuilder() .append(customerId) .append(offerId) .append(getOfferCodeId()) .append(redeemedDate) .append(orderId) .build(); } catch (UnsupportedOperationException e) { return new HashCodeBuilder() .append(customerId) .append(offerId) .append(redeemedDate) .append(orderId) .build(); } } @Override public boolean equals(Object o) { if (o instanceof OfferAuditImpl) { OfferAuditImpl that = (OfferAuditImpl) o; try { return new EqualsBuilder() .append(this.id, that.id) .append(this.customerId, that.customerId) .append(this.offerId, that.offerId) .append(this.getOfferCodeId(), that.getOfferCodeId()) .append(this.redeemedDate, that.redeemedDate) .append(this.orderId, that.orderId) .build(); } catch (UnsupportedOperationException e) { return new EqualsBuilder() .append(this.id, that.id) .append(this.customerId, that.customerId) .append(this.offerId, that.offerId) .append(this.redeemedDate, that.redeemedDate) .append(this.orderId, that.orderId) .build(); } } return false; } }
1 (no label)
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_domain_OfferAuditImpl.java
504
public class CreateIndexRequestBuilder extends AcknowledgedRequestBuilder<CreateIndexRequest, CreateIndexResponse, CreateIndexRequestBuilder> { public CreateIndexRequestBuilder(IndicesAdminClient indicesClient) { super((InternalIndicesAdminClient) indicesClient, new CreateIndexRequest()); } public CreateIndexRequestBuilder(IndicesAdminClient indicesClient, String index) { super((InternalIndicesAdminClient) indicesClient, new CreateIndexRequest(index)); } /** * Sets the name of the index to be created */ public CreateIndexRequestBuilder setIndex(String index) { request.index(index); return this; } /** * The settings to create the index with. */ public CreateIndexRequestBuilder setSettings(Settings settings) { request.settings(settings); return this; } /** * The settings to create the index with. */ public CreateIndexRequestBuilder setSettings(Settings.Builder settings) { request.settings(settings); return this; } /** * Allows to set the settings using a json builder. */ public CreateIndexRequestBuilder setSettings(XContentBuilder builder) { request.settings(builder); return this; } /** * The settings to create the index with (either json/yaml/properties format) */ public CreateIndexRequestBuilder setSettings(String source) { request.settings(source); return this; } /** * A simplified version of settings that takes key value pairs settings. */ public CreateIndexRequestBuilder setSettings(Object... settings) { request.settings(settings); return this; } /** * The settings to create the index with (either json/yaml/properties format) */ public CreateIndexRequestBuilder setSettings(Map<String, Object> source) { request.settings(source); return this; } /** * Adds mapping that will be added when the index gets created. * * @param type The mapping type * @param source The mapping source */ public CreateIndexRequestBuilder addMapping(String type, String source) { request.mapping(type, source); return this; } /** * The cause for this index creation. */ public CreateIndexRequestBuilder setCause(String cause) { request.cause(cause); return this; } /** * Adds mapping that will be added when the index gets created. * * @param type The mapping type * @param source The mapping source */ public CreateIndexRequestBuilder addMapping(String type, XContentBuilder source) { request.mapping(type, source); return this; } /** * Adds mapping that will be added when the index gets created. * * @param type The mapping type * @param source The mapping source */ public CreateIndexRequestBuilder addMapping(String type, Map<String, Object> source) { request.mapping(type, source); return this; } /** * A specialized simplified mapping source method, takes the form of simple properties definition: * ("field1", "type=string,store=true"). */ public CreateIndexRequestBuilder addMapping(String type, Object... source) { request.mapping(type, source); return this; } /** * Sets the settings and mappings as a single source. */ public CreateIndexRequestBuilder setSource(String source) { request.source(source); return this; } /** * Sets the settings and mappings as a single source. */ public CreateIndexRequestBuilder setSource(BytesReference source) { request.source(source); return this; } /** * Sets the settings and mappings as a single source. */ public CreateIndexRequestBuilder setSource(byte[] source) { request.source(source); return this; } /** * Sets the settings and mappings as a single source. 
*/ public CreateIndexRequestBuilder setSource(byte[] source, int offset, int length) { request.source(source, offset, length); return this; } /** * Sets the settings and mappings as a single source. */ public CreateIndexRequestBuilder setSource(Map<String, Object> source) { request.source(source); return this; } /** * Adds custom metadata to the index to be created. */ public CreateIndexRequestBuilder addCustom(IndexMetaData.Custom custom) { request.custom(custom); return this; } /** * Sets the settings and mappings as a single source. */ public CreateIndexRequestBuilder setSource(XContentBuilder source) { request.source(source); return this; } @Override protected void doExecute(ActionListener<CreateIndexResponse> listener) { ((IndicesAdminClient) client).create(request, listener); } }
0 (true)
src_main_java_org_elasticsearch_action_admin_indices_create_CreateIndexRequestBuilder.java
1,350
private final class FlushTask implements Runnable { private FlushTask() { } @Override public void run() { try { commit(); } catch (Throwable e) { OLogManager.instance().error(this, "Error during WAL background flush", e); } } private void commit() throws IOException { if (pagesCache.isEmpty()) return; if (!flushNewData) return; flushNewData = false; final int maxSize = pagesCache.size(); ODirectMemoryPointer[] pagesToFlush = new ODirectMemoryPointer[maxSize]; long filePointer = nextPositionToFlush; int lastRecordOffset = -1; long lastPageIndex = -1; int flushedPages = 0; Iterator<OWALPage> pageIterator = pagesCache.iterator(); while (flushedPages < maxSize) { final OWALPage page = pageIterator.next(); synchronized (page) { ODirectMemoryPointer dataPointer; if (flushedPages == maxSize - 1) { dataPointer = new ODirectMemoryPointer(OWALPage.PAGE_SIZE); page.getPagePointer().moveData(0, dataPointer, 0, OWALPage.PAGE_SIZE); } else { dataPointer = page.getPagePointer(); } pagesToFlush[flushedPages] = dataPointer; int recordOffset = findLastRecord(page, true); if (recordOffset >= 0) { lastRecordOffset = recordOffset; lastPageIndex = flushedPages; } } flushedPages++; } flushId++; synchronized (rndFile) { rndFile.seek(filePointer); for (int i = 0; i < pagesToFlush.length; i++) { ODirectMemoryPointer dataPointer = pagesToFlush[i]; byte[] pageContent = dataPointer.get(0, OWALPage.PAGE_SIZE); if (i == pagesToFlush.length - 1) dataPointer.free(); OLongSerializer.INSTANCE.serializeNative(flushId, pageContent, OWALPage.FLUSH_ID_OFFSET); OIntegerSerializer.INSTANCE.serializeNative(i, pageContent, OWALPage.FLUSH_INDEX_OFFSET); flushPage(pageContent); filePointer += OWALPage.PAGE_SIZE; } rndFile.getFD().sync(); } long oldPositionToFlush = nextPositionToFlush; nextPositionToFlush = filePointer - OWALPage.PAGE_SIZE; if (lastRecordOffset >= 0) flushedLsn = new OLogSequenceNumber(order, oldPositionToFlush + lastPageIndex * OWALPage.PAGE_SIZE + lastRecordOffset); for (int i = 0; i < flushedPages - 1; i++) { OWALPage page = pagesCache.poll(); page.getPagePointer().free(); } assert !pagesCache.isEmpty(); } private void flushPage(byte[] content) throws IOException { CRC32 crc32 = new CRC32(); crc32.update(content, OIntegerSerializer.INT_SIZE, OWALPage.PAGE_SIZE - OIntegerSerializer.INT_SIZE); OIntegerSerializer.INSTANCE.serializeNative((int) crc32.getValue(), content, 0); rndFile.write(content); } }
1 (no label)
core_src_main_java_com_orientechnologies_orient_core_storage_impl_local_paginated_wal_OWriteAheadLog.java
3,236
public class ShardFieldData extends AbstractIndexShardComponent implements IndexFieldDataCache.Listener { final CounterMetric evictionsMetric = new CounterMetric(); final CounterMetric totalMetric = new CounterMetric(); final ConcurrentMap<String, CounterMetric> perFieldTotals = ConcurrentCollections.newConcurrentMap(); private final CircuitBreakerService breakerService; @Inject public ShardFieldData(ShardId shardId, @IndexSettings Settings indexSettings, CircuitBreakerService breakerService) { super(shardId, indexSettings); this.breakerService = breakerService; } public FieldDataStats stats(String... fields) { ObjectLongOpenHashMap<String> fieldTotals = null; if (fields != null && fields.length > 0) { fieldTotals = new ObjectLongOpenHashMap<String>(); for (Map.Entry<String, CounterMetric> entry : perFieldTotals.entrySet()) { for (String field : fields) { if (Regex.simpleMatch(field, entry.getKey())) { fieldTotals.put(entry.getKey(), entry.getValue().count()); } } } } return new FieldDataStats(totalMetric.count(), evictionsMetric.count(), fieldTotals); } @Override public void onLoad(FieldMapper.Names fieldNames, FieldDataType fieldDataType, AtomicFieldData fieldData) { long sizeInBytes = fieldData.getMemorySizeInBytes(); totalMetric.inc(sizeInBytes); String keyFieldName = fieldNames.indexName(); CounterMetric total = perFieldTotals.get(keyFieldName); if (total != null) { total.inc(sizeInBytes); } else { total = new CounterMetric(); total.inc(sizeInBytes); CounterMetric prev = perFieldTotals.putIfAbsent(keyFieldName, total); if (prev != null) { prev.inc(sizeInBytes); } } } @Override public void onUnload(FieldMapper.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes, @Nullable AtomicFieldData fieldData) { if (wasEvicted) { evictionsMetric.inc(); } if (sizeInBytes != -1) { // Since field data is being unloaded (due to expiration or manual // clearing), we also need to decrement the used bytes in the breaker breakerService.getBreaker().addWithoutBreaking(-sizeInBytes); totalMetric.dec(sizeInBytes); String keyFieldName = fieldNames.indexName(); CounterMetric total = perFieldTotals.get(keyFieldName); if (total != null) { total.dec(sizeInBytes); } } } }
1 (no label)
src_main_java_org_elasticsearch_index_fielddata_ShardFieldData.java
25
static final class OrCompletion extends Completion { final CompletableFuture<?> src; final CompletableFuture<?> snd; final CompletableFuture<Object> dst; OrCompletion(CompletableFuture<?> src, CompletableFuture<?> snd, CompletableFuture<Object> dst) { this.src = src; this.snd = snd; this.dst = dst; } public final void run() { final CompletableFuture<?> a; final CompletableFuture<?> b; final CompletableFuture<Object> dst; Object r, t; Throwable ex; if ((dst = this.dst) != null && (((a = this.src) != null && (r = a.result) != null) || ((b = this.snd) != null && (r = b.result) != null)) && compareAndSet(0, 1)) { if (r instanceof AltResult) { ex = ((AltResult)r).ex; t = null; } else { ex = null; t = r; } dst.internalComplete(t, ex); } } private static final long serialVersionUID = 5232453952276885070L; }
0 (true)
src_main_java_jsr166e_CompletableFuture.java
179
public enum ProcessURLAction { PAGE, PRODUCT, CATEGORY, PROCEED, REDIRECT, UNKNOWN }
0 (true)
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_web_ProcessURLAction.java
22
static abstract class NavigableSubMap<K, V> extends AbstractMap<K, V> implements ONavigableMap<K, V>, java.io.Serializable { /** * The backing map. */ final OMVRBTree<K, V> m; /** * Endpoints are represented as triples (fromStart, lo, loInclusive) and (toEnd, hi, hiInclusive). If fromStart is true, then * the low (absolute) bound is the start of the backing map, and the other values are ignored. Otherwise, if loInclusive is * true, lo is the inclusive bound, else lo is the exclusive bound. Similarly for the upper bound. */ final K lo, hi; final boolean fromStart, toEnd; final boolean loInclusive, hiInclusive; NavigableSubMap(final OMVRBTree<K, V> m, final boolean fromStart, K lo, final boolean loInclusive, final boolean toEnd, K hi, final boolean hiInclusive) { if (!fromStart && !toEnd) { if (m.compare(lo, hi) > 0) throw new IllegalArgumentException("fromKey > toKey"); } else { if (!fromStart) // type check m.compare(lo, lo); if (!toEnd) m.compare(hi, hi); } this.m = m; this.fromStart = fromStart; this.lo = lo; this.loInclusive = loInclusive; this.toEnd = toEnd; this.hi = hi; this.hiInclusive = hiInclusive; } // internal utilities final boolean tooLow(final Object key) { if (!fromStart) { int c = m.compare(key, lo); if (c < 0 || (c == 0 && !loInclusive)) return true; } return false; } final boolean tooHigh(final Object key) { if (!toEnd) { int c = m.compare(key, hi); if (c > 0 || (c == 0 && !hiInclusive)) return true; } return false; } final boolean inRange(final Object key) { return !tooLow(key) && !tooHigh(key); } final boolean inClosedRange(final Object key) { return (fromStart || m.compare(key, lo) >= 0) && (toEnd || m.compare(hi, key) >= 0); } final boolean inRange(final Object key, final boolean inclusive) { return inclusive ? inRange(key) : inClosedRange(key); } /* * Absolute versions of relation operations. Subclasses map to these using like-named "sub" versions that invert senses for * descending maps */ final OMVRBTreeEntryPosition<K, V> absLowest() { OMVRBTreeEntry<K, V> e = (fromStart ? m.getFirstEntry() : (loInclusive ? m.getCeilingEntry(lo, PartialSearchMode.LOWEST_BOUNDARY) : m.getHigherEntry(lo))); return (e == null || tooHigh(e.getKey())) ? null : new OMVRBTreeEntryPosition<K, V>(e); } final OMVRBTreeEntryPosition<K, V> absHighest() { OMVRBTreeEntry<K, V> e = (toEnd ? m.getLastEntry() : (hiInclusive ? m.getFloorEntry(hi, PartialSearchMode.HIGHEST_BOUNDARY) : m.getLowerEntry(hi))); return (e == null || tooLow(e.getKey())) ? null : new OMVRBTreeEntryPosition<K, V>(e); } final OMVRBTreeEntryPosition<K, V> absCeiling(K key) { if (tooLow(key)) return absLowest(); OMVRBTreeEntry<K, V> e = m.getCeilingEntry(key, PartialSearchMode.NONE); return (e == null || tooHigh(e.getKey())) ? null : new OMVRBTreeEntryPosition<K, V>(e); } final OMVRBTreeEntryPosition<K, V> absHigher(K key) { if (tooLow(key)) return absLowest(); OMVRBTreeEntry<K, V> e = m.getHigherEntry(key); return (e == null || tooHigh(e.getKey())) ? null : new OMVRBTreeEntryPosition<K, V>(e); } final OMVRBTreeEntryPosition<K, V> absFloor(K key) { if (tooHigh(key)) return absHighest(); OMVRBTreeEntry<K, V> e = m.getFloorEntry(key, PartialSearchMode.NONE); return (e == null || tooLow(e.getKey())) ? null : new OMVRBTreeEntryPosition<K, V>(e); } final OMVRBTreeEntryPosition<K, V> absLower(K key) { if (tooHigh(key)) return absHighest(); OMVRBTreeEntry<K, V> e = m.getLowerEntry(key); return (e == null || tooLow(e.getKey())) ? 
null : new OMVRBTreeEntryPosition<K, V>(e); } /** Returns the absolute high fence for ascending traversal */ final OMVRBTreeEntryPosition<K, V> absHighFence() { return (toEnd ? null : new OMVRBTreeEntryPosition<K, V>(hiInclusive ? m.getHigherEntry(hi) : m.getCeilingEntry(hi, PartialSearchMode.LOWEST_BOUNDARY))); } /** Return the absolute low fence for descending traversal */ final OMVRBTreeEntryPosition<K, V> absLowFence() { return (fromStart ? null : new OMVRBTreeEntryPosition<K, V>(loInclusive ? m.getLowerEntry(lo) : m.getFloorEntry(lo, PartialSearchMode.HIGHEST_BOUNDARY))); } // Abstract methods defined in ascending vs descending classes // These relay to the appropriate absolute versions abstract OMVRBTreeEntry<K, V> subLowest(); abstract OMVRBTreeEntry<K, V> subHighest(); abstract OMVRBTreeEntry<K, V> subCeiling(K key); abstract OMVRBTreeEntry<K, V> subHigher(K key); abstract OMVRBTreeEntry<K, V> subFloor(K key); abstract OMVRBTreeEntry<K, V> subLower(K key); /** Returns ascending iterator from the perspective of this submap */ abstract OLazyIterator<K> keyIterator(); /** Returns descending iterator from the perspective of this submap */ abstract OLazyIterator<K> descendingKeyIterator(); // public methods @Override public boolean isEmpty() { return (fromStart && toEnd) ? m.isEmpty() : entrySet().isEmpty(); } @Override public int size() { return (fromStart && toEnd) ? m.size() : entrySet().size(); } @Override public final boolean containsKey(Object key) { return inRange(key) && m.containsKey(key); } @Override public final V put(K key, V value) { if (!inRange(key)) throw new IllegalArgumentException("key out of range"); return m.put(key, value); } @Override public final V get(Object key) { return !inRange(key) ? null : m.get(key); } @Override public final V remove(Object key) { return !inRange(key) ? null : m.remove(key); } public final Map.Entry<K, V> ceilingEntry(K key) { return exportEntry(subCeiling(key)); } public final K ceilingKey(K key) { return keyOrNull(subCeiling(key)); } public final Map.Entry<K, V> higherEntry(K key) { return exportEntry(subHigher(key)); } public final K higherKey(K key) { return keyOrNull(subHigher(key)); } public final Map.Entry<K, V> floorEntry(K key) { return exportEntry(subFloor(key)); } public final K floorKey(K key) { return keyOrNull(subFloor(key)); } public final Map.Entry<K, V> lowerEntry(K key) { return exportEntry(subLower(key)); } public final K lowerKey(K key) { return keyOrNull(subLower(key)); } public final K firstKey() { return key(subLowest()); } public final K lastKey() { return key(subHighest()); } public final Map.Entry<K, V> firstEntry() { return exportEntry(subLowest()); } public final Map.Entry<K, V> lastEntry() { return exportEntry(subHighest()); } public final Map.Entry<K, V> pollFirstEntry() { OMVRBTreeEntry<K, V> e = subLowest(); Map.Entry<K, V> result = exportEntry(e); if (e != null) m.deleteEntry(e); return result; } public final Map.Entry<K, V> pollLastEntry() { OMVRBTreeEntry<K, V> e = subHighest(); Map.Entry<K, V> result = exportEntry(e); if (e != null) m.deleteEntry(e); return result; } // Views transient ONavigableMap<K, V> descendingMapView = null; transient EntrySetView entrySetView = null; transient KeySet<K> navigableKeySetView = null; @SuppressWarnings("rawtypes") public final ONavigableSet<K> navigableKeySet() { KeySet<K> nksv = navigableKeySetView; return (nksv != null) ? 
nksv : (navigableKeySetView = new OMVRBTree.KeySet(this)); } @Override public final Set<K> keySet() { return navigableKeySet(); } public ONavigableSet<K> descendingKeySet() { return descendingMap().navigableKeySet(); } public final SortedMap<K, V> subMap(final K fromKey, final K toKey) { return subMap(fromKey, true, toKey, false); } public final SortedMap<K, V> headMap(final K toKey) { return headMap(toKey, false); } public final SortedMap<K, V> tailMap(final K fromKey) { return tailMap(fromKey, true); } // View classes abstract class EntrySetView extends AbstractSet<Map.Entry<K, V>> { private transient int size = -1, sizeModCount; @Override public int size() { if (fromStart && toEnd) return m.size(); if (size == -1 || sizeModCount != m.modCount) { sizeModCount = m.modCount; size = 0; Iterator<?> i = iterator(); while (i.hasNext()) { size++; i.next(); } } return size; } @Override public boolean isEmpty() { OMVRBTreeEntryPosition<K, V> n = absLowest(); return n == null || tooHigh(n.getKey()); } @Override public boolean contains(final Object o) { if (!(o instanceof OMVRBTreeEntry)) return false; final OMVRBTreeEntry<K, V> entry = (OMVRBTreeEntry<K, V>) o; final K key = entry.getKey(); if (!inRange(key)) return false; V nodeValue = m.get(key); return nodeValue != null && valEquals(nodeValue, entry.getValue()); } @Override public boolean remove(final Object o) { if (!(o instanceof OMVRBTreeEntry)) return false; final OMVRBTreeEntry<K, V> entry = (OMVRBTreeEntry<K, V>) o; final K key = entry.getKey(); if (!inRange(key)) return false; final OMVRBTreeEntry<K, V> node = m.getEntry(key, PartialSearchMode.NONE); if (node != null && valEquals(node.getValue(), entry.getValue())) { m.deleteEntry(node); return true; } return false; } } /** * Iterators for SubMaps */ abstract class SubMapIterator<T> implements OLazyIterator<T> { OMVRBTreeEntryPosition<K, V> lastReturned; OMVRBTreeEntryPosition<K, V> next; final K fenceKey; int expectedModCount; SubMapIterator(final OMVRBTreeEntryPosition<K, V> first, final OMVRBTreeEntryPosition<K, V> fence) { expectedModCount = m.modCount; lastReturned = null; next = first; fenceKey = fence == null ? 
null : fence.getKey(); } public final boolean hasNext() { if (next != null) { final K k = next.getKey(); return k != fenceKey && !k.equals(fenceKey); } return false; } final OMVRBTreeEntryPosition<K, V> nextEntry() { final OMVRBTreeEntryPosition<K, V> e; if (next != null) e = new OMVRBTreeEntryPosition<K, V>(next); else e = null; if (e == null || e.entry == null) throw new NoSuchElementException(); final K k = e.getKey(); if (k == fenceKey || k.equals(fenceKey)) throw new NoSuchElementException(); if (m.modCount != expectedModCount) throw new ConcurrentModificationException(); next.assign(OMVRBTree.next(e)); lastReturned = e; return e; } final OMVRBTreeEntryPosition<K, V> prevEntry() { final OMVRBTreeEntryPosition<K, V> e; if (next != null) e = new OMVRBTreeEntryPosition<K, V>(next); else e = null; if (e == null || e.entry == null) throw new NoSuchElementException(); final K k = e.getKey(); if (k == fenceKey || k.equals(fenceKey)) throw new NoSuchElementException(); if (m.modCount != expectedModCount) throw new ConcurrentModificationException(); next.assign(OMVRBTree.previous(e)); lastReturned = e; return e; } final public T update(final T iValue) { if (lastReturned == null) throw new IllegalStateException(); if (m.modCount != expectedModCount) throw new ConcurrentModificationException(); return (T) lastReturned.entry.setValue((V) iValue); } final void removeAscending() { if (lastReturned == null) throw new IllegalStateException(); if (m.modCount != expectedModCount) throw new ConcurrentModificationException(); // deleted entries are replaced by their successors if (lastReturned.entry.getLeft() != null && lastReturned.entry.getRight() != null) next = lastReturned; m.deleteEntry(lastReturned.entry); lastReturned = null; expectedModCount = m.modCount; } final void removeDescending() { if (lastReturned == null) throw new IllegalStateException(); if (m.modCount != expectedModCount) throw new ConcurrentModificationException(); m.deleteEntry(lastReturned.entry); lastReturned = null; expectedModCount = m.modCount; } } final class SubMapEntryIterator extends SubMapIterator<Map.Entry<K, V>> { SubMapEntryIterator(final OMVRBTreeEntryPosition<K, V> first, final OMVRBTreeEntryPosition<K, V> fence) { super(first, fence); } public Map.Entry<K, V> next() { final Map.Entry<K, V> e = OMVRBTree.exportEntry(next); nextEntry(); return e; } public void remove() { removeAscending(); } } final class SubMapKeyIterator extends SubMapIterator<K> { SubMapKeyIterator(final OMVRBTreeEntryPosition<K, V> first, final OMVRBTreeEntryPosition<K, V> fence) { super(first, fence); } public K next() { return nextEntry().getKey(); } public void remove() { removeAscending(); } } final class DescendingSubMapEntryIterator extends SubMapIterator<Map.Entry<K, V>> { DescendingSubMapEntryIterator(final OMVRBTreeEntryPosition<K, V> last, final OMVRBTreeEntryPosition<K, V> fence) { super(last, fence); } public Map.Entry<K, V> next() { final Map.Entry<K, V> e = OMVRBTree.exportEntry(next); prevEntry(); return e; } public void remove() { removeDescending(); } } final class DescendingSubMapKeyIterator extends SubMapIterator<K> { DescendingSubMapKeyIterator(final OMVRBTreeEntryPosition<K, V> last, final OMVRBTreeEntryPosition<K, V> fence) { super(last, fence); } public K next() { return prevEntry().getKey(); } public void remove() { removeDescending(); } } }
0 (true)
commons_src_main_java_com_orientechnologies_common_collection_OMVRBTree.java
58
class AddInitializerProposal extends InitializerProposal { private AddInitializerProposal(TypedDeclaration dec, int offset, int length, TextChange change) { super("Add initializer to '" + dec.getName() + "'", change, dec, dec.getType(), new Region(offset, length), MINOR_CHANGE, -1, null); } private static void addInitializerProposal(Tree.CompilationUnit cu, Collection<ICompletionProposal> proposals, IFile file, Tree.TypedDeclaration decNode, Tree.SpecifierOrInitializerExpression sie) { MethodOrValue dec = (MethodOrValue) decNode.getDeclarationModel(); if (dec==null) return; if (dec.getInitializerParameter()==null && !dec.isFormal()) { TextChange change = new TextFileChange("Add Initializer", file); int offset = decNode.getStopIndex(); String defaultValue = defaultValue(cu.getUnit(), dec.getType()); String def; int selectionOffset; if (decNode instanceof Tree.MethodDeclaration) { def = " => " + defaultValue; selectionOffset = offset + 4; } else { def = " = " + defaultValue; selectionOffset = offset + 3; } change.setEdit(new InsertEdit(offset, def)); proposals.add(new AddInitializerProposal(dec, selectionOffset, defaultValue.length(), change)); } } static void addInitializerProposals(Collection<ICompletionProposal> proposals, IFile file, Tree.CompilationUnit cu, Node node) { if (node instanceof Tree.AttributeDeclaration) { Tree.AttributeDeclaration attDecNode = (Tree.AttributeDeclaration) node; Tree.SpecifierOrInitializerExpression sie = attDecNode.getSpecifierOrInitializerExpression(); if (!(sie instanceof Tree.LazySpecifierExpression)) { addInitializerProposal(cu, proposals, file, attDecNode, sie); } } if (node instanceof Tree.MethodDeclaration) { Tree.MethodDeclaration methDecNode = (Tree.MethodDeclaration) node; Tree.SpecifierExpression sie = methDecNode.getSpecifierExpression(); addInitializerProposal(cu, proposals, file, methDecNode, sie); } } }
0 (true)
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_AddInitializerProposal.java
257
public interface EmailTrackingClicks extends Serializable { /** * @return the emailId */ public abstract Long getId(); /** * @param id the i to set */ public abstract void setId(Long id); /** * @return the dateClicked */ public abstract Date getDateClicked(); /** * @param dateClicked the dateClicked to set */ public abstract void setDateClicked(Date dateClicked); /** * @return the destinationUri */ public abstract String getDestinationUri(); /** * @param destinationUri the destinationUri to set */ public abstract void setDestinationUri(String destinationUri); /** * @return the queryString */ public abstract String getQueryString(); /** * @param queryString the queryString to set */ public abstract void setQueryString(String queryString); /** * @return the emailTracking */ public abstract EmailTracking getEmailTracking(); /** * @param emailTracking the emailTracking to set */ public abstract void setEmailTracking(EmailTracking emailTracking); public abstract String getCustomerId(); /** * @param customerId the customer to set */ public abstract void setCustomerId(String customerId); }
0 (true)
common_src_main_java_org_broadleafcommerce_common_email_domain_EmailTrackingClicks.java
98
final Thread thread = new Thread() { public void run() { try { Thread.sleep(10); } catch (InterruptedException e) { e.printStackTrace(); } hz1.getLifecycleService().terminate(); } };
0 (true)
hazelcast-client_src_test_java_com_hazelcast_client_ClientIssueTest.java
60
class AddParameterProposal extends InitializerProposal { private AddParameterProposal(Declaration d, Declaration dec, ProducedType type, int offset, int len, TextChange change, int exitPos, CeylonEditor editor) { super("Add '" + d.getName() + "' to parameter list of '" + dec.getName() + "'", change, dec, type, new Region(offset, len), ADD_CORR, exitPos, editor); } private static void addParameterProposal(Tree.CompilationUnit cu, Collection<ICompletionProposal> proposals, IFile file, Tree.TypedDeclaration decNode, Tree.SpecifierOrInitializerExpression sie, Node node, CeylonEditor editor) { MethodOrValue dec = (MethodOrValue) decNode.getDeclarationModel(); if (dec==null) return; if (dec.getInitializerParameter()==null && !dec.isFormal()) { TextChange change = new TextFileChange("Add Parameter", file); change.setEdit(new MultiTextEdit()); IDocument doc = EditorUtil.getDocument(change); //TODO: copy/pasted from SplitDeclarationProposal String params = null; if (decNode instanceof Tree.MethodDeclaration) { List<ParameterList> pls = ((Tree.MethodDeclaration) decNode).getParameterLists(); if (pls.isEmpty()) { return; } else { Integer start = pls.get(0).getStartIndex(); Integer end = pls.get(pls.size()-1).getStopIndex(); try { params = doc.get(start, end-start+1); } catch (BadLocationException e) { e.printStackTrace(); return; } } } Tree.Declaration container = findDeclarationWithBody(cu, decNode); Tree.ParameterList pl; if (container instanceof Tree.ClassDefinition) { pl = ((Tree.ClassDefinition) container).getParameterList(); if (pl==null) { return; } } else if (container instanceof Tree.MethodDefinition) { List<Tree.ParameterList> pls = ((Tree.MethodDefinition) container).getParameterLists(); if (pls.isEmpty()) { return; } pl = pls.get(0); } else { return; } String def; int len; if (sie==null) { String defaultValue = defaultValue(cu.getUnit(), dec.getType()); len = defaultValue.length(); if (decNode instanceof Tree.MethodDeclaration) { def = " => " + defaultValue; } else { def = " = " + defaultValue; } } else { len = 0; int start; try { def = doc.get(sie.getStartIndex(), sie.getStopIndex()-sie.getStartIndex()+1); start = sie.getStartIndex(); if (start>0 && doc.get(start-1,1).equals(" ")) { start--; def = " " + def; } } catch (BadLocationException e) { e.printStackTrace(); return; } change.addEdit(new DeleteEdit(start, sie.getStopIndex()-start+1)); } if (params!=null) { def = " = " + params + def; } String param = (pl.getParameters().isEmpty() ? 
"" : ", ") + dec.getName() + def; Integer offset = pl.getStopIndex(); change.addEdit(new InsertEdit(offset, param)); Tree.Type type = decNode.getType(); int shift=0; ProducedType paramType; if (type instanceof Tree.LocalModifier) { Integer typeOffset = type.getStartIndex(); paramType = type.getTypeModel(); String explicitType; if (paramType==null) { explicitType = "Object"; paramType = type.getUnit().getObjectDeclaration().getType(); } else { explicitType = paramType.getProducedTypeName(); HashSet<Declaration> decs = new HashSet<Declaration>(); importType(decs, paramType, cu); shift = applyImports(change, decs, cu, doc); } change.addEdit(new ReplaceEdit(typeOffset, type.getText().length(), explicitType)); } else { paramType = type.getTypeModel(); } int exitPos = node.getStopIndex()+1; proposals.add(new AddParameterProposal(dec, container.getDeclarationModel(), paramType, offset+param.length()+shift-len, len, change, exitPos, editor)); } } static void addParameterProposals(Collection<ICompletionProposal> proposals, IFile file, Tree.CompilationUnit cu, Node node, CeylonEditor editor) { if (node instanceof Tree.AttributeDeclaration) { Tree.AttributeDeclaration attDecNode = (Tree.AttributeDeclaration) node; Tree.SpecifierOrInitializerExpression sie = attDecNode.getSpecifierOrInitializerExpression(); if (!(sie instanceof Tree.LazySpecifierExpression)) { addParameterProposal(cu, proposals, file, attDecNode, sie, node, editor); } } if (node instanceof Tree.MethodDeclaration) { Tree.MethodDeclaration methDecNode = (Tree.MethodDeclaration) node; Tree.SpecifierExpression sie = methDecNode.getSpecifierExpression(); addParameterProposal(cu, proposals, file, methDecNode, sie, node, editor); } } }
0 (true)
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_AddParameterProposal.java
253
static final class RateLimitedIndexOutput extends BufferedIndexOutput { private final IndexOutput delegate; private final BufferedIndexOutput bufferedDelegate; private final RateLimiter rateLimiter; private final StoreRateLimiting.Listener rateListener; RateLimitedIndexOutput(final RateLimiter rateLimiter, final StoreRateLimiting.Listener rateListener, final IndexOutput delegate) { super(delegate instanceof BufferedIndexOutput ? ((BufferedIndexOutput) delegate).getBufferSize() : BufferedIndexOutput.DEFAULT_BUFFER_SIZE); if (delegate instanceof BufferedIndexOutput) { bufferedDelegate = (BufferedIndexOutput) delegate; this.delegate = delegate; } else { this.delegate = delegate; bufferedDelegate = null; } this.rateLimiter = rateLimiter; this.rateListener = rateListener; } @Override protected void flushBuffer(byte[] b, int offset, int len) throws IOException { rateListener.onPause(rateLimiter.pause(len)); if (bufferedDelegate != null) { bufferedDelegate.flushBuffer(b, offset, len); } else { delegate.writeBytes(b, offset, len); } } @Override public long length() throws IOException { return delegate.length(); } @Override public void seek(long pos) throws IOException { flush(); delegate.seek(pos); } @Override public void flush() throws IOException { try { super.flush(); } finally { delegate.flush(); } } @Override public void setLength(long length) throws IOException { delegate.setLength(length); } @Override public void close() throws IOException { try { super.close(); } finally { delegate.close(); } } }
1 (no label)
src_main_java_org_apache_lucene_store_RateLimitedFSDirectory.java
407
EventHandler<PortableItemEvent> eventHandler = new EventHandler<PortableItemEvent>() { public void handle(PortableItemEvent portableItemEvent) { E item = includeValue ? (E) getContext().getSerializationService().toObject(portableItemEvent.getItem()) : null; Member member = getContext().getClusterService().getMember(portableItemEvent.getUuid()); ItemEvent<E> itemEvent = new ItemEvent<E>(getName(), portableItemEvent.getEventType(), item, member); if (portableItemEvent.getEventType() == ItemEventType.ADDED) { listener.itemAdded(itemEvent); } else { listener.itemRemoved(itemEvent); } } @Override public void onListenerRegister() { } };
1 (no label)
hazelcast-client_src_main_java_com_hazelcast_client_proxy_AbstractClientCollectionProxy.java
101
public class OException extends RuntimeException { private static final long serialVersionUID = 3882447822497861424L; public OException() { } public OException(final String message) { super(message); } public OException(final Throwable cause) { super(cause); } public OException(final String message, final Throwable cause) { super(message, cause); } }
0 (true)
commons_src_main_java_com_orientechnologies_common_exception_OException.java
328
public class MergeManagerSetupException extends Exception { private static final long serialVersionUID = 1L; public MergeManagerSetupException() { super(); } public MergeManagerSetupException(String arg0, Throwable arg1) { super(arg0, arg1); } public MergeManagerSetupException(String arg0) { super(arg0); } public MergeManagerSetupException(Throwable arg0) { super(arg0); } }
0 (true)
common_src_main_java_org_broadleafcommerce_common_extensibility_context_merge_exceptions_MergeManagerSetupException.java
34
public class KeywordCompletionProposal extends CompletionProposal { public static final Set<String> expressionKeywords = new LinkedHashSet<String>(Arrays.asList( "object", "value", "void", "function", "this", "outer", "super", "of", "in", "else", "for", "if", "is", "exists", "nonempty", "then", "let")); public static final Set<String> postfixKeywords = new LinkedHashSet<String>(Arrays.asList( "of", "in", "else", "exists", "nonempty", "then")); public static final Set<String> conditionKeywords = new LinkedHashSet<String>(Arrays.asList("assert", "let", "while", "for", "if", "switch", "case", "catch")); static void addKeywordProposals(CeylonParseController cpc, int offset, String prefix, List<ICompletionProposal> result, Node node, OccurrenceLocation ol, boolean postfix, int previousTokenType) { if (isModuleDescriptor(cpc) && ol!=META && (ol==null||!ol.reference)) { //outside of backtick quotes, the only keyword allowed //in a module descriptor is "import" if ("import".startsWith(prefix)) { addKeywordProposal(offset, prefix, result, "import"); } } else if (!prefix.isEmpty() && ol!=CATCH && ol!=CASE) { //TODO: this filters out satisfies/extends in an object named arg Set<String> keywords; if (ol==EXPRESSION) { keywords = postfix ? postfixKeywords : expressionKeywords; } else { keywords = Escaping.KEYWORDS; } for (String keyword: keywords) { if (keyword.startsWith(prefix)) { addKeywordProposal(offset, prefix, result, keyword); } } } else if (ol==CASE && previousTokenType==CeylonLexer.LPAREN) { addKeywordProposal(offset, prefix, result, "is"); } else if (!prefix.isEmpty() && ol==CASE) { if ("case".startsWith(prefix)) { addKeywordProposal(offset, prefix, result, "case"); } } else if (ol==null && node instanceof Tree.ConditionList && previousTokenType==CeylonLexer.LPAREN) { addKeywordProposal(offset, prefix, result, "exists"); addKeywordProposal(offset, prefix, result, "nonempty"); } } KeywordCompletionProposal(int offset, String prefix, String keyword) { super(offset, prefix, null, keyword, conditionKeywords.contains(keyword) ? keyword+" ()" : keyword); } @Override public Point getSelection(IDocument document) { int close = text.indexOf(')'); if (close>0) { return new Point(offset + close - prefix.length(), 0); } else { return super.getSelection(document); } } @Override public int length(IDocument document) { return prefix.length(); } @Override public Image getImage() { return getDecoratedImage(CEYLON_LITERAL, 0, false); } @Override public StyledString getStyledDisplayString() { return new StyledString(getDisplayString(), Highlights.KW_STYLER); } static void addKeywordProposal(int offset, String prefix, List<ICompletionProposal> result, String keyword) { result.add(new KeywordCompletionProposal(offset, prefix, keyword)); } }
0 (true)
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_KeywordCompletionProposal.java
4,249
static class Location { public final long translogId; public final long translogLocation; public final int size; public Location(long translogId, long translogLocation, int size) { this.translogId = translogId; this.translogLocation = translogLocation; this.size = size; } }
1 (no label)
src_main_java_org_elasticsearch_index_translog_Translog.java
8
public class HBaseStorageSetup { private static final Logger log = LoggerFactory.getLogger(HBaseStorageSetup.class); // hbase config for testing public static final String HBASE_PARENT_DIR_PROP = "test.hbase.parentdir"; private static final Pattern HBASE_SUPPORTED_VERSION_PATTERN = Pattern.compile("^0\\.(9[468])\\..*"); private static final String HBASE_PARENT_DIR; private static final String HBASE_TARGET_VERSION = VersionInfo.getVersion(); static { String parentDir = ".."; String tmp = System.getProperty(HBASE_PARENT_DIR_PROP); if (null != tmp) { parentDir = tmp; } HBASE_PARENT_DIR = parentDir; } private static final String HBASE_STAT_FILE = "/tmp/titan-hbase-test-daemon.stat"; private volatile static HBaseStatus HBASE = null; public static String getScriptDirForHBaseVersion(String hv) { return getDirForHBaseVersion(hv, "bin"); } public static String getConfDirForHBaseVersion(String hv) { return getDirForHBaseVersion(hv, "conf"); } public static String getDirForHBaseVersion(String hv, String lastSubdir) { Matcher m = HBASE_SUPPORTED_VERSION_PATTERN.matcher(hv); if (m.matches()) { String minor = m.group(1); String result = String.format("%s%stitan-hbase-0%s/%s/", HBASE_PARENT_DIR, File.separator, minor, lastSubdir); log.debug("Built {} path for HBase version {}: {}", lastSubdir, hv, result); return result; } else { throw new RuntimeException("Unsupported HBase test version " + hv + " does not match pattern " + HBASE_SUPPORTED_VERSION_PATTERN); } } public static ModifiableConfiguration getHBaseConfiguration() { ModifiableConfiguration config = GraphDatabaseConfiguration.buildConfiguration(); config.set(GraphDatabaseConfiguration.STORAGE_BACKEND, "hbase"); config.set(GraphDatabaseConfiguration.CLUSTER_PARTITION, true); config.set(GraphDatabaseConfiguration.TIMESTAMP_PROVIDER, HBaseStoreManager.PREFERRED_TIMESTAMPS); config.set(SimpleBulkPlacementStrategy.CONCURRENT_PARTITIONS, 1); // config.set(GraphDatabaseConfiguration.STORAGE_NS.getName()+"."+HBaseStoreManager.HBASE_CONFIGURATION_NAMESPACE+ // ".hbase.zookeeper.quorum","localhost"); // config.set(GraphDatabaseConfiguration.STORAGE_NS.getName()+"."+HBaseStoreManager.HBASE_CONFIGURATION_NAMESPACE+ // "hbase.zookeeper.property.clientPort",2181); return config; } public static WriteConfiguration getHBaseGraphConfiguration() { return getHBaseConfiguration().getConfiguration(); } /** * Starts the HBase version described by {@link #HBASE_TARGET_VERSION} * * @return a status object describing a successfully-started HBase daemon * @throws IOException * passed-through * @throws RuntimeException * if starting HBase fails for any other reason */ public synchronized static HBaseStatus startHBase() throws IOException { if (HBASE != null) { log.info("HBase already started"); return HBASE; } killIfRunning(); deleteData(); log.info("Starting HBase"); String scriptPath = getScriptDirForHBaseVersion(HBASE_TARGET_VERSION) + "/hbase-daemon.sh"; runCommand(scriptPath, "--config", getConfDirForHBaseVersion(HBASE_TARGET_VERSION), "start", "master"); HBASE = HBaseStatus.write(HBASE_STAT_FILE, HBASE_TARGET_VERSION); registerKillerHook(HBASE); return HBASE; } /** * Check whether {@link #HBASE_STAT_FILE} describes an HBase daemon. If so, * kill it. Otherwise, do nothing. */ public synchronized static void killIfRunning() { HBaseStatus stat = HBaseStatus.read(HBASE_STAT_FILE); if (null == stat) { log.info("HBase is not running"); return; } shutdownHBase(stat); } /** * Delete HBase data under the current working directory. 
*/ private synchronized static void deleteData() { try { // please keep in sync with HBASE_CONFIG_DIR/hbase-site.xml, reading HBase XML config is huge pain. File hbaseRoot = new File("./target/hbase-root"); File zookeeperDataDir = new File("./target/zk-data"); if (hbaseRoot.exists()) { log.info("Deleting {}", hbaseRoot); FileUtils.deleteDirectory(hbaseRoot); } if (zookeeperDataDir.exists()) { log.info("Deleting {}", zookeeperDataDir); FileUtils.deleteDirectory(zookeeperDataDir); } } catch (IOException e) { throw new RuntimeException("Failed to delete old HBase test data directories", e); } } /** * Register a shutdown hook with the JVM that attempts to kill the external * HBase daemon * * @param stat * the HBase daemon to kill */ private static void registerKillerHook(final HBaseStatus stat) { Runtime.getRuntime().addShutdownHook(new Thread() { public void run() { shutdownHBase(stat); } }); } /** * Runs the {@code hbase-daemon.sh stop master} script corresponding to the * HBase version described by the parameter. * * @param stat * the running HBase daemon to stop */ private synchronized static void shutdownHBase(HBaseStatus stat) { log.info("Shutting down HBase..."); // First try graceful shutdown through the script... runCommand(stat.getScriptDir() + "/hbase-daemon.sh", "--config", stat.getConfDir(), "stop", "master"); log.info("Shutdown HBase"); stat.getFile().delete(); log.info("Deleted {}", stat.getFile()); HBASE = null; } /** * Run the parameter as an external process. Returns if the command starts * without throwing an exception and returns exit status 0. Throws an * exception if there's any problem invoking the command or if it does not * return zero exit status. * * Blocks indefinitely while waiting for the command to complete. * * @param argv * passed directly to {@link ProcessBuilder}'s constructor */ private static void runCommand(String... argv) { final String cmd = Joiner.on(" ").join(argv); log.info("Executing {}", cmd); ProcessBuilder pb = new ProcessBuilder(argv); pb.redirectErrorStream(true); Process startup; try { startup = pb.start(); } catch (IOException e) { throw new RuntimeException(e); } StreamLogger sl = new StreamLogger(startup.getInputStream()); sl.setDaemon(true); sl.start(); try { int exitcode = startup.waitFor(); // wait for script to return if (0 == exitcode) { log.info("Command \"{}\" exited with status 0", cmd); } else { throw new RuntimeException("Command \"" + cmd + "\" exited with status " + exitcode); } } catch (InterruptedException e) { throw new RuntimeException(e); } try { sl.join(1000L); } catch (InterruptedException e) { log.warn("Failed to cleanup stdin handler thread after running command \"{}\"", cmd, e); } } /* * This could be retired in favor of ProcessBuilder.Redirect when we move to * source level 1.7. */ private static class StreamLogger extends Thread { private final BufferedReader reader; private static final Logger log = LoggerFactory.getLogger(StreamLogger.class); private StreamLogger(InputStream is) { this.reader = new BufferedReader(new InputStreamReader(is)); } @Override public void run() { String line; try { while (null != (line = reader.readLine())) { log.info("> {}", line); if (Thread.currentThread().isInterrupted()) { break; } } log.info("End of stream."); } catch (IOException e) { log.error("Unexpected IOException while reading stream {}", reader, e); } } } }
0 (true)
titan-hbase-parent_titan-hbase-core_src_test_java_com_thinkaurelius_titan_HBaseStorageSetup.java
453
public class TransportClusterStatsAction extends TransportNodesOperationAction<ClusterStatsRequest, ClusterStatsResponse, TransportClusterStatsAction.ClusterStatsNodeRequest, ClusterStatsNodeResponse> { private static final CommonStatsFlags SHARD_STATS_FLAGS = new CommonStatsFlags(CommonStatsFlags.Flag.Docs, CommonStatsFlags.Flag.Store, CommonStatsFlags.Flag.FieldData, CommonStatsFlags.Flag.FilterCache, CommonStatsFlags.Flag.IdCache, CommonStatsFlags.Flag.Completion, CommonStatsFlags.Flag.Segments, CommonStatsFlags.Flag.Percolate); private final NodeService nodeService; private final IndicesService indicesService; @Inject public TransportClusterStatsAction(Settings settings, ClusterName clusterName, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, NodeService nodeService, IndicesService indicesService) { super(settings, clusterName, threadPool, clusterService, transportService); this.nodeService = nodeService; this.indicesService = indicesService; } @Override protected String executor() { return ThreadPool.Names.MANAGEMENT; } @Override protected String transportAction() { return ClusterStatsAction.NAME; } @Override protected ClusterStatsResponse newResponse(ClusterStatsRequest clusterStatsRequest, AtomicReferenceArray responses) { final List<ClusterStatsNodeResponse> nodeStats = new ArrayList<ClusterStatsNodeResponse>(responses.length()); for (int i = 0; i < responses.length(); i++) { Object resp = responses.get(i); if (resp instanceof ClusterStatsNodeResponse) { nodeStats.add((ClusterStatsNodeResponse) resp); } } return new ClusterStatsResponse(System.currentTimeMillis(), clusterName, clusterService.state().metaData().uuid(), nodeStats.toArray(new ClusterStatsNodeResponse[nodeStats.size()])); } @Override protected ClusterStatsRequest newRequest() { return new ClusterStatsRequest(); } @Override protected ClusterStatsNodeRequest newNodeRequest() { return new ClusterStatsNodeRequest(); } @Override protected ClusterStatsNodeRequest newNodeRequest(String nodeId, ClusterStatsRequest request) { return new ClusterStatsNodeRequest(nodeId, request); } @Override protected ClusterStatsNodeResponse newNodeResponse() { return new ClusterStatsNodeResponse(); } @Override protected ClusterStatsNodeResponse nodeOperation(ClusterStatsNodeRequest nodeRequest) throws ElasticsearchException { NodeInfo nodeInfo = nodeService.info(false, true, false, true, false, false, true, false, true); NodeStats nodeStats = nodeService.stats(CommonStatsFlags.NONE, false, true, true, false, false, true, false, false, false); List<ShardStats> shardsStats = new ArrayList<ShardStats>(); for (String index : indicesService.indices()) { IndexService indexService = indicesService.indexService(index); if (indexService == null) { continue; } for (IndexShard indexShard : indexService) { if (indexShard.routingEntry().active()) { // only report on fully started shards shardsStats.add(new ShardStats(indexShard, SHARD_STATS_FLAGS)); } } } ClusterHealthStatus clusterStatus = null; if (clusterService.state().nodes().localNodeMaster()) { // populate cluster status clusterStatus = ClusterHealthStatus.GREEN; for (IndexRoutingTable indexRoutingTable : clusterService.state().routingTable()) { IndexMetaData indexMetaData = clusterService.state().metaData().index(indexRoutingTable.index()); if (indexRoutingTable == null) { continue; } ClusterIndexHealth indexHealth = new ClusterIndexHealth(indexMetaData, indexRoutingTable); switch (indexHealth.getStatus()) { case RED: clusterStatus = ClusterHealthStatus.RED; 
break; case YELLOW: if (clusterStatus != ClusterHealthStatus.RED) { clusterStatus = ClusterHealthStatus.YELLOW; } break; } } } return new ClusterStatsNodeResponse(nodeInfo.getNode(), clusterStatus, nodeInfo, nodeStats, shardsStats.toArray(new ShardStats[shardsStats.size()])); } @Override protected boolean accumulateExceptions() { return false; } static class ClusterStatsNodeRequest extends NodeOperationRequest { ClusterStatsRequest request; ClusterStatsNodeRequest() { } ClusterStatsNodeRequest(String nodeId, ClusterStatsRequest request) { super(request, nodeId); this.request = request; } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); request = new ClusterStatsRequest(); request.readFrom(in); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); request.writeTo(out); } } }
0 (true)
src_main_java_org_elasticsearch_action_admin_cluster_stats_TransportClusterStatsAction.java
1,531
public class HazelcastConnectionImpl implements HazelcastConnection { /** * Identity generator */ private static AtomicInteger idGen = new AtomicInteger(); /** * Reference to this creator and access to container infrastructure */ final ManagedConnectionImpl managedConnection; /** * this identity */ private final int id; public HazelcastConnectionImpl(ManagedConnectionImpl managedConnectionImpl, Subject subject) { super(); this.managedConnection = managedConnectionImpl; id = idGen.incrementAndGet(); } /* (non-Javadoc) * @see javax.resource.cci.Connection#close() */ public void close() throws ResourceException { managedConnection.log(Level.FINEST, "close"); //important: inform the container! managedConnection.fireConnectionEvent(ConnectionEvent.CONNECTION_CLOSED, this); } public Interaction createInteraction() throws ResourceException { //TODO return null; } /** * @throws NotSupportedException as this is not supported by this resource adapter */ public ResultSetInfo getResultSetInfo() throws NotSupportedException { //as per spec 15.11.3 throw new NotSupportedException(); } public HazelcastTransaction getLocalTransaction() throws ResourceException { managedConnection.log(Level.FINEST, "getLocalTransaction"); return managedConnection.getLocalTransaction(); } public ConnectionMetaData getMetaData() throws ResourceException { return managedConnection.getMetaData(); } @Override public String toString() { return "hazelcast.ConnectionImpl [" + id + "]"; } /** * Method is not exposed to force all clients to go through this connection object and its * methods from {@link HazelcastConnection} * * @return the local hazelcast instance */ private HazelcastInstance getHazelcastInstance() { return managedConnection.getHazelcastInstance(); } /* (non-Javadoc) * @see com.hazelcast.jca.HazelcastConnection#getMap(java.lang.String) */ public <K, V> IMap<K, V> getMap(String name) { return getHazelcastInstance().getMap(name); } /* (non-Javadoc) * @see com.hazelcast.jca.HazelcastConnection#getQueue(java.lang.String) */ public <E> IQueue<E> getQueue(String name) { return getHazelcastInstance().getQueue(name); } /* (non-Javadoc) * @see com.hazelcast.jca.HazelcastConnection#getTopic(java.lang.String) */ public <E> ITopic<E> getTopic(String name) { return getHazelcastInstance().getTopic(name); } /* (non-Javadoc) * @see com.hazelcast.jca.HazelcastConnection#getSet(java.lang.String) */ public <E> ISet<E> getSet(String name) { return getHazelcastInstance().getSet(name); } /* (non-Javadoc) * @see com.hazelcast.jca.HazelcastConnection#getList(java.lang.String) */ public <E> IList<E> getList(String name) { return getHazelcastInstance().getList(name); } /* (non-Javadoc) * @see com.hazelcast.jca.HazelcastConnection#getMultiMap(java.lang.String) */ public <K, V> MultiMap<K, V> getMultiMap(String name) { return getHazelcastInstance().getMultiMap(name); } /* (non-Javadoc) * @see com.hazelcast.jca.HazelcastConnection#getExecutorService(java.lang.String) */ public ExecutorService getExecutorService(String name) { return getHazelcastInstance().getExecutorService(name); } /* (non-Javadoc) * @see com.hazelcast.jca.HazelcastConnection#getAtomicLong(java.lang.String) */ public IAtomicLong getAtomicLong(String name) { return getHazelcastInstance().getAtomicLong(name); } /* (non-Javadoc) * @see com.hazelcast.jca.HazelcastConnection#getCountDownLatch(java.lang.String) */ public ICountDownLatch getCountDownLatch(String name) { return getHazelcastInstance().getCountDownLatch(name); } /* (non-Javadoc) * @see 
com.hazelcast.jca.HazelcastConnection#getSemaphore(java.lang.String) */ public ISemaphore getSemaphore(String name) { return getHazelcastInstance().getSemaphore(name); } public <K, V> TransactionalMap<K, V> getTransactionalMap(String name) { final TransactionContext txContext = this.managedConnection.getTx().getTxContext(); if (txContext == null) { throw new IllegalStateException("Transaction is not active"); } return txContext.getMap(name); } public <E> TransactionalQueue<E> getTransactionalQueue(String name) { final TransactionContext txContext = this.managedConnection.getTx().getTxContext(); if (txContext == null) { throw new IllegalStateException("Transaction is not active"); } return txContext.getQueue(name); } public <K, V> TransactionalMultiMap<K, V> getTransactionalMultiMap(String name) { final TransactionContext txContext = this.managedConnection.getTx().getTxContext(); if (txContext == null) { throw new IllegalStateException("Transaction is not active"); } return txContext.getMultiMap(name); } public <E> TransactionalList<E> getTransactionalList(String name) { final TransactionContext txContext = this.managedConnection.getTx().getTxContext(); if (txContext == null) { throw new IllegalStateException("Transaction is not active"); } return txContext.getList(name); } public <E> TransactionalSet<E> getTransactionalSet(String name) { final TransactionContext txContext = this.managedConnection.getTx().getTxContext(); if (txContext == null) { throw new IllegalStateException("Transaction is not active"); } return txContext.getSet(name); } }
1no label
hazelcast-ra_hazelcast-jca_src_main_java_com_hazelcast_jca_HazelcastConnectionImpl.java
1,076
@Service("blFulfillmentOptionService") @Transactional("blTransactionManager") public class FulfillmentOptionServiceImpl implements FulfillmentOptionService { @Resource(name = "blFulfillmentOptionDao") FulfillmentOptionDao fulfillmentOptionDao; @Override public FulfillmentOption readFulfillmentOptionById(Long fulfillmentOptionId) { return fulfillmentOptionDao.readFulfillmentOptionById(fulfillmentOptionId); } @Override public FulfillmentOption save(FulfillmentOption option) { return fulfillmentOptionDao.save(option); } @Override public List<FulfillmentOption> readAllFulfillmentOptions() { return fulfillmentOptionDao.readAllFulfillmentOptions(); } @Override public List<FulfillmentOption> readAllFulfillmentOptionsByFulfillmentType(FulfillmentType type) { return fulfillmentOptionDao.readAllFulfillmentOptionsByFulfillmentType(type); } }
1no label
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_service_FulfillmentOptionServiceImpl.java
40
public class DummyCreditCardModule extends AbstractModule { @Override public PaymentResponseItem processAuthorize(PaymentContext paymentContext, Money amountToAuthorize, PaymentResponseItem responseItem) throws PaymentException { return createResponse(paymentContext, responseItem); } @Override public PaymentResponseItem processAuthorizeAndDebit(PaymentContext paymentContext, Money amountToDebit, PaymentResponseItem responseItem) throws PaymentException { return createResponse(paymentContext, responseItem); } @Override public PaymentResponseItem processDebit(PaymentContext paymentContext, Money amountToDebit, PaymentResponseItem responseItem) throws PaymentException { return createResponse(paymentContext, responseItem); } @Override public PaymentResponseItem processCredit(PaymentContext paymentContext, Money amountToCredit, PaymentResponseItem responseItem) throws PaymentException { return createResponse(paymentContext, responseItem); } @Override public PaymentResponseItem processVoidPayment(PaymentContext paymentContext, Money amountToVoid, PaymentResponseItem responseItem) throws PaymentException { return createResponse(paymentContext, responseItem); } @Override public PaymentResponseItem processBalance(PaymentContext paymentContext, PaymentResponseItem responseItem) throws PaymentException { return createResponse(paymentContext, responseItem); } @Override public PaymentResponseItem processReverseAuthorize(PaymentContext paymentContext, Money amountToReverseAuthorize, PaymentResponseItem responseItem) throws PaymentException { return createResponse(paymentContext, responseItem); } @Override public PaymentResponseItem processPartialPayment(PaymentContext paymentContext, Money amountToDebit, PaymentResponseItem responseItem) throws PaymentException { throw new PaymentException("partial payment not implemented."); } private PaymentResponseItem createResponse(PaymentContext paymentContext, PaymentResponseItem responseItem) { paymentContext.getPaymentInfo().setReferenceNumber("abc123"); responseItem.setReferenceNumber(paymentContext.getPaymentInfo().getReferenceNumber()); responseItem.setTransactionId(paymentContext.getPaymentInfo().getReferenceNumber()); responseItem.setTransactionSuccess(true); responseItem.setTransactionAmount(paymentContext.getPaymentInfo().getAmount()); responseItem.setCurrency(paymentContext.getPaymentInfo().getCurrency()); return responseItem; } @Override public Boolean isValidCandidate(PaymentInfoType paymentType) { return PaymentInfoType.CREDIT_CARD.equals(paymentType); } }
0true
integration_src_test_java_org_broadleafcommerce_checkout_service_DummyCreditCardModule.java
1,695
public class ByteBufferBytesReference implements BytesReference { private final ByteBuffer buffer; public ByteBufferBytesReference(ByteBuffer buffer) { this.buffer = buffer; } @Override public byte get(int index) { return buffer.get(buffer.position() + index); } @Override public int length() { return buffer.remaining(); } @Override public BytesReference slice(int from, int length) { ByteBuffer dup = buffer.duplicate(); dup.position(buffer.position() + from); dup.limit(buffer.position() + from + length); return new ByteBufferBytesReference(dup); } @Override public StreamInput streamInput() { return new ByteBufferStreamInput(buffer); } @Override public void writeTo(OutputStream os) throws IOException { if (buffer.hasArray()) { os.write(buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining()); } else { byte[] tmp = new byte[8192]; ByteBuffer buf = buffer.duplicate(); while (buf.hasRemaining()) { buf.get(tmp, 0, Math.min(tmp.length, buf.remaining())); os.write(tmp); } } } @Override public byte[] toBytes() { if (!buffer.hasRemaining()) { return BytesRef.EMPTY_BYTES; } byte[] tmp = new byte[buffer.remaining()]; buffer.duplicate().get(tmp); return tmp; } @Override public BytesArray toBytesArray() { if (buffer.hasArray()) { return new BytesArray(buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining()); } return new BytesArray(toBytes()); } @Override public BytesArray copyBytesArray() { return new BytesArray(toBytes()); } @Override public ChannelBuffer toChannelBuffer() { return ChannelBuffers.wrappedBuffer(buffer); } @Override public boolean hasArray() { return buffer.hasArray(); } @Override public byte[] array() { return buffer.array(); } @Override public int arrayOffset() { return buffer.arrayOffset() + buffer.position(); } @Override public int hashCode() { return Helper.bytesHashCode(this); } @Override public boolean equals(Object obj) { return Helper.bytesEqual(this, (BytesReference) obj); } @Override public String toUtf8() { if (!buffer.hasRemaining()) { return ""; } final CharsetDecoder decoder = CharsetUtil.getDecoder(Charsets.UTF_8); final CharBuffer dst = CharBuffer.allocate( (int) ((double) buffer.remaining() * decoder.maxCharsPerByte())); try { CoderResult cr = decoder.decode(buffer, dst, true); if (!cr.isUnderflow()) { cr.throwException(); } cr = decoder.flush(dst); if (!cr.isUnderflow()) { cr.throwException(); } } catch (CharacterCodingException x) { throw new IllegalStateException(x); } return dst.flip().toString(); } @Override public BytesRef toBytesRef() { if (buffer.hasArray()) { return new BytesRef(buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining()); } return new BytesRef(toBytes()); } @Override public BytesRef copyBytesRef() { return new BytesRef(toBytes()); } }
1no label
src_main_java_org_elasticsearch_common_bytes_ByteBufferBytesReference.java
6,231
public class RestClient implements Closeable { private static final ESLogger logger = Loggers.getLogger(RestClient.class); private final RestSpec restSpec; private final CloseableHttpClient httpClient; private final String host; private final int port; private final String esVersion; public RestClient(String host, int port, RestSpec restSpec) throws IOException, RestException { this.restSpec = restSpec; this.httpClient = createHttpClient(); this.host = host; this.port = port; this.esVersion = readVersion(); logger.info("REST client initialized [{}:{}], elasticsearch version: [{}]", host, port, esVersion); } private String readVersion() throws IOException, RestException { //we make a manual call here without using callApi method, mainly because we are initializing //and the randomized context doesn't exist for the current thread (would be used to choose the method otherwise) RestApi restApi = restApi("info"); assert restApi.getPaths().size() == 1; assert restApi.getMethods().size() == 1; RestResponse restResponse = new RestResponse(httpRequestBuilder() .path(restApi.getPaths().get(0)) .method(restApi.getMethods().get(0)).execute()); checkStatusCode(restResponse); Object version = restResponse.evaluate("version.number"); if (version == null) { throw new RuntimeException("elasticsearch version not found in the response"); } return version.toString(); } public String getEsVersion() { return esVersion; } /** * Calls an api with the provided parameters * @throws RestException if the obtained status code is non ok, unless the specific error code needs to be ignored * according to the ignore parameter received as input (which won't get sent to elasticsearch) */ public RestResponse callApi(String apiName, String... params) throws IOException, RestException { if (params.length % 2 != 0) { throw new IllegalArgumentException("The number of params passed must be even but was [" + params.length + "]"); } Map<String, String> paramsMap = Maps.newHashMap(); for (int i = 0; i < params.length; i++) { paramsMap.put(params[i++], params[i]); } return callApi(apiName, paramsMap, null); } /** * Calls an api with the provided parameters and body * @throws RestException if the obtained status code is non ok, unless the specific error code needs to be ignored * according to the ignore parameter received as input (which won't get sent to elasticsearch) */ public RestResponse callApi(String apiName, Map<String, String> params, String body) throws IOException, RestException { List<Integer> ignores = Lists.newArrayList(); Map<String, String> requestParams = null; if (params != null) { //makes a copy of the parameters before modifying them for this specific request requestParams = Maps.newHashMap(params); //ignore is a special parameter supported by the clients, shouldn't be sent to es String ignoreString = requestParams.remove("ignore"); if (Strings.hasLength(ignoreString)) { try { ignores.add(Integer.valueOf(ignoreString)); } catch(NumberFormatException e) { throw new IllegalArgumentException("ignore value should be a number, found [" + ignoreString + "] instead"); } } } HttpRequestBuilder httpRequestBuilder = callApiBuilder(apiName, requestParams, body); logger.debug("calling api [{}]", apiName); HttpResponse httpResponse = httpRequestBuilder.execute(); //http HEAD doesn't support response body // For the few api (exists class of api) that use it we need to accept 404 too if (!httpResponse.supportsBody()) { ignores.add(404); } RestResponse restResponse = new RestResponse(httpResponse); checkStatusCode(restResponse, 
ignores); return restResponse; } private void checkStatusCode(RestResponse restResponse, List<Integer> ignores) throws RestException { //ignore is a catch within the client, to prevent the client from throwing error if it gets non ok codes back if (ignores.contains(restResponse.getStatusCode())) { if (logger.isDebugEnabled()) { logger.debug("ignored non ok status codes {} as requested", ignores); } return; } checkStatusCode(restResponse); } private void checkStatusCode(RestResponse restResponse) throws RestException { if (restResponse.isError()) { throw new RestException("non ok status code [" + restResponse.getStatusCode() + "] returned", restResponse); } } private HttpRequestBuilder callApiBuilder(String apiName, Map<String, String> params, String body) { //create doesn't exist in the spec but is supported in the clients (index with op_type=create) boolean indexCreateApi = "create".equals(apiName); String api = indexCreateApi ? "index" : apiName; RestApi restApi = restApi(api); HttpRequestBuilder httpRequestBuilder = httpRequestBuilder(); if (Strings.hasLength(body)) { if (!restApi.isBodySupported()) { throw new IllegalArgumentException("body is not supported by [" + restApi.getName() + "] api"); } httpRequestBuilder.body(body); } else { if (restApi.isBodyRequired()) { throw new IllegalArgumentException("body is required by [" + restApi.getName() + "] api"); } } //divide params between ones that go within query string and ones that go within path Map<String, String> pathParts = Maps.newHashMap(); if (params != null) { for (Map.Entry<String, String> entry : params.entrySet()) { if (restApi.getPathParts().contains(entry.getKey())) { pathParts.put(entry.getKey(), entry.getValue()); } else { if (!restApi.getParams().contains(entry.getKey())) { throw new IllegalArgumentException("param [" + entry.getKey() + "] not supported in [" + restApi.getName() + "] api"); } httpRequestBuilder.addParam(entry.getKey(), entry.getValue()); } } } if (indexCreateApi) { httpRequestBuilder.addParam("op_type", "create"); } //the http method is randomized (out of the available ones with the chosen api) return httpRequestBuilder.method(RandomizedTest.randomFrom(restApi.getSupportedMethods(pathParts.keySet()))) .path(RandomizedTest.randomFrom(restApi.getFinalPaths(pathParts))); } private RestApi restApi(String apiName) { RestApi restApi = restSpec.getApi(apiName); if (restApi == null) { throw new IllegalArgumentException("rest api [" + apiName + "] doesn't exist in the rest spec"); } return restApi; } protected HttpRequestBuilder httpRequestBuilder() { return new HttpRequestBuilder(httpClient).host(host).port(port); } protected CloseableHttpClient createHttpClient() { return HttpClients.createDefault(); } /** * Closes the REST client and the underlying http client */ public void close() { try { httpClient.close(); } catch(IOException e) { logger.error(e.getMessage(), e); } } }
1no label
src_test_java_org_elasticsearch_test_rest_client_RestClient.java
87
nodeEngine.getExecutionService().schedule(new Runnable() { @Override public void run() { Iterator<ClientEndpoint> iterator = endpoints.values().iterator(); while (iterator.hasNext()) { ClientEndpoint endpoint = iterator.next(); String ownerUuid = endpoint.getPrincipal().getOwnerUuid(); if (uuid.equals(ownerUuid)) { iterator.remove(); destroyEndpoint(endpoint, true); } } } }, ENDPOINT_REMOVE_DELAY_MS, TimeUnit.SECONDS);
0true
hazelcast_src_main_java_com_hazelcast_client_ClientEngineImpl.java
673
clusterService.submitStateUpdateTask("delete_warmer [" + Arrays.toString(request.names()) + "]", new AckedClusterStateUpdateTask() { @Override public boolean mustAck(DiscoveryNode discoveryNode) { return true; } @Override public void onAllNodesAcked(@Nullable Throwable t) { listener.onResponse(new DeleteWarmerResponse(true)); } @Override public void onAckTimeout() { listener.onResponse(new DeleteWarmerResponse(false)); } @Override public TimeValue ackTimeout() { return request.timeout(); } @Override public TimeValue timeout() { return request.masterNodeTimeout(); } @Override public void onFailure(String source, Throwable t) { logger.debug("failed to delete warmer [{}] on indices [{}]", t, Arrays.toString(request.names()), request.indices()); listener.onFailure(t); } @Override public ClusterState execute(ClusterState currentState) { MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData()); boolean globalFoundAtLeastOne = false; for (String index : request.indices()) { IndexMetaData indexMetaData = currentState.metaData().index(index); if (indexMetaData == null) { throw new IndexMissingException(new Index(index)); } IndexWarmersMetaData warmers = indexMetaData.custom(IndexWarmersMetaData.TYPE); if (warmers != null) { List<IndexWarmersMetaData.Entry> entries = Lists.newArrayList(); for (IndexWarmersMetaData.Entry entry : warmers.entries()) { boolean keepWarmer = true; for (String warmer : request.names()) { if (Regex.simpleMatch(warmer, entry.name()) || warmer.equals("_all")) { globalFoundAtLeastOne = true; keepWarmer = false; // don't add it... break; } } if (keepWarmer) { entries.add(entry); } } // a change, update it... if (entries.size() != warmers.entries().size()) { warmers = new IndexWarmersMetaData(entries.toArray(new IndexWarmersMetaData.Entry[entries.size()])); IndexMetaData.Builder indexBuilder = IndexMetaData.builder(indexMetaData).putCustom(IndexWarmersMetaData.TYPE, warmers); mdBuilder.put(indexBuilder); } } } if (!globalFoundAtLeastOne) { throw new IndexWarmerMissingException(request.names()); } if (logger.isInfoEnabled()) { for (String index : request.indices()) { IndexMetaData indexMetaData = currentState.metaData().index(index); if (indexMetaData == null) { throw new IndexMissingException(new Index(index)); } IndexWarmersMetaData warmers = indexMetaData.custom(IndexWarmersMetaData.TYPE); if (warmers != null) { for (IndexWarmersMetaData.Entry entry : warmers.entries()) { for (String warmer : request.names()) { if (Regex.simpleMatch(warmer, entry.name()) || warmer.equals("_all")) { logger.info("[{}] delete warmer [{}]", index, entry.name()); } } } } } } return ClusterState.builder(currentState).metaData(mdBuilder).build(); } @Override public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { } });
1no label
src_main_java_org_elasticsearch_action_admin_indices_warmer_delete_TransportDeleteWarmerAction.java
93
@Retention(RetentionPolicy.RUNTIME) @Target(ElementType.METHOD) public @interface ConsoleCommand { String[] aliases() default {}; String description() default ""; boolean splitInWords() default true; }
0true
commons_src_main_java_com_orientechnologies_common_console_annotation_ConsoleCommand.java
1,072
public class OSQLEngine { private static List<OSQLFunctionFactory> FUNCTION_FACTORIES = null; private static List<OCommandExecutorSQLFactory> EXECUTOR_FACTORIES = null; private static List<OQueryOperatorFactory> OPERATOR_FACTORIES = null; private static List<OCollateFactory> COLLATE_FACTORIES = null; private static OQueryOperator[] SORTED_OPERATORS = null; protected static final OSQLEngine INSTANCE = new OSQLEngine(); private static ClassLoader orientClassLoader = OSQLEngine.class.getClassLoader(); protected OSQLEngine() { } public synchronized OQueryOperator[] getRecordOperators() { if (SORTED_OPERATORS != null) { return SORTED_OPERATORS; } // sort operators, will happen only very few times since we cache the // result final Iterator<OQueryOperatorFactory> ite = getOperatorFactories(); final List<OQueryOperator> operators = new ArrayList<OQueryOperator>(); while (ite.hasNext()) { final OQueryOperatorFactory factory = ite.next(); operators.addAll(factory.getOperators()); } final List<OQueryOperator> sorted = new ArrayList<OQueryOperator>(); final Set<Pair> pairs = new LinkedHashSet<Pair>(); for (final OQueryOperator ca : operators) { for (final OQueryOperator cb : operators) { if (ca != cb) { switch (ca.compare(cb)) { case BEFORE: pairs.add(new Pair(ca, cb)); break; case AFTER: pairs.add(new Pair(cb, ca)); break; } switch (cb.compare(ca)) { case BEFORE: pairs.add(new Pair(cb, ca)); break; case AFTER: pairs.add(new Pair(ca, cb)); break; } } } } boolean added; do { added = false; scan: for (final Iterator<OQueryOperator> it = operators.iterator(); it.hasNext();) { final OQueryOperator candidate = it.next(); for (final Pair pair : pairs) { if (pair.after == candidate) { continue scan; } } sorted.add(candidate); it.remove(); for (final Iterator<Pair> itp = pairs.iterator(); itp.hasNext();) { if (itp.next().before == candidate) { itp.remove(); } } added = true; } } while (added); if (!operators.isEmpty()) { throw new OException("Unvalid sorting. " + OCollections.toString(pairs)); } SORTED_OPERATORS = sorted.toArray(new OQueryOperator[sorted.size()]); return SORTED_OPERATORS; } public static void registerOperator(final OQueryOperator iOperator) { ODynamicSQLElementFactory.OPERATORS.add(iOperator); SORTED_OPERATORS = null; // clear cache } public void registerFunction(final String iName, final OSQLFunction iFunction) { ODynamicSQLElementFactory.FUNCTIONS.put(iName.toUpperCase(Locale.ENGLISH), iFunction); } public void registerFunction(final String iName, final Class<? 
extends OSQLFunction> iFunctionClass) { ODynamicSQLElementFactory.FUNCTIONS.put(iName.toUpperCase(Locale.ENGLISH), iFunctionClass); } public OSQLFunction getFunction(String iFunctionName) { iFunctionName = iFunctionName.toUpperCase(Locale.ENGLISH); if (iFunctionName.equalsIgnoreCase("any") || iFunctionName.equalsIgnoreCase("all")) // SPECIAL FUNCTIONS return null; final Iterator<OSQLFunctionFactory> ite = getFunctionFactories(); while (ite.hasNext()) { final OSQLFunctionFactory factory = ite.next(); if (factory.hasFunction(iFunctionName)) { return factory.createFunction(iFunctionName); } } throw new OCommandSQLParsingException("No function for name " + iFunctionName + ", available names are : " + OCollections.toString(getFunctionNames())); } public void unregisterFunction(String iName) { iName = iName.toUpperCase(Locale.ENGLISH); ODynamicSQLElementFactory.FUNCTIONS.remove(iName); } /** * @return Iterator of all function factories */ public static synchronized Iterator<OSQLFunctionFactory> getFunctionFactories() { if (FUNCTION_FACTORIES == null) { final Iterator<OSQLFunctionFactory> ite = lookupProviderWithOrientClassLoader(OSQLFunctionFactory.class, orientClassLoader); final List<OSQLFunctionFactory> factories = new ArrayList<OSQLFunctionFactory>(); while (ite.hasNext()) { factories.add(ite.next()); } FUNCTION_FACTORIES = Collections.unmodifiableList(factories); } return FUNCTION_FACTORIES.iterator(); } /** * @return Iterator of all function factories */ public static synchronized Iterator<OCollateFactory> getCollateFactories() { if (COLLATE_FACTORIES == null) { final Iterator<OCollateFactory> ite = lookupProviderWithOrientClassLoader(OCollateFactory.class, orientClassLoader); final List<OCollateFactory> factories = new ArrayList<OCollateFactory>(); while (ite.hasNext()) { factories.add(ite.next()); } COLLATE_FACTORIES = Collections.unmodifiableList(factories); } return COLLATE_FACTORIES.iterator(); } /** * @return Iterator of all operator factories */ public static synchronized Iterator<OQueryOperatorFactory> getOperatorFactories() { if (OPERATOR_FACTORIES == null) { final Iterator<OQueryOperatorFactory> ite = lookupProviderWithOrientClassLoader(OQueryOperatorFactory.class, orientClassLoader); final List<OQueryOperatorFactory> factories = new ArrayList<OQueryOperatorFactory>(); while (ite.hasNext()) { factories.add(ite.next()); } OPERATOR_FACTORIES = Collections.unmodifiableList(factories); } return OPERATOR_FACTORIES.iterator(); } /** * @return Iterator of all command factories */ public static synchronized Iterator<OCommandExecutorSQLFactory> getCommandFactories() { if (EXECUTOR_FACTORIES == null) { final Iterator<OCommandExecutorSQLFactory> ite = lookupProviderWithOrientClassLoader(OCommandExecutorSQLFactory.class, orientClassLoader); final List<OCommandExecutorSQLFactory> factories = new ArrayList<OCommandExecutorSQLFactory>(); while (ite.hasNext()) { try { factories.add(ite.next()); } catch (Exception e) { OLogManager.instance().warn(null, "Cannot load OCommandExecutorSQLFactory instance from service registry", e); } } EXECUTOR_FACTORIES = Collections.unmodifiableList(factories); } return EXECUTOR_FACTORIES.iterator(); } /** * Iterates on all factories and append all function names. * * @return Set of all function names. 
*/ public static Set<String> getFunctionNames() { final Set<String> types = new HashSet<String>(); final Iterator<OSQLFunctionFactory> ite = getFunctionFactories(); while (ite.hasNext()) { types.addAll(ite.next().getFunctionNames()); } return types; } /** * Iterates on all factories and append all collate names. * * @return Set of all colate names. */ public static Set<String> getCollateNames() { final Set<String> types = new HashSet<String>(); final Iterator<OCollateFactory> ite = getCollateFactories(); while (ite.hasNext()) { types.addAll(ite.next().getNames()); } return types; } /** * Iterates on all factories and append all command names. * * @return Set of all command names. */ public static Set<String> getCommandNames() { final Set<String> types = new HashSet<String>(); final Iterator<OCommandExecutorSQLFactory> ite = getCommandFactories(); while (ite.hasNext()) { types.addAll(ite.next().getCommandNames()); } return types; } /** * Scans for factory plug-ins on the application class path. This method is needed because the application class path can * theoretically change, or additional plug-ins may become available. Rather than re-scanning the classpath on every invocation of * the API, the class path is scanned automatically only on the first invocation. Clients can call this method to prompt a * re-scan. Thus this method need only be invoked by sophisticated applications which dynamically make new plug-ins available at * runtime. */ public static synchronized void scanForPlugins() { // clear cache, will cause a rescan on next getFunctionFactories call FUNCTION_FACTORIES = null; } public OCommandExecutorSQLAbstract getCommand(final String candidate) { final Set<String> names = getCommandNames(); String commandName = candidate; boolean found = names.contains(commandName); int pos = -1; while (!found) { pos = OStringSerializerHelper.getLowerIndexOf(candidate, pos + 1, " ", "\n", "\r"); if (pos > -1) { commandName = candidate.substring(0, pos); found = names.contains(commandName); } else { break; } } if (found) { final Iterator<OCommandExecutorSQLFactory> ite = getCommandFactories(); while (ite.hasNext()) { final OCommandExecutorSQLFactory factory = ite.next(); if (factory.getCommandNames().contains(commandName)) { return factory.createCommand(commandName); } } } return null; } public OSQLFilter parseCondition(final String iText, final OCommandContext iContext, final String iFilterKeyword) { return new OSQLFilter(iText, iContext, iFilterKeyword); } public OSQLTarget parseTarget(final String iText, final OCommandContext iContext, final String iFilterKeyword) { return new OSQLTarget(iText, iContext, iFilterKeyword); } public static Object foreachRecord(final OCallable<Object, OIdentifiable> iCallable, final Object iCurrent, final OCommandContext iContext) { if (iCurrent == null) return null; if (iContext != null && !iContext.checkTimeout()) return null; if (OMultiValue.isMultiValue(iCurrent) || iCurrent instanceof Iterator) { final OMultiCollectionIterator<Object> result = new OMultiCollectionIterator<Object>(); for (Object o : OMultiValue.getMultiValueIterable(iCurrent)) { if (iContext != null && !iContext.checkTimeout()) return null; if (OMultiValue.isMultiValue(o) || o instanceof Iterator) { for (Object inner : OMultiValue.getMultiValueIterable(o)) { result.add(iCallable.call((OIdentifiable) inner)); } } else result.add(iCallable.call((OIdentifiable) o)); } return result; } else if (iCurrent instanceof OIdentifiable) return iCallable.call((OIdentifiable) iCurrent); return null; } 
public static OSQLEngine getInstance() { return INSTANCE; } /** * internal use only, to sort operators. */ private static final class Pair { final OQueryOperator before; final OQueryOperator after; public Pair(final OQueryOperator before, final OQueryOperator after) { this.before = before; this.after = after; } @Override public boolean equals(final Object obj) { if (obj instanceof Pair) { final Pair that = (Pair) obj; return before == that.before && after == that.after; } return false; } @Override public int hashCode() { return System.identityHashCode(before) + 31 * System.identityHashCode(after); } @Override public String toString() { return before + " > " + after; } } public Set<ORID> parseRIDTarget(final ODatabaseRecord database, final String iTarget) { final Set<ORID> ids; if (iTarget.startsWith("(")) { // SUB-QUERY final List<OIdentifiable> result = database.query(new OSQLSynchQuery<Object>(iTarget.substring(1, iTarget.length() - 1))); if (result == null || result.isEmpty()) ids = Collections.emptySet(); else { ids = new HashSet<ORID>((int) (result.size() * 1.3)); for (OIdentifiable aResult : result) ids.add(aResult.getIdentity()); } } else if (iTarget.startsWith("[")) { // COLLECTION OF RIDS final String[] idsAsStrings = iTarget.substring(1, iTarget.length() - 1).split(","); ids = new HashSet<ORID>((int) (idsAsStrings.length * 1.3)); for (String idsAsString : idsAsStrings) ids.add(new ORecordId(idsAsString)); } else // SINGLE RID ids = Collections.<ORID> singleton(new ORecordId(iTarget)); return ids; } public static OCollate getCollate(final String name) { for (Iterator<OCollateFactory> iter = getCollateFactories(); iter.hasNext();) { OCollateFactory f = iter.next(); final OCollate c = f.getCollate(name); if (c != null) return c; } return null; } }
1no label
core_src_main_java_com_orientechnologies_orient_core_sql_OSQLEngine.java
68
class AssignToForProposal extends LocalProposal { protected DocumentChange createChange(IDocument document, Node expanse, Integer stopIndex) { DocumentChange change = new DocumentChange("Assign to For", document); change.setEdit(new MultiTextEdit()); change.addEdit(new InsertEdit(offset, "for (" + initialName + " in ")); String terminal = expanse.getEndToken().getText(); if (!terminal.equals(";")) { change.addEdit(new InsertEdit(stopIndex+1, ") {}")); exitPos = stopIndex+4; } else { change.addEdit(new ReplaceEdit(stopIndex, 1, ") {}")); exitPos = stopIndex+3; } return change; } public AssignToForProposal(Tree.CompilationUnit cu, Node node, int currentOffset) { super(cu, node, currentOffset); } protected void addLinkedPositions(IDocument document, Unit unit) throws BadLocationException { // ProposalPosition typePosition = // new ProposalPosition(document, offset, 5, 1, // getSupertypeProposals(offset, unit, // type, true, "value")); ProposalPosition namePosition = new ProposalPosition(document, offset+5, initialName.length(), 0, getNameProposals(offset+5, 0, nameProposals)); // LinkedMode.addLinkedPosition(linkedModeModel, typePosition); LinkedMode.addLinkedPosition(linkedModeModel, namePosition); } @Override String[] computeNameProposals(Node expression) { return Nodes.nameProposals(expression, true); } @Override public String getDisplayString() { return "Assign expression to 'for' loop"; } @Override boolean isEnabled(ProducedType resultType) { return resultType!=null && rootNode.getUnit().isIterableType(resultType); } static void addAssignToForProposal(Tree.CompilationUnit cu, Collection<ICompletionProposal> proposals, Node node, int currentOffset) { AssignToForProposal prop = new AssignToForProposal(cu, node, currentOffset); if (prop.isEnabled()) { proposals.add(prop); } } }
0true
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_AssignToForProposal.java
298
@ResponseStatus(value= HttpStatus.FORBIDDEN, reason="Access is denied") public class SecurityServiceException extends ServiceException { public SecurityServiceException() { super(); } public SecurityServiceException(Throwable cause) { super(cause); } public SecurityServiceException(String message) { super(message); } public SecurityServiceException(String message, Throwable cause) { super(message, cause); } }
0true
common_src_main_java_org_broadleafcommerce_common_exception_SecurityServiceException.java
2,011
@Service("blCustomerPhoneService") public class CustomerPhoneServiceImpl implements CustomerPhoneService { @Resource(name="blCustomerPhoneDao") protected CustomerPhoneDao customerPhoneDao; public CustomerPhone saveCustomerPhone(CustomerPhone customerPhone) { List<CustomerPhone> activeCustomerPhones = readActiveCustomerPhonesByCustomerId(customerPhone.getCustomer().getId()); if (activeCustomerPhones != null && activeCustomerPhones.isEmpty()) { customerPhone.getPhone().setDefault(true); } else { // if parameter customerPhone is set as default, unset all other default phones if (customerPhone.getPhone().isDefault()) { for (CustomerPhone activeCustomerPhone : activeCustomerPhones) { if (activeCustomerPhone.getId() != customerPhone.getId() && activeCustomerPhone.getPhone().isDefault()) { activeCustomerPhone.getPhone().setDefault(false); customerPhoneDao.save(activeCustomerPhone); } } } } return customerPhoneDao.save(customerPhone); } public List<CustomerPhone> readActiveCustomerPhonesByCustomerId(Long customerId) { return customerPhoneDao.readActiveCustomerPhonesByCustomerId(customerId); } public CustomerPhone readCustomerPhoneById(Long customerPhoneId) { return customerPhoneDao.readCustomerPhoneById(customerPhoneId); } public void makeCustomerPhoneDefault(Long customerPhoneId, Long customerId) { customerPhoneDao.makeCustomerPhoneDefault(customerPhoneId, customerId); } public void deleteCustomerPhoneById(Long customerPhoneId){ customerPhoneDao.deleteCustomerPhoneById(customerPhoneId); } public CustomerPhone findDefaultCustomerPhone(Long customerId) { return customerPhoneDao.findDefaultCustomerPhone(customerId); } public List<CustomerPhone> readAllCustomerPhonesByCustomerId(Long customerId) { return customerPhoneDao.readAllCustomerPhonesByCustomerId(customerId); } public CustomerPhone create() { return customerPhoneDao.create(); } }
1no label
core_broadleaf-profile_src_main_java_org_broadleafcommerce_profile_core_service_CustomerPhoneServiceImpl.java
342
public class NodesShutdownAction extends ClusterAction<NodesShutdownRequest, NodesShutdownResponse, NodesShutdownRequestBuilder> { public static final NodesShutdownAction INSTANCE = new NodesShutdownAction(); public static final String NAME = "cluster/nodes/shutdown"; private NodesShutdownAction() { super(NAME); } @Override public NodesShutdownResponse newResponse() { return new NodesShutdownResponse(); } @Override public NodesShutdownRequestBuilder newRequestBuilder(ClusterAdminClient client) { return new NodesShutdownRequestBuilder(client); } }
0true
src_main_java_org_elasticsearch_action_admin_cluster_node_shutdown_NodesShutdownAction.java
81
public final class ClientEndpoint implements Client { private final ClientEngineImpl clientEngine; private final Connection conn; private final ConcurrentMap<String, TransactionContext> transactionContextMap = new ConcurrentHashMap<String, TransactionContext>(); private final List<Runnable> removeListenerActions = Collections.synchronizedList(new LinkedList<Runnable>()); private final SocketAddress socketAddress; private String uuid; private LoginContext loginContext; private ClientPrincipal principal; private boolean firstConnection; private volatile boolean authenticated; ClientEndpoint(ClientEngineImpl clientEngine, Connection conn, String uuid) { this.clientEngine = clientEngine; this.conn = conn; if (conn instanceof TcpIpConnection) { TcpIpConnection tcpIpConnection = (TcpIpConnection) conn; socketAddress = tcpIpConnection.getSocketChannelWrapper().socket().getRemoteSocketAddress(); } else { socketAddress = null; } this.uuid = uuid; } Connection getConnection() { return conn; } @Override public String getUuid() { return uuid; } public boolean live() { return conn.live(); } void setLoginContext(LoginContext loginContext) { this.loginContext = loginContext; } public Subject getSubject() { return loginContext != null ? loginContext.getSubject() : null; } public boolean isFirstConnection() { return firstConnection; } void authenticated(ClientPrincipal principal, boolean firstConnection) { this.principal = principal; this.uuid = principal.getUuid(); this.firstConnection = firstConnection; this.authenticated = true; } public boolean isAuthenticated() { return authenticated; } public ClientPrincipal getPrincipal() { return principal; } @Override public InetSocketAddress getSocketAddress() { return (InetSocketAddress) socketAddress; } @Override public ClientType getClientType() { switch (conn.getType()) { case JAVA_CLIENT: return ClientType.JAVA; case CSHARP_CLIENT: return ClientType.CSHARP; case CPP_CLIENT: return ClientType.CPP; case PYTHON_CLIENT: return ClientType.PYTHON; case RUBY_CLIENT: return ClientType.RUBY; case BINARY_CLIENT: return ClientType.OTHER; default: throw new IllegalArgumentException("Invalid connection type: " + conn.getType()); } } public TransactionContext getTransactionContext(String txnId) { final TransactionContext transactionContext = transactionContextMap.get(txnId); if (transactionContext == null) { throw new TransactionException("No transaction context found for txnId:" + txnId); } return transactionContext; } public void setTransactionContext(TransactionContext transactionContext) { transactionContextMap.put(transactionContext.getTxnId(), transactionContext); } public void removeTransactionContext(String txnId) { transactionContextMap.remove(txnId); } public void setListenerRegistration(final String service, final String topic, final String id) { removeListenerActions.add(new Runnable() { @Override public void run() { EventService eventService = clientEngine.getEventService(); eventService.deregisterListener(service, topic, id); } }); } public void setDistributedObjectListener(final String id) { removeListenerActions.add(new Runnable() { @Override public void run() { clientEngine.getProxyService().removeProxyListener(id); } }); } public void clearAllListeners() { for (Runnable removeAction : removeListenerActions) { try { removeAction.run(); } catch (Exception e) { getLogger().warning("Exception during destroy action", e); } } removeListenerActions.clear(); } void destroy() throws LoginException { for (Runnable removeAction : removeListenerActions) { try { 
removeAction.run(); } catch (Exception e) { getLogger().warning("Exception during destroy action", e); } } LoginContext lc = loginContext; if (lc != null) { lc.logout(); } for (TransactionContext context : transactionContextMap.values()) { Transaction transaction = TransactionAccessor.getTransaction(context); if (context.isXAManaged() && transaction.getState() == PREPARED) { TransactionManagerServiceImpl transactionManager = (TransactionManagerServiceImpl) clientEngine.getTransactionManagerService(); transactionManager.addTxBackupLogForClientRecovery(transaction); } else { try { context.rollbackTransaction(); } catch (HazelcastInstanceNotActiveException e) { getLogger().finest(e); } catch (Exception e) { getLogger().warning(e); } } } authenticated = false; } private ILogger getLogger() { return clientEngine.getLogger(getClass()); } public void sendResponse(Object response, int callId) { boolean isError = false; Object clientResponseObject; if (response == null) { clientResponseObject = ClientEngineImpl.NULL; } else if (response instanceof Throwable) { isError = true; ClientExceptionConverter converter = ClientExceptionConverters.get(getClientType()); clientResponseObject = converter.convert((Throwable) response); } else { clientResponseObject = response; } ClientResponse clientResponse = new ClientResponse(clientEngine.toData(clientResponseObject), isError, callId); clientEngine.sendResponse(this, clientResponse); } public void sendEvent(Object event, int callId) { Data data = clientEngine.toData(event); clientEngine.sendResponse(this, new ClientResponse(data, callId, true)); } @Override public String toString() { StringBuilder sb = new StringBuilder("ClientEndpoint{"); sb.append("conn=").append(conn); sb.append(", uuid='").append(uuid).append('\''); sb.append(", firstConnection=").append(firstConnection); sb.append(", authenticated=").append(authenticated); sb.append('}'); return sb.toString(); } }
1no label
hazelcast_src_main_java_com_hazelcast_client_ClientEndpoint.java
405
snapshotsService.addListener(new SnapshotsService.SnapshotCompletionListener() { SnapshotId snapshotId = new SnapshotId(request.repository(), request.snapshot()); @Override public void onSnapshotCompletion(SnapshotId snapshotId, SnapshotInfo snapshot) { if (this.snapshotId.equals(snapshotId)) { listener.onResponse(new CreateSnapshotResponse(snapshot)); snapshotsService.removeListener(this); } } @Override public void onSnapshotFailure(SnapshotId snapshotId, Throwable t) { if (this.snapshotId.equals(snapshotId)) { listener.onFailure(t); snapshotsService.removeListener(this); } } });
0true
src_main_java_org_elasticsearch_action_admin_cluster_snapshots_create_TransportCreateSnapshotAction.java
21
}), new Function<ByteEntry, Vertex>() { @Override public Vertex apply(@Nullable ByteEntry entry) { return new ByteVertex(entry.key.getLong(8), tx); } });
0true
titan-test_src_main_java_com_thinkaurelius_titan_TestByteBuffer.java
239
service.submitToAllMembers(callable, new MultiExecutionCallback() { public void onResponse(Member member, Object value) { if (value.equals(msg + AppendCallable.APPENDAGE)) { responseLatch.countDown(); } } public void onComplete(Map<Member, Object> values) { for (Member member : values.keySet()) { Object value = values.get(member); if (value.equals(msg + AppendCallable.APPENDAGE)) { completeLatch.countDown(); } } } });
0true
hazelcast-client_src_test_java_com_hazelcast_client_executor_ClientExecutorServiceSubmitTest.java
1,771
public class AdornedTargetRetrieval { private PersistencePackage persistencePackage; private PersistencePerspective persistencePerspective; private Entity entity; private AdornedTargetList adornedTargetList; private Map<String, FieldMetadata> mergedProperties; private List<Serializable> records; private int index; private List<FilterMapping> filterMappings; private CriteriaTransferObject cto; // This constructor is used by the update method public AdornedTargetRetrieval(PersistencePackage persistencePackage, Entity entity, AdornedTargetList adornedTargetList) { this(persistencePackage, adornedTargetList, new CriteriaTransferObject()); this.entity = entity; } // This constructor is used by the fetch method public AdornedTargetRetrieval(PersistencePackage persistencePackage, AdornedTargetList adornedTargetList, CriteriaTransferObject cto) { this.persistencePackage = persistencePackage; this.persistencePerspective = persistencePackage.getPersistencePerspective(); this.adornedTargetList = adornedTargetList; this.cto = cto; } public Map<String, FieldMetadata> getMergedProperties() { return mergedProperties; } public List<Serializable> getRecords() { return records; } public int getIndex() { return index; } public List<FilterMapping> getFilterMappings() { return filterMappings; } public AdornedTargetRetrieval invokeForFetch() throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException, FieldNotAvailableException, NoSuchFieldException { invokeInternal(); return this; } public AdornedTargetRetrieval invokeForUpdate() throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException, FieldNotAvailableException, NoSuchFieldException { FilterAndSortCriteria filterCriteria = cto.get(adornedTargetList.getCollectionFieldName()); filterCriteria.setFilterValue(entity.findProperty(adornedTargetList.getLinkedObjectPath() + "." + adornedTargetList.getLinkedIdProperty()).getValue()); invokeInternal(); index = 0; Long myEntityId = Long.valueOf(entity.findProperty(adornedTargetList.getTargetObjectPath() + "." + adornedTargetList.getTargetIdProperty()).getValue()); FieldManager fieldManager = getFieldManager(); for (Serializable record : records) { Long targetId = (Long) fieldManager.getFieldValue(record, adornedTargetList.getTargetObjectPath() + "." 
+ adornedTargetList.getTargetIdProperty()); if (myEntityId.equals(targetId)) { break; } index++; } return this; } private void invokeInternal() throws ClassNotFoundException { if (adornedTargetList.getSortField() != null) { FilterAndSortCriteria sortCriteria = cto.get(adornedTargetList.getSortField()); sortCriteria.setSortAscending(adornedTargetList.getSortAscending()); } Class<?>[] entities = persistenceManager.getPolymorphicEntities(adornedTargetList .getAdornedTargetEntityClassname()); mergedProperties = persistenceManager.getDynamicEntityDao().getMergedProperties( adornedTargetList.getAdornedTargetEntityClassname(), entities, null, new String[]{}, new ForeignKey[]{}, MergedPropertyType.ADORNEDTARGETLIST, persistencePerspective.getPopulateToOneFields(), persistencePerspective.getIncludeFields(), persistencePerspective.getExcludeFields(), persistencePerspective.getConfigurationKey(), "" ); filterMappings = getAdornedTargetFilterMappings(persistencePerspective, cto, mergedProperties, adornedTargetList); String ceilingEntityFullyQualifiedClassname = persistencePackage.getCeilingEntityFullyQualifiedClassname(); Class<?>[] entities2 = persistenceManager.getPolymorphicEntities(ceilingEntityFullyQualifiedClassname); Map<String, FieldMetadata> mergedPropertiesTarget = persistenceManager.getDynamicEntityDao() .getMergedProperties( ceilingEntityFullyQualifiedClassname, entities2, null, persistencePerspective.getAdditionalNonPersistentProperties(), persistencePerspective.getAdditionalForeignKeys(), MergedPropertyType.PRIMARY, persistencePerspective.getPopulateToOneFields(), persistencePerspective.getIncludeFields(), persistencePerspective.getExcludeFields(), persistencePerspective.getConfigurationKey(), "" ); // We need to make sure that the target merged properties have the target object path prefix Map<String, FieldMetadata> convertedMergedPropertiesTarget = new HashMap<String, FieldMetadata>(); String prefix = adornedTargetList.getTargetObjectPath(); for (Entry<String, FieldMetadata> entry : mergedPropertiesTarget.entrySet()) { convertedMergedPropertiesTarget.put(prefix + "." + entry.getKey(), entry.getValue()); } // We also need to make sure that the cto filter and sort criteria have the prefix Map<String, FilterAndSortCriteria> convertedCto = new HashMap<String, FilterAndSortCriteria>(); for (Entry<String, FilterAndSortCriteria> entry : cto.getCriteriaMap().entrySet()) { if (adornedTargetList.getSortField() != null && entry.getKey().equals(adornedTargetList.getSortField())) { convertedCto.put(entry.getKey(), entry.getValue()); } else { convertedCto.put(prefix + "." + entry.getKey(), entry.getValue()); } } cto.setCriteriaMap(convertedCto); List<FilterMapping> filterMappings2 = getBasicFilterMappings(persistencePerspective, cto, convertedMergedPropertiesTarget, ceilingEntityFullyQualifiedClassname); for (FilterMapping fm : filterMappings2) { fm.setInheritedFromClass(entities[0]); } filterMappings.addAll(filterMappings2); records = getPersistentRecords(adornedTargetList.getAdornedTargetEntityClassname(), filterMappings, cto.getFirstResult(), cto.getMaxResults()); } }
1no label
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_service_persistence_module_AdornedTargetListPersistenceModule.java
140
abstract class Striped64 extends Number { /* * This class maintains a lazily-initialized table of atomically * updated variables, plus an extra "base" field. The table size * is a power of two. Indexing uses masked per-thread hash codes. * Nearly all declarations in this class are package-private, * accessed directly by subclasses. * * Table entries are of class Cell; a variant of AtomicLong padded * to reduce cache contention on most processors. Padding is * overkill for most Atomics because they are usually irregularly * scattered in memory and thus don't interfere much with each * other. But Atomic objects residing in arrays will tend to be * placed adjacent to each other, and so will most often share * cache lines (with a huge negative performance impact) without * this precaution. * * In part because Cells are relatively large, we avoid creating * them until they are needed. When there is no contention, all * updates are made to the base field. Upon first contention (a * failed CAS on base update), the table is initialized to size 2. * The table size is doubled upon further contention until * reaching the nearest power of two greater than or equal to the * number of CPUS. Table slots remain empty (null) until they are * needed. * * A single spinlock ("busy") is used for initializing and * resizing the table, as well as populating slots with new Cells. * There is no need for a blocking lock; when the lock is not * available, threads try other slots (or the base). During these * retries, there is increased contention and reduced locality, * which is still better than alternatives. * * Per-thread hash codes are initialized to random values. * Contention and/or table collisions are indicated by failed * CASes when performing an update operation (see method * retryUpdate). Upon a collision, if the table size is less than * the capacity, it is doubled in size unless some other thread * holds the lock. If a hashed slot is empty, and lock is * available, a new Cell is created. Otherwise, if the slot * exists, a CAS is tried. Retries proceed by "double hashing", * using a secondary hash (Marsaglia XorShift) to try to find a * free slot. * * The table size is capped because, when there are more threads * than CPUs, supposing that each thread were bound to a CPU, * there would exist a perfect hash function mapping threads to * slots that eliminates collisions. When we reach capacity, we * search for this mapping by randomly varying the hash codes of * colliding threads. Because search is random, and collisions * only become known via CAS failures, convergence can be slow, * and because threads are typically not bound to CPUS forever, * may not occur at all. However, despite these limitations, * observed contention rates are typically low in these cases. * * It is possible for a Cell to become unused when threads that * once hashed to it terminate, as well as in the case where * doubling the table causes no thread to hash to it under * expanded mask. We do not try to detect or remove such cells, * under the assumption that for long-running instances, observed * contention levels will recur, so the cells will eventually be * needed again; and for short-lived ones, it does not matter. */ /** * Padded variant of AtomicLong supporting only raw accesses plus CAS. * The value field is placed between pads, hoping that the JVM doesn't * reorder them. * * JVM intrinsics note: It would be possible to use a release-only * form of CAS here, if it were provided. 
*/ static final class Cell { volatile long p0, p1, p2, p3, p4, p5, p6; volatile long value; volatile long q0, q1, q2, q3, q4, q5, q6; Cell(long x) { value = x; } final boolean cas(long cmp, long val) { return UNSAFE.compareAndSwapLong(this, valueOffset, cmp, val); } // Unsafe mechanics private static final sun.misc.Unsafe UNSAFE; private static final long valueOffset; static { try { UNSAFE = getUnsafe(); Class<?> ak = Cell.class; valueOffset = UNSAFE.objectFieldOffset (ak.getDeclaredField("value")); } catch (Exception e) { throw new Error(e); } } } /** * Holder for the thread-local hash code. The code is initially * random, but may be set to a different value upon collisions. */ static final class HashCode { static final Random rng = new Random(); int code; HashCode() { int h = rng.nextInt(); // Avoid zero to allow xorShift rehash code = (h == 0) ? 1 : h; } } /** * The corresponding ThreadLocal class */ static final class ThreadHashCode extends ThreadLocal<HashCode> { public HashCode initialValue() { return new HashCode(); } } /** * Static per-thread hash codes. Shared across all instances to * reduce ThreadLocal pollution and because adjustments due to * collisions in one table are likely to be appropriate for * others. */ static final ThreadHashCode threadHashCode = new ThreadHashCode(); /** Number of CPUS, to place bound on table size */ static final int NCPU = Runtime.getRuntime().availableProcessors(); /** * Table of cells. When non-null, size is a power of 2. */ transient volatile Cell[] cells; /** * Base value, used mainly when there is no contention, but also as * a fallback during table initialization races. Updated via CAS. */ transient volatile long base; /** * Spinlock (locked via CAS) used when resizing and/or creating Cells. */ transient volatile int busy; /** * Package-private default constructor */ Striped64() { } /** * CASes the base field. */ final boolean casBase(long cmp, long val) { return UNSAFE.compareAndSwapLong(this, baseOffset, cmp, val); } /** * CASes the busy field from 0 to 1 to acquire lock. */ final boolean casBusy() { return UNSAFE.compareAndSwapInt(this, busyOffset, 0, 1); } /** * Computes the function of current and new value. Subclasses * should open-code this update function for most uses, but the * virtualized form is needed within retryUpdate. * * @param currentValue the current value (of either base or a cell) * @param newValue the argument from a user update call * @return result of the update function */ abstract long fn(long currentValue, long newValue); /** * Handles cases of updates involving initialization, resizing, * creating new Cells, and/or contention. See above for * explanation. This method suffers the usual non-modularity * problems of optimistic retry code, relying on rechecked sets of * reads. 
* * @param x the value * @param hc the hash code holder * @param wasUncontended false if CAS failed before call */ final void retryUpdate(long x, HashCode hc, boolean wasUncontended) { int h = hc.code; boolean collide = false; // True if last slot nonempty for (;;) { Cell[] as; Cell a; int n; long v; if ((as = cells) != null && (n = as.length) > 0) { if ((a = as[(n - 1) & h]) == null) { if (busy == 0) { // Try to attach new Cell Cell r = new Cell(x); // Optimistically create if (busy == 0 && casBusy()) { boolean created = false; try { // Recheck under lock Cell[] rs; int m, j; if ((rs = cells) != null && (m = rs.length) > 0 && rs[j = (m - 1) & h] == null) { rs[j] = r; created = true; } } finally { busy = 0; } if (created) break; continue; // Slot is now non-empty } } collide = false; } else if (!wasUncontended) // CAS already known to fail wasUncontended = true; // Continue after rehash else if (a.cas(v = a.value, fn(v, x))) break; else if (n >= NCPU || cells != as) collide = false; // At max size or stale else if (!collide) collide = true; else if (busy == 0 && casBusy()) { try { if (cells == as) { // Expand table unless stale Cell[] rs = new Cell[n << 1]; for (int i = 0; i < n; ++i) rs[i] = as[i]; cells = rs; } } finally { busy = 0; } collide = false; continue; // Retry with expanded table } h ^= h << 13; // Rehash h ^= h >>> 17; h ^= h << 5; } else if (busy == 0 && cells == as && casBusy()) { boolean init = false; try { // Initialize table if (cells == as) { Cell[] rs = new Cell[2]; rs[h & 1] = new Cell(x); cells = rs; init = true; } } finally { busy = 0; } if (init) break; } else if (casBase(v = base, fn(v, x))) break; // Fall back on using base } hc.code = h; // Record index for next time } /** * Sets base and all cells to the given value. */ final void internalReset(long initialValue) { Cell[] as = cells; base = initialValue; if (as != null) { int n = as.length; for (int i = 0; i < n; ++i) { Cell a = as[i]; if (a != null) a.value = initialValue; } } } // Unsafe mechanics private static final sun.misc.Unsafe UNSAFE; private static final long baseOffset; private static final long busyOffset; static { try { UNSAFE = getUnsafe(); Class<?> sk = Striped64.class; baseOffset = UNSAFE.objectFieldOffset (sk.getDeclaredField("base")); busyOffset = UNSAFE.objectFieldOffset (sk.getDeclaredField("busy")); } catch (Exception e) { throw new Error(e); } } /** * Returns a sun.misc.Unsafe. Suitable for use in a 3rd party package. * Replace with a simple call to Unsafe.getUnsafe when integrating * into a jdk. * * @return a sun.misc.Unsafe */ private static sun.misc.Unsafe getUnsafe() { try { return sun.misc.Unsafe.getUnsafe(); } catch (SecurityException tryReflectionInstead) {} try { return java.security.AccessController.doPrivileged (new java.security.PrivilegedExceptionAction<sun.misc.Unsafe>() { public sun.misc.Unsafe run() throws Exception { Class<sun.misc.Unsafe> k = sun.misc.Unsafe.class; for (java.lang.reflect.Field f : k.getDeclaredFields()) { f.setAccessible(true); Object x = f.get(null); if (k.isInstance(x)) return k.cast(x); } throw new NoSuchFieldError("the Unsafe"); }}); } catch (java.security.PrivilegedActionException e) { throw new RuntimeException("Could not initialize intrinsics", e.getCause()); } } }
0true
src_main_java_jsr166e_Striped64.java
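For context on how the Striped64 entry above is meant to be consumed, here is a minimal sketch of a LongAdder-style counter built on the API it shows (fn, casBase, retryUpdate, threadHashCode). It assumes compilation into the same jsr166e package, since Striped64, Cell, and threadHashCode are package-private; it is an illustration, not part of the sampled source.

class SimpleAdder extends Striped64 {
    // retryUpdate applies this function to either base or a Cell slot.
    long fn(long currentValue, long newValue) { return currentValue + newValue; }

    // Fast path: CAS the shared base. Under contention, hash to a Cell and
    // fall back to retryUpdate, which handles table init and resizing.
    void add(long x) {
        Cell[] as; long b, v; HashCode hc; Cell a; int n;
        if ((as = cells) != null || !casBase(b = base, b + x)) {
            boolean uncontended = true;
            int h = (hc = threadHashCode.get()).code;
            if (as == null || (n = as.length) < 1
                    || (a = as[(n - 1) & h]) == null
                    || !(uncontended = a.cas(v = a.value, v + x)))
                retryUpdate(x, hc, uncontended);
        }
    }

    // The sum is a moving snapshot: base plus every non-null cell.
    long sum() {
        long sum = base;
        Cell[] as = cells;
        if (as != null)
            for (Cell a : as)
                if (a != null) sum += a.value;
        return sum;
    }
}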
1,922
EntryListener<Object, Object> listener = new EntryListener<Object, Object>() { private void handleEvent(EntryEvent<Object, Object> event) { if (endpoint.live()) { Data key = clientEngine.toData(event.getKey()); Data value = clientEngine.toData(event.getValue()); Data oldValue = clientEngine.toData(event.getOldValue()); PortableEntryEvent portableEntryEvent = new PortableEntryEvent(key, value, oldValue, event.getEventType(), event.getMember().getUuid()); endpoint.sendEvent(portableEntryEvent, getCallId()); } } public void entryAdded(EntryEvent<Object, Object> event) { handleEvent(event); } public void entryRemoved(EntryEvent<Object, Object> event) { handleEvent(event); } public void entryUpdated(EntryEvent<Object, Object> event) { handleEvent(event); } public void entryEvicted(EntryEvent<Object, Object> event) { handleEvent(event); } };
1no label
hazelcast_src_main_java_com_hazelcast_map_client_AbstractMapAddEntryListenerRequest.java
2,090
public class PutAllOperation extends AbstractMapOperation implements PartitionAwareOperation, BackupAwareOperation { private MapEntrySet entrySet; private boolean initialLoad = false; private List<Map.Entry<Data, Data>> backupEntrySet; private List<RecordInfo> backupRecordInfos; public PutAllOperation(String name, MapEntrySet entrySet) { super(name); this.entrySet = entrySet; } public PutAllOperation(String name, MapEntrySet entrySet, boolean initialLoad) { super(name); this.entrySet = entrySet; this.initialLoad = initialLoad; } public PutAllOperation() { } public void run() { backupRecordInfos = new ArrayList<RecordInfo>(); backupEntrySet = new ArrayList<Map.Entry<Data, Data>>(); int partitionId = getPartitionId(); RecordStore recordStore = mapService.getRecordStore(partitionId, name); Set<Map.Entry<Data, Data>> entries = entrySet.getEntrySet(); InternalPartitionService partitionService = getNodeEngine().getPartitionService(); Set<Data> keysToInvalidate = new HashSet<Data>(); for (Map.Entry<Data, Data> entry : entries) { Data dataKey = entry.getKey(); Data dataValue = entry.getValue(); if (partitionId == partitionService.getPartitionId(dataKey)) { Data dataOldValue = null; if (initialLoad) { recordStore.putFromLoad(dataKey, dataValue, -1); } else { dataOldValue = mapService.toData(recordStore.put(dataKey, dataValue, -1)); } mapService.interceptAfterPut(name, dataValue); EntryEventType eventType = dataOldValue == null ? EntryEventType.ADDED : EntryEventType.UPDATED; mapService.publishEvent(getCallerAddress(), name, eventType, dataKey, dataOldValue, dataValue); keysToInvalidate.add(dataKey); if (mapContainer.getWanReplicationPublisher() != null && mapContainer.getWanMergePolicy() != null) { Record record = recordStore.getRecord(dataKey); final SimpleEntryView entryView = mapService.createSimpleEntryView(dataKey, mapService.toData(dataValue), record); mapService.publishWanReplicationUpdate(name, entryView); } backupEntrySet.add(entry); RecordInfo replicationInfo = mapService.createRecordInfo(recordStore.getRecord(dataKey)); backupRecordInfos.add(replicationInfo); } } invalidateNearCaches(keysToInvalidate); } protected final void invalidateNearCaches(Set<Data> keys) { if (mapService.isNearCacheAndInvalidationEnabled(name)) { mapService.invalidateAllNearCaches(name, keys); } } @Override public Object getResponse() { return true; } @Override public String toString() { return "PutAllOperation{" + '}'; } @Override protected void writeInternal(ObjectDataOutput out) throws IOException { super.writeInternal(out); out.writeObject(entrySet); out.writeBoolean(initialLoad); } @Override protected void readInternal(ObjectDataInput in) throws IOException { super.readInternal(in); entrySet = in.readObject(); initialLoad = in.readBoolean(); } @Override public boolean shouldBackup() { return !backupEntrySet.isEmpty(); } public final int getAsyncBackupCount() { return mapContainer.getAsyncBackupCount(); } public final int getSyncBackupCount() { return mapContainer.getBackupCount(); } @Override public Operation getBackupOperation() { return new PutAllBackupOperation(name, backupEntrySet, backupRecordInfos); } }
1no label
hazelcast_src_main_java_com_hazelcast_map_operation_PutAllOperation.java
1,190
@edu.umd.cs.findbugs.annotations.SuppressWarnings("SE_BAD_FIELD") public class MemberLeftException extends ExecutionException implements DataSerializable, RetryableException { private Member member; public MemberLeftException() { } public MemberLeftException(Member member) { this.member = member; } /** * Returns the member which left the cluster * @return member */ public Member getMember() { return member; } public String getMessage() { return member + " has left cluster!"; } public void writeData(ObjectDataOutput out) throws IOException { member.writeData(out); } public void readData(ObjectDataInput in) throws IOException { member = new MemberImpl(); member.readData(in); } }
1no label
hazelcast_src_main_java_com_hazelcast_core_MemberLeftException.java
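Because MemberLeftException above implements RetryableException, callers typically resubmit the work when it surfaces from a Future. A small, hypothetical retry wrapper is sketched below; the submit supplier and attempt limit are placeholders, not Hazelcast API.

import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.function.Supplier;

final class RetryOnMemberLeft {
    // Resubmits the task while the failure indicates the owning member left the cluster.
    static <T> T getWithRetry(Supplier<Future<T>> submit, int maxAttempts) throws Exception {
        for (int attempt = 1; ; attempt++) {
            try {
                return submit.get().get();
            } catch (ExecutionException e) {
                // MemberLeftException extends ExecutionException, so it may arrive
                // directly or as the cause of a wrapping ExecutionException.
                boolean memberLeft = e instanceof MemberLeftException
                        || e.getCause() instanceof MemberLeftException;
                if (memberLeft && attempt < maxAttempts) {
                    continue; // safe to retry on another member
                }
                throw e;
            }
        }
    }
}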
116
public class OStringForwardReader implements CharSequence { private final BufferedReader input; private char[] buffer = new char[DEFAULT_SIZE]; private long start = -1; private long end = -1; private long current = 0; private long size = 0; private static final int DEFAULT_SIZE = 1000; public OStringForwardReader(final InputStream iInput) { this.input = new BufferedReader(new InputStreamReader(iInput)); } public OStringForwardReader(final Reader iReader) { this.input = new BufferedReader(iReader); } public OStringForwardReader(final File file) throws FileNotFoundException { this(new FileInputStream(file)); size = file.length(); } public char charAt(final int iIndex) { if (iIndex < start) throw new IllegalStateException("Cannot read backward"); if (iIndex >= end) read(iIndex); if (iIndex > current) current = iIndex; return buffer[(int) (iIndex - start)]; } private void read(final int iIndex) { try { /* JUMP CHARACTERS */ for (long i = end; i < iIndex - 1; ++i) input.read(); start = iIndex; final int byteRead = input.read(buffer); end = start + byteRead; current = start; } catch (IOException e) { throw new OIOException("Error in read", e); } } public void close() throws IOException { if (input != null) input.close(); start = end = -1; current = size = 0; } public boolean ready() { try { return current < end || input.ready(); } catch (IOException e) { throw new OIOException("Error in ready", e); } } public int length() { return (int) size; } public CharSequence subSequence(final int start, final int end) { throw new UnsupportedOperationException(); } public long getPosition() { return current; } @Override public String toString() { return (start > 0 ? "..." : "") + new String(buffer) + (ready() ? "..." : ""); } public int indexOf(final char iToFind) { for (int i = (int) current; i < size; ++i) { if (charAt(i) == iToFind) return i; } return -1; } public String subString(int iOffset, final char iToFind, boolean iIncluded) { StringBuilder buffer = new StringBuilder(); char c; for (int i = iOffset; i < size; ++i) { c = charAt(i); if (c == iToFind) { if (iIncluded) buffer.append(c); return buffer.toString(); } buffer.append(c); } buffer.setLength(0); return null; } }
0true
commons_src_main_java_com_orientechnologies_common_parser_OStringForwardReader.java
934
class AsyncBroadcastAction { private final Request request; private final ActionListener<Response> listener; private final ClusterState clusterState; private final DiscoveryNodes nodes; private final GroupShardsIterator shardsIts; private final int expectedOps; private final AtomicInteger counterOps = new AtomicInteger(); private final AtomicReferenceArray shardsResponses; AsyncBroadcastAction(Request request, ActionListener<Response> listener) { this.request = request; this.listener = listener; clusterState = clusterService.state(); ClusterBlockException blockException = checkGlobalBlock(clusterState, request); if (blockException != null) { throw blockException; } // update to concrete indices String[] concreteIndices = clusterState.metaData().concreteIndices(request.indices(), request.indicesOptions()); blockException = checkRequestBlock(clusterState, request, concreteIndices); if (blockException != null) { throw blockException; } nodes = clusterState.nodes(); shardsIts = shards(clusterState, request, concreteIndices); expectedOps = shardsIts.size(); shardsResponses = new AtomicReferenceArray<Object>(expectedOps); } public void start() { if (shardsIts.size() == 0) { // no shards try { listener.onResponse(newResponse(request, new AtomicReferenceArray(0), clusterState)); } catch (Throwable e) { listener.onFailure(e); } return; } request.beforeStart(); // count the local operations, and perform the non local ones int localOperations = 0; int shardIndex = -1; for (final ShardIterator shardIt : shardsIts) { shardIndex++; final ShardRouting shard = shardIt.firstOrNull(); if (shard != null) { if (shard.currentNodeId().equals(nodes.localNodeId())) { localOperations++; } else { // do the remote operation here, the localAsync flag is not relevant performOperation(shardIt, shardIndex, true); } } else { // really, no shards active in this group onOperation(null, shardIt, shardIndex, new NoShardAvailableActionException(shardIt.shardId())); } } // we have local operations, perform them now if (localOperations > 0) { if (request.operationThreading() == BroadcastOperationThreading.SINGLE_THREAD) { request.beforeLocalFork(); threadPool.executor(executor).execute(new Runnable() { @Override public void run() { int shardIndex = -1; for (final ShardIterator shardIt : shardsIts) { shardIndex++; final ShardRouting shard = shardIt.firstOrNull(); if (shard != null) { if (shard.currentNodeId().equals(nodes.localNodeId())) { performOperation(shardIt, shardIndex, false); } } } } }); } else { boolean localAsync = request.operationThreading() == BroadcastOperationThreading.THREAD_PER_SHARD; if (localAsync) { request.beforeLocalFork(); } shardIndex = -1; for (final ShardIterator shardIt : shardsIts) { shardIndex++; final ShardRouting shard = shardIt.firstOrNull(); if (shard != null) { if (shard.currentNodeId().equals(nodes.localNodeId())) { performOperation(shardIt, shardIndex, localAsync); } } } } } } void performOperation(final ShardIterator shardIt, int shardIndex, boolean localAsync) { performOperation(shardIt, shardIt.nextOrNull(), shardIndex, localAsync); } void performOperation(final ShardIterator shardIt, final ShardRouting shard, final int shardIndex, boolean localAsync) { if (shard == null) { // no more active shards... 
(we should not really get here, just safety) onOperation(null, shardIt, shardIndex, new NoShardAvailableActionException(shardIt.shardId())); } else { try { final ShardRequest shardRequest = newShardRequest(shard, request); if (shard.currentNodeId().equals(nodes.localNodeId())) { if (localAsync) { threadPool.executor(executor).execute(new Runnable() { @Override public void run() { try { onOperation(shard, shardIndex, shardOperation(shardRequest)); } catch (Throwable e) { onOperation(shard, shardIt, shardIndex, e); } } }); } else { onOperation(shard, shardIndex, shardOperation(shardRequest)); } } else { DiscoveryNode node = nodes.get(shard.currentNodeId()); if (node == null) { // no node connected, act as failure onOperation(shard, shardIt, shardIndex, new NoShardAvailableActionException(shardIt.shardId())); } else { transportService.sendRequest(node, transportShardAction, shardRequest, new BaseTransportResponseHandler<ShardResponse>() { @Override public ShardResponse newInstance() { return newShardResponse(); } @Override public String executor() { return ThreadPool.Names.SAME; } @Override public void handleResponse(ShardResponse response) { onOperation(shard, shardIndex, response); } @Override public void handleException(TransportException e) { onOperation(shard, shardIt, shardIndex, e); } }); } } } catch (Throwable e) { onOperation(shard, shardIt, shardIndex, e); } } } @SuppressWarnings({"unchecked"}) void onOperation(ShardRouting shard, int shardIndex, ShardResponse response) { shardsResponses.set(shardIndex, response); if (expectedOps == counterOps.incrementAndGet()) { finishHim(); } } @SuppressWarnings({"unchecked"}) void onOperation(@Nullable ShardRouting shard, final ShardIterator shardIt, int shardIndex, Throwable t) { // we set the shard failure always, even if its the first in the replication group, and the next one // will work (it will just override it...) 
setFailure(shardIt, shardIndex, t); ShardRouting nextShard = shardIt.nextOrNull(); if (nextShard != null) { if (t != null) { if (logger.isTraceEnabled()) { if (!TransportActions.isShardNotAvailableException(t)) { if (shard != null) { logger.trace(shard.shortSummary() + ": Failed to execute [" + request + "]", t); } else { logger.trace(shardIt.shardId() + ": Failed to execute [" + request + "]", t); } } } } // we are not threaded here if we got here from the transport // or we possibly threaded if we got from a local threaded one, // in which case, the next shard in the partition will not be local one // so there is no meaning to this flag performOperation(shardIt, nextShard, shardIndex, true); } else { if (logger.isDebugEnabled()) { if (t != null) { if (!TransportActions.isShardNotAvailableException(t)) { if (shard != null) { logger.debug(shard.shortSummary() + ": Failed to execute [" + request + "]", t); } else { logger.debug(shardIt.shardId() + ": Failed to execute [" + request + "]", t); } } } } if (expectedOps == counterOps.incrementAndGet()) { finishHim(); } } } void finishHim() { try { listener.onResponse(newResponse(request, shardsResponses, clusterState)); } catch (Throwable e) { listener.onFailure(e); } } void setFailure(ShardIterator shardIt, int shardIndex, Throwable t) { // we don't aggregate shard failures on non active shards (but do keep the header counts right) if (TransportActions.isShardNotAvailableException(t)) { return; } if (!(t instanceof BroadcastShardOperationFailedException)) { t = new BroadcastShardOperationFailedException(shardIt.shardId(), t); } Object response = shardsResponses.get(shardIndex); if (response == null) { // just override it and return shardsResponses.set(shardIndex, t); } if (!(response instanceof Throwable)) { // we should never really get here... return; } // the failure is already present, try and not override it with an exception that is less meaningless // for example, getting illegal shard state if (TransportActions.isReadOverrideException(t)) { shardsResponses.set(shardIndex, t); } } }
1no label
src_main_java_org_elasticsearch_action_support_broadcast_TransportBroadcastOperationAction.java
4
@Component("blCategoryCustomPersistenceHandler") public class CategoryCustomPersistenceHandler extends CustomPersistenceHandlerAdapter { private static final Log LOG = LogFactory.getLog(CategoryCustomPersistenceHandler.class); @Override public Boolean canHandleAdd(PersistencePackage persistencePackage) { String ceilingEntityFullyQualifiedClassname = persistencePackage.getCeilingEntityFullyQualifiedClassname(); String[] customCriteria = persistencePackage.getCustomCriteria(); return !ArrayUtils.isEmpty(customCriteria) && "addNewCategory".equals(customCriteria[0]) && Category.class.getName().equals(ceilingEntityFullyQualifiedClassname); } @Override public Entity add(PersistencePackage persistencePackage, DynamicEntityDao dynamicEntityDao, RecordHelper helper) throws ServiceException { Entity entity = persistencePackage.getEntity(); try { PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective(); Category adminInstance = (Category) Class.forName(entity.getType()[0]).newInstance(); Map<String, FieldMetadata> adminProperties = helper.getSimpleMergedProperties(Category.class.getName(), persistencePerspective); adminInstance = (Category) helper.createPopulatedInstance(adminInstance, entity, adminProperties, false); CategoryXref categoryXref = new CategoryXrefImpl(); categoryXref.setCategory(adminInstance.getDefaultParentCategory()); categoryXref.setSubCategory(adminInstance); if (adminInstance.getDefaultParentCategory() != null && !adminInstance.getAllParentCategoryXrefs().contains(categoryXref)) { adminInstance.getAllParentCategoryXrefs().add(categoryXref); } adminInstance = (Category) dynamicEntityDao.merge(adminInstance); return helper.getRecord(adminProperties, adminInstance, null, null); } catch (Exception e) { throw new ServiceException("Unable to add entity for " + entity.getType()[0], e); } } protected Map<String, FieldMetadata> getMergedProperties(Class<?> ceilingEntityFullyQualifiedClass, DynamicEntityDao dynamicEntityDao, Boolean populateManyToOneFields, String[] includeManyToOneFields, String[] excludeManyToOneFields, String configurationKey) throws ClassNotFoundException, SecurityException, IllegalArgumentException, NoSuchMethodException, IllegalAccessException, InvocationTargetException, NoSuchFieldException { Class<?>[] entities = dynamicEntityDao.getAllPolymorphicEntitiesFromCeiling(ceilingEntityFullyQualifiedClass); return dynamicEntityDao.getMergedProperties( ceilingEntityFullyQualifiedClass.getName(), entities, null, new String[]{}, new ForeignKey[]{}, MergedPropertyType.PRIMARY, populateManyToOneFields, includeManyToOneFields, excludeManyToOneFields, configurationKey, "" ); } }
0true
admin_broadleaf-admin-module_src_main_java_org_broadleafcommerce_admin_server_service_handler_CategoryCustomPersistenceHandler.java
217
public class ClientReadHandler extends ClientAbstractSelectionHandler { private final ByteBuffer buffer; private volatile long lastHandle; private ClientPacket packet; public ClientReadHandler(ClientConnection connection, IOSelector ioSelector, int bufferSize) { super(connection, ioSelector); buffer = ByteBuffer.allocate(bufferSize); } @Override public void run() { registerOp(SelectionKey.OP_READ); } @Override public void handle() { lastHandle = Clock.currentTimeMillis(); if (!connection.live()) { if (logger.isFinestEnabled()) { String message = "We are being asked to read, but connection is not live so we won't"; logger.finest(message); } return; } try { int readBytes = socketChannel.read(buffer); if (readBytes == -1) { throw new EOFException("Remote socket closed!"); } } catch (IOException e) { handleSocketException(e); return; } try { if (buffer.position() == 0) { return; } buffer.flip(); while (buffer.hasRemaining()) { if (packet == null) { packet = new ClientPacket(connection.getConnectionManager().getSerializationContext()); } boolean complete = packet.readFrom(buffer); if (complete) { packet.setConn(connection); connectionManager.handlePacket(packet); packet = null; } else { break; } } if (buffer.hasRemaining()) { buffer.compact(); } else { buffer.clear(); } } catch (Throwable t) { handleSocketException(t); } } long getLastHandle() { return lastHandle; } }
1no label
hazelcast-client_src_main_java_com_hazelcast_client_connection_nio_ClientReadHandler.java
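The core of ClientReadHandler above is the read/flip/drain/compact-or-clear buffer cycle. The standalone sketch below shows the same pattern against a plain NIO channel; the Frame interface stands in for ClientPacket and is hypothetical, not a Hazelcast type.

import java.io.EOFException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;
import java.util.function.Consumer;
import java.util.function.Supplier;

final class FramedReader {
    interface Frame {
        boolean readFrom(ByteBuffer src); // returns true once the frame is complete
    }

    private final ByteBuffer buffer = ByteBuffer.allocate(32 * 1024);
    private Frame partial; // partially read frame carried across handle() calls

    void handle(ReadableByteChannel channel, Supplier<Frame> newFrame, Consumer<Frame> onFrame)
            throws IOException {
        if (channel.read(buffer) == -1) {
            throw new EOFException("Remote socket closed!");
        }
        if (buffer.position() == 0) {
            return; // nothing was read
        }
        buffer.flip(); // switch from filling to draining
        while (buffer.hasRemaining()) {
            if (partial == null) {
                partial = newFrame.get();
            }
            if (partial.readFrom(buffer)) {
                onFrame.accept(partial); // complete frame, hand it off
                partial = null;
            } else {
                break; // wait for more bytes
            }
        }
        if (buffer.hasRemaining()) {
            buffer.compact(); // preserve the unread tail for the next read
        } else {
            buffer.clear();
        }
    }
}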
713
@Entity @Inheritance(strategy = InheritanceType.JOINED) @Table(name = "BLC_PRODUCT_OPTION_VALUE") @Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region = "blStandardElements") @AdminPresentationClass(friendlyName = "Product Option Value") public class ProductOptionValueImpl implements ProductOptionValue { private static final long serialVersionUID = 1L; @Id @GeneratedValue(generator = "ProductOptionValueId") @GenericGenerator( name = "ProductOptionValueId", strategy = "org.broadleafcommerce.common.persistence.IdOverrideTableGenerator", parameters = { @Parameter(name = "segment_value", value = "ProductOptionValueImpl"), @Parameter(name = "entity_name", value = "org.broadleafcommerce.core.catalog.domain.ProductOptionValueImpl") }) @Column(name = "PRODUCT_OPTION_VALUE_ID") protected Long id; @Column(name = "ATTRIBUTE_VALUE") @AdminPresentation(friendlyName = "productOptionValue_attributeValue", prominent = true, translatable = true) protected String attributeValue; @Column(name = "DISPLAY_ORDER") @AdminPresentation(friendlyName = "productOptionValue_displayOrder", prominent = true) protected Long displayOrder; @Column(name = "PRICE_ADJUSTMENT", precision = 19, scale = 5) @AdminPresentation(friendlyName = "productOptionValue_adjustment", fieldType = SupportedFieldType.MONEY, prominent = true) protected BigDecimal priceAdjustment; @ManyToOne(targetEntity = ProductOptionImpl.class) @JoinColumn(name = "PRODUCT_OPTION_ID") protected ProductOption productOption; @Override public Long getId() { return id; } @Override public void setId(Long id) { this.id = id; } @Override public String getAttributeValue() { return DynamicTranslationProvider.getValue(this, "attributeValue", attributeValue); } @Override public void setAttributeValue(String attributeValue) { this.attributeValue = attributeValue; } @Override public Long getDisplayOrder() { return displayOrder; } @Override public void setDisplayOrder(Long displayOrder) { this.displayOrder = displayOrder; } @Override public Money getPriceAdjustment() { Money returnPrice = null; if (SkuPricingConsiderationContext.hasDynamicPricing()) { DynamicSkuPrices dynamicPrices = SkuPricingConsiderationContext.getSkuPricingService().getPriceAdjustment(this, priceAdjustment == null ? null : new Money(priceAdjustment), SkuPricingConsiderationContext.getSkuPricingConsiderationContext()); returnPrice = dynamicPrices.getPriceAdjustment(); } else { if (priceAdjustment != null) { returnPrice = new Money(priceAdjustment, Money.defaultCurrency()); } } return returnPrice; } @Override public void setPriceAdjustment(Money priceAdjustment) { this.priceAdjustment = Money.toAmount(priceAdjustment); } @Override public ProductOption getProductOption() { return productOption; } @Override public void setProductOption(ProductOption productOption) { this.productOption = productOption; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } ProductOptionValueImpl other = (ProductOptionValueImpl) obj; if (id != null && other.id != null) { return id.equals(other.id); } if (getAttributeValue() == null) { if (other.getAttributeValue() != null) { return false; } } else if (!getAttributeValue().equals(other.getAttributeValue())) { return false; } return true; } }
1no label
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_catalog_domain_ProductOptionValueImpl.java
11
public class StorageSetup { /*############ UTILITIES #############*/ public static final String getHomeDir(String subdir) { String homedir = System.getProperty("titan.testdir"); if (null == homedir) { homedir = "target" + File.separator + "db"; } if (subdir!=null && !StringUtils.isEmpty(subdir)) homedir += File.separator + subdir; File homefile = new File(homedir); if (!homefile.exists()) homefile.mkdirs(); return homedir; } public static final String getHomeDir() { return getHomeDir(null); } public static final File getHomeDirFile() { return getHomeDirFile(null); } public static final File getHomeDirFile(String subdir) { return new File(getHomeDir(subdir)); } public static final void deleteHomeDir() { deleteHomeDir(null); } public static final void deleteHomeDir(String subdir) { File homeDirFile = getHomeDirFile(subdir); /* Make directory if it doesn't exist */ if (!homeDirFile.exists()) homeDirFile.mkdirs(); boolean success = IOUtils.deleteFromDirectory(homeDirFile); if (!success) throw new IllegalStateException("Could not remove " + homeDirFile); } public static TitanGraph getInMemoryGraph() { return TitanFactory.open(buildConfiguration().set(STORAGE_BACKEND,"inmemory")); } public static WriteConfiguration addPermanentCache(ModifiableConfiguration conf) { conf.set(DB_CACHE, true); conf.set(DB_CACHE_TIME,0l); return conf.getConfiguration(); } public static ModifiableConfiguration getConfig(WriteConfiguration config) { return new ModifiableConfiguration(ROOT_NS,config, BasicConfiguration.Restriction.NONE); } public static BasicConfiguration getConfig(ReadConfiguration config) { return new BasicConfiguration(ROOT_NS,config, BasicConfiguration.Restriction.NONE); } }
0true
titan-test_src_main_java_com_thinkaurelius_titan_StorageSetup.java