Unnamed: 0
int64 0
6.45k
| func
stringlengths 37
143k
| target
class label 2
classes | project
stringlengths 33
157
|
---|---|---|---|
219 | ThreadLocal<T> response = new ThreadLocal<T>() {
@Override
protected T initialValue() {
addThreadLocal(this);
if (!createInitialValue) {
return null;
}
try {
return type.newInstance();
} catch (InstantiationException e) {
throw new RuntimeException(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
@Override
public void set(T value) {
super.get();
super.set(value);
}
}; | 0true
| common_src_main_java_org_broadleafcommerce_common_classloader_release_ThreadLocalManager.java |
1,362 | @Entity
@Table(name = "BLC_CODE_TYPES")
@Inheritance(strategy=InheritanceType.JOINED)
@Cache(usage=CacheConcurrencyStrategy.READ_WRITE, region="blStandardElements")
@Deprecated
public class CodeTypeImpl implements CodeType {
public static final long serialVersionUID = 1L;
@Id
@GeneratedValue(generator = "CodeTypeId", strategy = GenerationType.TABLE)
@GenericGenerator(
name="CodeTypeId",
strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name="segment_value", value="CodeTypeImpl"),
@Parameter(name="entity_name", value="org.broadleafcommerce.core.util.domain.CodeTypeImpl")
}
)
@Column(name = "CODE_ID")
protected Long id;
@Column(name = "CODE_TYPE", nullable=false)
protected String codeType;
@Column(name = "CODE_KEY", nullable=false)
protected String key;
@Column(name = "CODE_DESC")
protected String description;
@Column(name = "MODIFIABLE")
protected Character modifiable;
@Override
public Long getId() {
return id;
}
@Override
public void setId(Long id) {
this.id = id;
}
@Override
public String getCodeType() {
return codeType;
}
@Override
public void setCodeType(String codeType) {
this.codeType = codeType;
}
@Override
public String getKey() {
return key;
}
@Override
public void setKey(String key) {
this.key = key;
}
@Override
public String getDescription() {
return description;
}
@Override
public void setDescription(String description) {
this.description = description;
}
@Override
public Boolean isModifiable() {
if(modifiable == null)
return null;
return modifiable == 'Y' ? Boolean.TRUE : Boolean.FALSE;
}
@Override
public Boolean getModifiable() {
return isModifiable();
}
@Override
public void setModifiable(Boolean modifiable) {
if(modifiable == null) {
this.modifiable = null;
} else {
this.modifiable = modifiable ? 'Y' : 'N';
}
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result
+ ((codeType == null) ? 0 : codeType.hashCode());
result = prime * result
+ ((description == null) ? 0 : description.hashCode());
result = prime * result + ((id == null) ? 0 : id.hashCode());
result = prime * result + ((key == null) ? 0 : key.hashCode());
result = prime * result
+ ((modifiable == null) ? 0 : modifiable.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
CodeTypeImpl other = (CodeTypeImpl) obj;
if (codeType == null) {
if (other.codeType != null)
return false;
} else if (!codeType.equals(other.codeType))
return false;
if (description == null) {
if (other.description != null)
return false;
} else if (!description.equals(other.description))
return false;
if (id == null) {
if (other.id != null)
return false;
} else if (!id.equals(other.id))
return false;
if (key == null) {
if (other.key != null)
return false;
} else if (!key.equals(other.key))
return false;
if (modifiable == null) {
if (other.modifiable != null)
return false;
} else if (!modifiable.equals(other.modifiable))
return false;
return true;
}
} | 1no label
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_util_domain_CodeTypeImpl.java |
114 | {
@Override
public Object doWork( Void state )
{
try
{
tm.rollback();
}
catch ( Exception e )
{
throw new RuntimeException( e );
}
return null;
}
}; | 0true
| community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_TestJtaCompliance.java |
309 | public class MergeFileSystemAndClassPathXMLApplicationContext extends AbstractMergeXMLApplicationContext {
public MergeFileSystemAndClassPathXMLApplicationContext(ApplicationContext parent) {
super(parent);
}
public MergeFileSystemAndClassPathXMLApplicationContext(String[] classPathLocations, String[] fileSystemLocations) throws BeansException {
this(classPathLocations, fileSystemLocations, null);
}
public MergeFileSystemAndClassPathXMLApplicationContext(LinkedHashMap<String, ResourceType> locations, ApplicationContext parent) throws BeansException {
this(parent);
ResourceInputStream[] resources = new ResourceInputStream[locations.size()];
int j = 0;
for (Map.Entry<String, ResourceType> entry : locations.entrySet()) {
switch (entry.getValue()) {
case CLASSPATH:
resources[j] = new ResourceInputStream(getClassLoader(parent).getResourceAsStream(entry.getKey()), entry.getKey());
break;
case FILESYSTEM:
try {
File temp = new File(entry.getKey());
resources[j] = new ResourceInputStream(new BufferedInputStream(new FileInputStream(temp)), entry.getKey());
} catch (FileNotFoundException e) {
throw new FatalBeanException("Unable to merge context files", e);
}
break;
}
j++;
}
ImportProcessor importProcessor = new ImportProcessor(this);
try {
resources = importProcessor.extract(resources);
} catch (MergeException e) {
throw new FatalBeanException("Unable to merge source and patch locations", e);
}
this.configResources = new MergeApplicationContextXmlConfigResource().getConfigResources(resources, null);
refresh();
}
public MergeFileSystemAndClassPathXMLApplicationContext(String[] classPathLocations, String[] fileSystemLocations, ApplicationContext parent) throws BeansException {
this(parent);
ResourceInputStream[] classPathSources;
ResourceInputStream[] fileSystemSources;
try {
classPathSources = new ResourceInputStream[classPathLocations.length];
for (int j=0;j<classPathLocations.length;j++){
classPathSources[j] = new ResourceInputStream(getClassLoader(parent).getResourceAsStream(classPathLocations[j]), classPathLocations[j]);
}
fileSystemSources = new ResourceInputStream[fileSystemLocations.length];
for (int j=0;j<fileSystemSources.length;j++){
File temp = new File(fileSystemLocations[j]);
fileSystemSources[j] = new ResourceInputStream(new BufferedInputStream(new FileInputStream(temp)), fileSystemLocations[j]);
}
} catch (FileNotFoundException e) {
throw new FatalBeanException("Unable to merge context files", e);
}
ImportProcessor importProcessor = new ImportProcessor(this);
try {
classPathSources = importProcessor.extract(classPathSources);
fileSystemSources = importProcessor.extract(fileSystemSources);
} catch (MergeException e) {
throw new FatalBeanException("Unable to merge source and patch locations", e);
}
this.configResources = new MergeApplicationContextXmlConfigResource().getConfigResources(classPathSources, fileSystemSources);
refresh();
}
/**
* This could be advantageous for subclasses to override in order to utilize the parent application context. By default,
* this utilizes the class loader for the current class.
*/
protected ClassLoader getClassLoader(ApplicationContext parent) {
return MergeFileSystemAndClassPathXMLApplicationContext.class.getClassLoader();
}
public enum ResourceType {
FILESYSTEM,CLASSPATH
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_extensibility_context_MergeFileSystemAndClassPathXMLApplicationContext.java |
2,563 | firstMaster.clusterService.submitStateUpdateTask("local-disco-receive(from node[" + localNode + "])", new ProcessedClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) {
DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder();
for (LocalDiscovery discovery : clusterGroups.get(clusterName).members()) {
nodesBuilder.put(discovery.localNode);
}
nodesBuilder.localNodeId(master.localNode().id()).masterNodeId(master.localNode().id());
return ClusterState.builder(currentState).nodes(nodesBuilder).build();
}
@Override
public void onFailure(String source, Throwable t) {
logger.error("unexpected failure during [{}]", t, source);
}
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
sendInitialStateEventIfNeeded();
}
}); | 1no label
| src_main_java_org_elasticsearch_discovery_local_LocalDiscovery.java |
277 | public class VersionTests extends ElasticsearchTestCase {
@Test
public void testVersions() throws Exception {
assertThat(V_0_20_0.before(V_0_90_0), is(true));
assertThat(V_0_20_0.before(V_0_20_0), is(false));
assertThat(V_0_90_0.before(V_0_20_0), is(false));
assertThat(V_0_20_0.onOrBefore(V_0_90_0), is(true));
assertThat(V_0_20_0.onOrBefore(V_0_20_0), is(true));
assertThat(V_0_90_0.onOrBefore(V_0_20_0), is(false));
assertThat(V_0_20_0.after(V_0_90_0), is(false));
assertThat(V_0_20_0.after(V_0_20_0), is(false));
assertThat(V_0_90_0.after(V_0_20_0), is(true));
assertThat(V_0_20_0.onOrAfter(V_0_90_0), is(false));
assertThat(V_0_20_0.onOrAfter(V_0_20_0), is(true));
assertThat(V_0_90_0.onOrAfter(V_0_20_0), is(true));
}
@Test
public void testVersionConstantPresent() {
assertThat(Version.CURRENT, sameInstance(Version.fromId(Version.CURRENT.id)));
assertThat(Version.CURRENT.luceneVersion.ordinal(), equalTo(org.apache.lucene.util.Version.LUCENE_CURRENT.ordinal() - 1));
final int iters = atLeast(20);
for (int i = 0; i < iters; i++) {
Version version = randomVersion();
assertThat(version, sameInstance(Version.fromId(version.id)));
assertThat(version.luceneVersion, sameInstance(Version.fromId(version.id).luceneVersion));
}
}
} | 0true
| src_test_java_org_elasticsearch_VersionTests.java |
21 | @SuppressWarnings("rawtypes")
static final class KeySet<E> extends AbstractSet<E> implements ONavigableSet<E> {
private final ONavigableMap<E, Object> m;
KeySet(ONavigableMap<E, Object> map) {
m = map;
}
@Override
public OLazyIterator<E> iterator() {
if (m instanceof OMVRBTree)
return ((OMVRBTree<E, Object>) m).keyIterator();
else
return (((OMVRBTree.NavigableSubMap) m).keyIterator());
}
public OLazyIterator<E> descendingIterator() {
if (m instanceof OMVRBTree)
return ((OMVRBTree<E, Object>) m).descendingKeyIterator();
else
return (((OMVRBTree.NavigableSubMap) m).descendingKeyIterator());
}
@Override
public int size() {
return m.size();
}
@Override
public boolean isEmpty() {
return m.isEmpty();
}
@Override
public boolean contains(final Object o) {
return m.containsKey(o);
}
@Override
public void clear() {
m.clear();
}
public E lower(final E e) {
return m.lowerKey(e);
}
public E floor(final E e) {
return m.floorKey(e);
}
public E ceiling(final E e) {
return m.ceilingKey(e);
}
public E higher(final E e) {
return m.higherKey(e);
}
public E first() {
return m.firstKey();
}
public E last() {
return m.lastKey();
}
public Comparator<? super E> comparator() {
return m.comparator();
}
public E pollFirst() {
final Map.Entry<E, Object> e = m.pollFirstEntry();
return e == null ? null : e.getKey();
}
public E pollLast() {
final Map.Entry<E, Object> e = m.pollLastEntry();
return e == null ? null : e.getKey();
}
@Override
public boolean remove(final Object o) {
final int oldSize = size();
m.remove(o);
return size() != oldSize;
}
public ONavigableSet<E> subSet(final E fromElement, final boolean fromInclusive, final E toElement, final boolean toInclusive) {
return new OMVRBTreeSet<E>(m.subMap(fromElement, fromInclusive, toElement, toInclusive));
}
public ONavigableSet<E> headSet(final E toElement, final boolean inclusive) {
return new OMVRBTreeSet<E>(m.headMap(toElement, inclusive));
}
public ONavigableSet<E> tailSet(final E fromElement, final boolean inclusive) {
return new OMVRBTreeSet<E>(m.tailMap(fromElement, inclusive));
}
public SortedSet<E> subSet(final E fromElement, final E toElement) {
return subSet(fromElement, true, toElement, false);
}
public SortedSet<E> headSet(final E toElement) {
return headSet(toElement, false);
}
public SortedSet<E> tailSet(final E fromElement) {
return tailSet(fromElement, true);
}
public ONavigableSet<E> descendingSet() {
return new OMVRBTreeSet<E>(m.descendingMap());
}
} | 0true
| commons_src_main_java_com_orientechnologies_common_collection_OMVRBTree.java |
5,436 | public class ScriptBytesValues extends BytesValues implements ScriptValues {
final SearchScript script;
private Iterator<?> iter;
private Object value;
private BytesRef scratch = new BytesRef();
public ScriptBytesValues(SearchScript script) {
super(true); // assume multi-valued
this.script = script;
}
@Override
public SearchScript script() {
return script;
}
@Override
public int setDocument(int docId) {
this.docId = docId;
script.setNextDocId(docId);
value = script.run();
if (value == null) {
iter = Iterators.emptyIterator();
return 0;
}
if (value.getClass().isArray()) {
final int length = Array.getLength(value);
// don't use Arrays.asList because the array may be an array of primitives?
iter = new Iterator<Object>() {
int i = 0;
@Override
public boolean hasNext() {
return i < length;
}
@Override
public Object next() {
return Array.get(value, i++);
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
return length;
}
if (value instanceof Collection) {
final Collection<?> coll = (Collection<?>) value;
iter = coll.iterator();
return coll.size();
}
iter = Iterators.singletonIterator(value);
return 1;
}
@Override
public BytesRef nextValue() {
final String next = iter.next().toString();
scratch.copyChars(next);
return scratch;
}
} | 1no label
| src_main_java_org_elasticsearch_search_aggregations_support_bytes_ScriptBytesValues.java |
6 | @Component("blCustomerCustomPersistenceHandler")
public class CustomerCustomPersistenceHandler extends CustomPersistenceHandlerAdapter {
private static final Log LOG = LogFactory.getLog(StructuredContentTypeCustomPersistenceHandler.class);
@Resource(name="blCustomerService")
protected CustomerService customerService;
@Override
public Boolean canHandleAdd(PersistencePackage persistencePackage) {
return persistencePackage.getCeilingEntityFullyQualifiedClassname() != null && persistencePackage.getCeilingEntityFullyQualifiedClassname().equals(Customer.class.getName());
}
@Override
public Entity add(PersistencePackage persistencePackage, DynamicEntityDao dynamicEntityDao, RecordHelper helper) throws ServiceException {
Entity entity = persistencePackage.getEntity();
try {
PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
Customer adminInstance = (Customer) Class.forName(entity.getType()[0]).newInstance();
adminInstance.setId(customerService.findNextCustomerId());
Map<String, FieldMetadata> adminProperties = helper.getSimpleMergedProperties(Customer.class.getName(), persistencePerspective);
adminInstance = (Customer) helper.createPopulatedInstance(adminInstance, entity, adminProperties, false);
if (customerService.readCustomerByUsername(adminInstance.getUsername()) != null) {
Entity error = new Entity();
error.addValidationError("username", "nonUniqueUsernameError");
return error;
}
adminInstance = (Customer) dynamicEntityDao.merge(adminInstance);
Entity adminEntity = helper.getRecord(adminProperties, adminInstance, null, null);
return adminEntity;
} catch (Exception e) {
LOG.error("Unable to execute persistence activity", e);
throw new ServiceException("Unable to add entity for " + entity.getType()[0], e);
}
}
} | 1no label
| admin_broadleaf-admin-module_src_main_java_org_broadleafcommerce_admin_server_service_handler_CustomerCustomPersistenceHandler.java |
54 | private class ClusterListenerImpl extends ClusterListener.Adapter
{
@Override
public void enteredCluster( ClusterConfiguration clusterConfiguration )
{
// Catch up with elections
for ( Map.Entry<String, InstanceId> memberRoles : clusterConfiguration.getRoles().entrySet() )
{
elected( memberRoles.getKey(), memberRoles.getValue(),
clusterConfiguration.getUriForId( memberRoles.getValue() ) );
}
}
@Override
public void elected( String role, final InstanceId instanceId, final URI electedMember )
{
if ( role.equals( ClusterConfiguration.COORDINATOR ) )
{
// Use the cluster coordinator as master for HA
Listeners.notifyListeners( listeners, new Listeners.Notification<ClusterMemberListener>()
{
@Override
public void notify( ClusterMemberListener listener )
{
listener.coordinatorIsElected( instanceId );
}
} );
}
}
@Override
public void leftCluster( final InstanceId member )
{
// Notify unavailability of members
Listeners.notifyListeners( listeners, new Listeners.Notification<ClusterMemberListener>()
{
@Override
public void notify( ClusterMemberListener listener )
{
for ( MemberIsAvailable memberIsAvailable : clusterMembersSnapshot.getCurrentAvailable( member ) )
{
listener.memberIsUnavailable( memberIsAvailable.getRole(), member );
}
}
} );
clusterMembersSnapshot.unavailableMember( member );
}
} | 1no label
| enterprise_cluster_src_main_java_org_neo4j_cluster_member_paxos_PaxosClusterMemberEvents.java |
1,466 | public class FindReferencesVisitor extends Visitor implements NaturalVisitor {
private Referenceable declaration;
private final Set<Node> nodes = new HashSet<Node>();
public FindReferencesVisitor(Referenceable declaration) {
if (declaration instanceof TypedDeclaration) {
Referenceable od = declaration;
while (od!=null && od!=declaration) {
declaration = od;
od = ((TypedDeclaration) od).getOriginalDeclaration();
}
}
if (declaration instanceof Declaration &&
((Declaration)declaration).getContainer() instanceof Setter) {
Setter setter = (Setter) ((Declaration)declaration).getContainer();
if (setter.getDirectMember(setter.getName(), null, false)
.equals(declaration)) {
declaration = setter;
}
}
if (declaration instanceof Setter) {
declaration = ((Setter) declaration).getGetter();
}
this.declaration = declaration;
}
public Referenceable getDeclaration() {
return declaration;
}
public Set<Node> getNodes() {
return nodes;
}
protected boolean isReference(Parameter p) {
return p!=null && isReference(p.getModel());
}
protected boolean isReference(Declaration ref) {
return ref!=null && declaration instanceof Declaration &&
(((Declaration)declaration).refines(ref) ||
isSetterParameterReference(ref));
}
private boolean isSetterParameterReference(Declaration ref) {
if (ref.getContainer() instanceof Setter) {
Setter setter = (Setter) ref.getContainer();
return setter.getDirectMember(setter.getName(), null, false).equals(ref) &&
isReference(setter.getGetter());
}
else {
return false;
}
}
protected boolean isReference(Declaration ref, String id) {
return isReference(ref);
}
private Tree.Variable getConditionVariable(Condition c) {
if (c instanceof Tree.ExistsOrNonemptyCondition) {
return ((Tree.ExistsOrNonemptyCondition) c).getVariable();
}
if (c instanceof Tree.IsCondition) {
return ((Tree.IsCondition) c).getVariable();
}
return null;
}
@Override
public void visit(Tree.CaseClause that) {
Tree.CaseItem ci = that.getCaseItem();
if (ci instanceof Tree.IsCase) {
Tree.Variable var = ((Tree.IsCase) ci).getVariable();
if (var!=null) {
TypedDeclaration od = var.getDeclarationModel().getOriginalDeclaration();
if (od!=null && od.equals(declaration)) {
Referenceable d = declaration;
declaration = var.getDeclarationModel();
that.getBlock().visit(this);
declaration = d;
return;
}
}
}
super.visit(that);
}
@Override
public void visit(Tree.IfClause that) {
for (Condition c: that.getConditionList().getConditions()) {
Tree.Variable var = getConditionVariable(c);
if (var!=null && var.getType() instanceof Tree.SyntheticVariable) {
TypedDeclaration od = var.getDeclarationModel().getOriginalDeclaration();
if (od!=null && od.equals(declaration)) {
c.visit(this);
Referenceable d = declaration;
declaration = var.getDeclarationModel();
if (that.getBlock()!=null) {
that.getBlock().visit(this);
}
declaration = d;
return;
}
}
}
super.visit(that);
}
@Override
public void visit(Tree.WhileClause that) {
for (Condition c: that.getConditionList().getConditions()) {
Tree.Variable var = getConditionVariable(c);
if (var!=null && var.getType() instanceof Tree.SyntheticVariable) {
TypedDeclaration od = var.getDeclarationModel()
.getOriginalDeclaration();
if (od!=null && od.equals(declaration)) {
c.visit(this);
Referenceable d = declaration;
declaration = var.getDeclarationModel();
that.getBlock().visit(this);
declaration = d;
return;
}
}
}
super.visit(that);
}
@Override
public void visit(Tree.Body body) {
Referenceable d = declaration;
for (Tree.Statement st: body.getStatements()) {
if (st instanceof Tree.Assertion) {
Tree.Assertion that = (Tree.Assertion) st;
for (Condition c: that.getConditionList().getConditions()) {
Tree.Variable var = getConditionVariable(c);
if (var!=null && var.getType() instanceof Tree.SyntheticVariable) {
TypedDeclaration od = var.getDeclarationModel()
.getOriginalDeclaration();
if (od!=null && od.equals(declaration)) {
c.visit(this);
declaration = var.getDeclarationModel();
break;
}
}
}
}
st.visit(this);
}
declaration = d;
}
@Override
public void visit(Tree.ExtendedTypeExpression that) {}
@Override
public void visit(Tree.StaticMemberOrTypeExpression that) {
if (isReference(that.getDeclaration(),
id(that.getIdentifier()))) {
nodes.add(that);
}
super.visit(that);
}
public void visit(Tree.MemberLiteral that) {
if (isReference(that.getDeclaration(),
id(that.getIdentifier()))) {
nodes.add(that);
}
super.visit(that);
}
@Override
public void visit(Tree.TypedArgument that) {
if (isReference(that.getParameter())) {
nodes.add(that);
}
super.visit(that);
}
@Override
public void visit(Tree.SpecifiedArgument that) {
if (that.getIdentifier()!=null &&
that.getIdentifier().getToken()!=null &&
isReference(that.getParameter())) {
nodes.add(that);
}
super.visit(that);
}
@Override
public void visit(Tree.SimpleType that) {
ProducedType type = that.getTypeModel();
if (type!=null && isReference(type.getDeclaration(),
id(that.getIdentifier()))) {
nodes.add(that);
}
super.visit(that);
}
@Override
public void visit(Tree.ImportMemberOrType that) {
if (isReference(that.getDeclarationModel())) {
nodes.add(that);
}
super.visit(that);
}
@Override
public void visit(Import that) {
super.visit(that);
if (declaration instanceof Package) {
if (formatPath(that.getImportPath().getIdentifiers())
.equals(declaration.getNameAsString())) {
nodes.add(that);
}
}
}
@Override
public void visit(ImportModule that) {
super.visit(that);
if (declaration instanceof Module) {
if (formatPath(that.getImportPath().getIdentifiers())
.equals(declaration.getNameAsString())) {
nodes.add(that);
}
}
}
@Override
public void visit(Tree.InitializerParameter that) {
if (isReference(that.getParameterModel())) {
nodes.add(that);
}
else {
super.visit(that);
}
}
private String id(Tree.Identifier that) {
return that==null ? null : that.getText();
}
} | 1no label
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_util_FindReferencesVisitor.java |
1,927 | public abstract class AbstractRuleBuilderFieldService implements RuleBuilderFieldService, ApplicationContextAware, InitializingBean {
protected DynamicEntityDao dynamicEntityDao;
protected ApplicationContext applicationContext;
protected List<FieldData> fields = new ArrayList<FieldData>();
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
this.applicationContext = applicationContext;
}
@Override
public FieldWrapper buildFields() {
FieldWrapper wrapper = new FieldWrapper();
for (FieldData field : getFields()) {
FieldDTO fieldDTO = new FieldDTO();
fieldDTO.setLabel(field.getFieldLabel());
//translate the label to display
String label = field.getFieldLabel();
BroadleafRequestContext context = BroadleafRequestContext.getBroadleafRequestContext();
MessageSource messages = context.getMessageSource();
label = messages.getMessage(label, null, label, context.getJavaLocale());
fieldDTO.setLabel(label);
fieldDTO.setName(field.getFieldName());
fieldDTO.setOperators(field.getOperators());
fieldDTO.setOptions(field.getOptions());
wrapper.getFields().add(fieldDTO);
}
return wrapper;
}
@Override
public SupportedFieldType getSupportedFieldType(String fieldName) {
SupportedFieldType type = null;
if (fieldName != null) {
for (FieldData field : getFields()) {
if (fieldName.equals(field.getFieldName())){
return field.getFieldType();
}
}
}
return type;
}
@Override
public SupportedFieldType getSecondaryFieldType(String fieldName) {
SupportedFieldType type = null;
if (fieldName != null) {
for (FieldData field : getFields()) {
if (fieldName.equals(field.getFieldName())){
return field.getSecondaryFieldType();
}
}
}
return type;
}
@Override
public FieldDTO getField(String fieldName) {
for (FieldData field : getFields()) {
if (field.getFieldName().equals(fieldName)) {
FieldDTO fieldDTO = new FieldDTO();
fieldDTO.setLabel(field.getFieldLabel());
fieldDTO.setName(field.getFieldName());
fieldDTO.setOperators(field.getOperators());
fieldDTO.setOptions(field.getOptions());
return fieldDTO;
}
}
return null;
}
@Override
public List<FieldData> getFields() {
return fields;
}
@Override
@SuppressWarnings("unchecked")
public void setFields(final List<FieldData> fields) {
List<FieldData> proxyFields = (List<FieldData>) Proxy.newProxyInstance(getClass().getClassLoader(), new Class<?>[]{List.class}, new InvocationHandler() {
@Override
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
if (method.getName().equals("add")) {
FieldData fieldData = (FieldData) args[0];
testFieldName(fieldData);
}
if (method.getName().equals("addAll")) {
Collection<FieldData> addCollection = (Collection<FieldData>) args[0];
Iterator<FieldData> itr = addCollection.iterator();
while (itr.hasNext()) {
FieldData fieldData = itr.next();
testFieldName(fieldData);
}
}
return method.invoke(fields, args);
}
private void testFieldName(FieldData fieldData) throws ClassNotFoundException {
if (!StringUtils.isEmpty(fieldData.getFieldName()) && dynamicEntityDao != null) {
Class<?>[] dtos = dynamicEntityDao.getAllPolymorphicEntitiesFromCeiling(Class.forName(getDtoClassName()));
if (ArrayUtils.isEmpty(dtos)) {
dtos = new Class<?>[]{Class.forName(getDtoClassName())};
}
Field field = null;
for (Class<?> dto : dtos) {
field = dynamicEntityDao.getFieldManager().getField(dto, fieldData.getFieldName());
if (field != null) {
break;
}
}
if (field == null) {
throw new IllegalArgumentException("Unable to find the field declared in FieldData (" + fieldData.getFieldName() + ") on the target class (" + getDtoClassName() + "), or any registered entity class that derives from it.");
}
}
}
});
this.fields = proxyFields;
}
@Override
public RuleBuilderFieldService clone() throws CloneNotSupportedException {
try {
RuleBuilderFieldService clone = this.getClass().newInstance();
clone.setFields(this.fields);
return clone;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public abstract String getDtoClassName();
public abstract void init();
@Override
public void afterPropertiesSet() throws Exception {
// This bean only is valid when the following bean is active. (admin)
if (applicationContext.containsBean(DynamicEntityRemoteService.DEFAULTPERSISTENCEMANAGERREF)) {
PersistenceManager persistenceManager = (PersistenceManager) applicationContext.getBean(DynamicEntityRemoteService.DEFAULTPERSISTENCEMANAGERREF);
persistenceManager.setTargetMode(TargetModeType.SANDBOX);
dynamicEntityDao = persistenceManager.getDynamicEntityDao();
setFields(new ArrayList<FieldData>());
// This cannot be null during startup as we do not want to remove the null safety checks in a multi-tenant env.
boolean contextWasNull = false;
if (BroadleafRequestContext.getBroadleafRequestContext() == null) {
BroadleafRequestContext brc = new BroadleafRequestContext();
brc.setIgnoreSite(true);
BroadleafRequestContext.setBroadleafRequestContext(brc);
contextWasNull = true;
}
try {
init();
} finally {
if (contextWasNull) {
BroadleafRequestContext.setBroadleafRequestContext(null);
}
}
}
}
} | 1no label
| admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_web_rulebuilder_service_AbstractRuleBuilderFieldService.java |
4,427 | shardGatewayService.recover(indexShouldExists, new IndexShardGatewayService.RecoveryListener() {
@Override
public void onRecoveryDone() {
shardStateAction.shardStarted(shardRouting, indexMetaData.getUUID(), "after recovery from gateway");
}
@Override
public void onIgnoreRecovery(String reason) {
}
@Override
public void onRecoveryFailed(IndexShardGatewayRecoveryException e) {
handleRecoveryFailure(indexService, indexMetaData, shardRouting, true, e);
}
}); | 1no label
| src_main_java_org_elasticsearch_indices_cluster_IndicesClusterStateService.java |
229 | @Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
/**
* Tells to OrientDB to call the method BEFORE the record is marshalled and written to the database.
* Applies only to the entity Objects reachable by the OrientDB engine after have registered them.
*/
public @interface OBeforeSerialization {
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_annotation_OBeforeSerialization.java |
2,891 | public static class AndPredicate implements IndexAwarePredicate, DataSerializable {
protected Predicate[] predicates;
public AndPredicate() {
}
public AndPredicate(Predicate... predicates) {
this.predicates = predicates;
}
@Override
public Set<QueryableEntry> filter(QueryContext queryContext) {
Set<QueryableEntry> smallestIndexedResult = null;
List<Set<QueryableEntry>> otherIndexedResults = new LinkedList<Set<QueryableEntry>>();
List<Predicate> lsNoIndexPredicates = null;
for (Predicate predicate : predicates) {
boolean indexed = false;
if (predicate instanceof IndexAwarePredicate) {
IndexAwarePredicate iap = (IndexAwarePredicate) predicate;
if (iap.isIndexed(queryContext)) {
indexed = true;
Set<QueryableEntry> s = iap.filter(queryContext);
if (smallestIndexedResult == null) {
smallestIndexedResult = s;
} else if (s.size() < smallestIndexedResult.size()) {
otherIndexedResults.add(smallestIndexedResult);
smallestIndexedResult = s;
} else {
otherIndexedResults.add(s);
}
} else {
if (lsNoIndexPredicates == null) {
lsNoIndexPredicates = new LinkedList<Predicate>();
lsNoIndexPredicates.add(predicate);
}
}
}
if (!indexed) {
if (lsNoIndexPredicates == null) {
lsNoIndexPredicates = new LinkedList<Predicate>();
}
lsNoIndexPredicates.add(predicate);
}
}
if (smallestIndexedResult == null) {
return null;
}
return new AndResultSet(smallestIndexedResult, otherIndexedResults, lsNoIndexPredicates);
}
@Override
public boolean isIndexed(QueryContext queryContext) {
for (Predicate predicate : predicates) {
if (predicate instanceof IndexAwarePredicate) {
IndexAwarePredicate iap = (IndexAwarePredicate) predicate;
if (iap.isIndexed(queryContext)) {
return true;
}
}
}
return false;
}
@Override
public boolean apply(Map.Entry mapEntry) {
for (Predicate predicate : predicates) {
if (!predicate.apply(mapEntry)) {
return false;
}
}
return true;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append("(");
int size = predicates.length;
for (int i = 0; i < size; i++) {
if (i > 0) {
sb.append(" AND ");
}
sb.append(predicates[i]);
}
sb.append(")");
return sb.toString();
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
out.writeInt(predicates.length);
for (Predicate predicate : predicates) {
out.writeObject(predicate);
}
}
@Override
public void readData(ObjectDataInput in) throws IOException {
int size = in.readInt();
predicates = new Predicate[size];
for (int i = 0; i < size; i++) {
predicates[i] = in.readObject();
}
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_query_Predicates.java |
183 | @RunWith(HazelcastSerialClassRunner.class)
@Category(QuickTest.class)
public class ClientListTest {
static final String name = "test";
static HazelcastInstance hz;
static IList list;
@BeforeClass
public static void init(){
Config config = new Config();
Hazelcast.newHazelcastInstance(config);
hz = HazelcastClient.newHazelcastClient();
list = hz.getList(name);
}
@AfterClass
public static void destroy() {
hz.shutdown();
Hazelcast.shutdownAll();
}
@Before
@After
public void clear() throws IOException {
list.clear();
}
@Test
public void testAddAll() {
List l = new ArrayList();
l.add("item1");
l.add("item2");
assertTrue(list.addAll(l));
assertEquals(2, list.size());
assertTrue(list.addAll(1, l));
assertEquals(4, list.size());
assertEquals("item1", list.get(0));
assertEquals("item1", list.get(1));
assertEquals("item2", list.get(2));
assertEquals("item2", list.get(3));
}
@Test
public void testAddSetRemove() {
assertTrue(list.add("item1"));
assertTrue(list.add("item2"));
list.add(0,"item3");
assertEquals(3, list.size());
Object o = list.set(2, "item4");
assertEquals("item2", o);
assertEquals(3, list.size());
assertEquals("item3", list.get(0));
assertEquals("item1", list.get(1));
assertEquals("item4", list.get(2));
assertFalse(list.remove("item2"));
assertTrue(list.remove("item3"));
o = list.remove(1);
assertEquals("item4", o);
assertEquals(1, list.size());
assertEquals("item1", list.get(0));
}
@Test
public void testIndexOf(){
assertTrue(list.add("item1"));
assertTrue(list.add("item2"));
assertTrue(list.add("item1"));
assertTrue(list.add("item4"));
assertEquals(-1, list.indexOf("item5"));
assertEquals(0, list.indexOf("item1"));
assertEquals(-1, list.lastIndexOf("item6"));
assertEquals(2, list.lastIndexOf("item1"));
}
@Test
public void testIterator(){
assertTrue(list.add("item1"));
assertTrue(list.add("item2"));
assertTrue(list.add("item1"));
assertTrue(list.add("item4"));
Iterator iter = list.iterator();
assertEquals("item1",iter.next());
assertEquals("item2",iter.next());
assertEquals("item1",iter.next());
assertEquals("item4",iter.next());
assertFalse(iter.hasNext());
ListIterator listIterator = list.listIterator(2);
assertEquals("item1",listIterator.next());
assertEquals("item4",listIterator.next());
assertFalse(listIterator.hasNext());
List l = list.subList(1, 3);
assertEquals(2, l.size());
assertEquals("item2", l.get(0));
assertEquals("item1", l.get(1));
}
@Test
public void testContains(){
assertTrue(list.add("item1"));
assertTrue(list.add("item2"));
assertTrue(list.add("item1"));
assertTrue(list.add("item4"));
assertFalse(list.contains("item3"));
assertTrue(list.contains("item2"));
List l = new ArrayList();
l.add("item4");
l.add("item3");
assertFalse(list.containsAll(l));
assertTrue(list.add("item3"));
assertTrue(list.containsAll(l));
}
@Test
public void removeRetainAll(){
assertTrue(list.add("item1"));
assertTrue(list.add("item2"));
assertTrue(list.add("item1"));
assertTrue(list.add("item4"));
List l = new ArrayList();
l.add("item4");
l.add("item3");
assertTrue(list.removeAll(l));
assertEquals(3, list.size());
assertFalse(list.removeAll(l));
assertEquals(3, list.size());
l.clear();
l.add("item1");
l.add("item2");
assertFalse(list.retainAll(l));
assertEquals(3, list.size());
l.clear();
assertTrue(list.retainAll(l));
assertEquals(0, list.size());
}
@Test
public void testListener() throws Exception {
// final ISet tempSet = server.getSet(name);
final IList tempList = list;
final CountDownLatch latch = new CountDownLatch(6);
ItemListener listener = new ItemListener() {
public void itemAdded(ItemEvent itemEvent) {
latch.countDown();
}
public void itemRemoved(ItemEvent item) {
}
};
String registrationId = tempList.addItemListener(listener, true);
new Thread(){
public void run() {
for (int i=0; i<5; i++){
tempList.add("item" + i);
}
tempList.add("done");
}
}.start();
assertTrue(latch.await(20, TimeUnit.SECONDS));
}
} | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_collections_ClientListTest.java |
611 | @Component("blSandBoxResolver")
public class BroadleafSandBoxResolverImpl implements BroadleafSandBoxResolver {
private final Log LOG = LogFactory.getLog(BroadleafSandBoxResolverImpl.class);
/**
* Property used to disable sandbox mode. Some implementations will want to
* turn off sandboxes in production.
*/
protected Boolean sandBoxPreviewEnabled = true;
// Request Parameters and Attributes for Sandbox Mode properties - mostly values to manage dates.
private static String SANDBOX_ID_VAR = "blSandboxId";
private static String SANDBOX_DATE_TIME_VAR = "blSandboxDateTime";
private static final SimpleDateFormat CONTENT_DATE_FORMATTER = new SimpleDateFormat("yyyyMMddHHmm");
private static final SimpleDateFormat CONTENT_DATE_DISPLAY_FORMATTER = new SimpleDateFormat("MM/dd/yyyy");
private static final SimpleDateFormat CONTENT_DATE_DISPLAY_HOURS_FORMATTER = new SimpleDateFormat("h");
private static final SimpleDateFormat CONTENT_DATE_DISPLAY_MINUTES_FORMATTER = new SimpleDateFormat("mm");
private static final SimpleDateFormat CONTENT_DATE_PARSE_FORMAT = new SimpleDateFormat("MM/dd/yyyy hh:mm aa");
private static String SANDBOX_DATE_TIME_RIBBON_OVERRIDE_PARAM = "blSandboxDateTimeRibbonOverride";
private static final String SANDBOX_DISPLAY_DATE_TIME_DATE_PARAM = "blSandboxDisplayDateTimeDate";
private static final String SANDBOX_DISPLAY_DATE_TIME_HOURS_PARAM = "blSandboxDisplayDateTimeHours";
private static final String SANDBOX_DISPLAY_DATE_TIME_MINUTES_PARAM = "blSandboxDisplayDateTimeMinutes";
private static final String SANDBOX_DISPLAY_DATE_TIME_AMPM_PARAM = "blSandboxDisplayDateTimeAMPM";
/**
* Request attribute to store the current sandbox
*/
public static String SANDBOX_VAR = "blSandbox";
@Resource(name = "blSandBoxDao")
private SandBoxDao sandBoxDao;
/**
* Determines the current sandbox based on other parameters on the request such as
* the blSandBoxId parameters.
*
* If the {@link #getSandBoxPreviewEnabled()}, then this method will not return a user
* SandBox.
*
*/
@Override
public SandBox resolveSandBox(HttpServletRequest request, Site site) {
return resolveSandBox(new ServletWebRequest(request), site);
}
@Override
public SandBox resolveSandBox(WebRequest request, Site site) {
SandBox currentSandbox = null;
if (!sandBoxPreviewEnabled) {
if (LOG.isTraceEnabled()) {
LOG.trace("Sandbox preview disabled. Setting sandbox to production");
}
request.setAttribute(SANDBOX_VAR, currentSandbox, WebRequest.SCOPE_REQUEST);
} else {
Long sandboxId = null;
// Clear the sandBox - second parameter is to support legacy implementations.
if ( (request.getParameter("blClearSandBox") == null) || (request.getParameter("blSandboxDateTimeRibbonProduction") == null)) {
sandboxId = lookupSandboxId(request);
} else {
if (LOG.isTraceEnabled()) {
LOG.trace("Removing sandbox from session.");
}
if (BLCRequestUtils.isOKtoUseSession(request)) {
request.removeAttribute(SANDBOX_DATE_TIME_VAR, WebRequest.SCOPE_GLOBAL_SESSION);
request.removeAttribute(SANDBOX_ID_VAR, WebRequest.SCOPE_GLOBAL_SESSION);
}
}
if (sandboxId != null) {
currentSandbox = sandBoxDao.retrieve(sandboxId);
request.setAttribute(SANDBOX_VAR, currentSandbox, WebRequest.SCOPE_REQUEST);
if (currentSandbox != null && !SandBoxType.PRODUCTION.equals(currentSandbox.getSandBoxType())) {
setContentTime(request);
}
}
if (currentSandbox == null && site != null) {
currentSandbox = site.getProductionSandbox();
}
}
if (LOG.isTraceEnabled()) {
if (currentSandbox != null) {
LOG.trace("Serving request using sandbox: " + currentSandbox);
} else {
LOG.trace("Serving request without a sandbox.");
}
}
Date currentSystemDateTime = SystemTime.asDate(true);
Calendar sandboxDateTimeCalendar = Calendar.getInstance();
sandboxDateTimeCalendar.setTime(currentSystemDateTime);
request.setAttribute(SANDBOX_DISPLAY_DATE_TIME_DATE_PARAM, CONTENT_DATE_DISPLAY_FORMATTER.format(currentSystemDateTime), WebRequest.SCOPE_REQUEST);
request.setAttribute(SANDBOX_DISPLAY_DATE_TIME_HOURS_PARAM, CONTENT_DATE_DISPLAY_HOURS_FORMATTER.format(currentSystemDateTime), WebRequest.SCOPE_REQUEST);
request.setAttribute(SANDBOX_DISPLAY_DATE_TIME_MINUTES_PARAM, CONTENT_DATE_DISPLAY_MINUTES_FORMATTER.format(currentSystemDateTime), WebRequest.SCOPE_REQUEST);
request.setAttribute(SANDBOX_DISPLAY_DATE_TIME_AMPM_PARAM, sandboxDateTimeCalendar.get(Calendar.AM_PM), WebRequest.SCOPE_REQUEST);
return currentSandbox;
}
/**
* If another filter has already set the language as a request attribute, that will be honored.
* Otherwise, the request parameter is checked followed by the session attribute.
*
* @param request
* @param site
* @return
*/
private Long lookupSandboxId(WebRequest request) {
String sandboxIdStr = request.getParameter(SANDBOX_ID_VAR);
Long sandboxId = null;
if (sandboxIdStr != null) {
try {
sandboxId = Long.valueOf(sandboxIdStr);
if (LOG.isTraceEnabled()) {
LOG.trace("SandboxId found on request " + sandboxId);
}
} catch (NumberFormatException nfe) {
LOG.warn("blcSandboxId parameter could not be converted into a Long", nfe);
}
}
if (BLCRequestUtils.isOKtoUseSession(request)) {
if (sandboxId == null) {
// check the session
sandboxId = (Long) request.getAttribute(SANDBOX_ID_VAR, WebRequest.SCOPE_GLOBAL_SESSION);
if (LOG.isTraceEnabled()) {
if (sandboxId != null) {
LOG.trace("SandboxId found in session " + sandboxId);
}
}
} else {
request.setAttribute(SANDBOX_ID_VAR, sandboxId, WebRequest.SCOPE_GLOBAL_SESSION);
}
}
return sandboxId;
}
/**
* Allows a user in SandBox mode to override the current time and date being used by the system.
*
* @param request
*/
private void setContentTime(WebRequest request) {
String sandboxDateTimeParam = request.getParameter(SANDBOX_DATE_TIME_VAR);
if (sandBoxPreviewEnabled) {
sandboxDateTimeParam = null;
}
Date overrideTime = null;
try {
if (request.getParameter(SANDBOX_DATE_TIME_RIBBON_OVERRIDE_PARAM) != null) {
overrideTime = readDateFromRequest(request);
} else if (sandboxDateTimeParam != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("Setting date/time using " + sandboxDateTimeParam);
}
overrideTime = CONTENT_DATE_FORMATTER.parse(sandboxDateTimeParam);
}
} catch (ParseException e) {
LOG.debug(e);
}
if (BLCRequestUtils.isOKtoUseSession(request)) {
if (overrideTime == null) {
overrideTime = (Date) request.getAttribute(SANDBOX_DATE_TIME_VAR, WebRequest.SCOPE_GLOBAL_SESSION);
} else {
if (LOG.isDebugEnabled()) {
LOG.debug("Setting date-time for sandbox mode to " + overrideTime + " for sandboxDateTimeParam = " + sandboxDateTimeParam);
}
request.setAttribute(SANDBOX_DATE_TIME_VAR, overrideTime, WebRequest.SCOPE_GLOBAL_SESSION);
}
}
if (overrideTime != null) {
FixedTimeSource ft = new FixedTimeSource(overrideTime.getTime());
SystemTime.setLocalTimeSource(ft);
} else {
SystemTime.resetLocalTimeSource();
}
}
private Date readDateFromRequest(WebRequest request) throws ParseException {
String date = request.getParameter(SANDBOX_DISPLAY_DATE_TIME_DATE_PARAM);
String minutes = request.getParameter(SANDBOX_DISPLAY_DATE_TIME_MINUTES_PARAM);
String hours = request.getParameter(SANDBOX_DISPLAY_DATE_TIME_HOURS_PARAM);
String ampm = request.getParameter(SANDBOX_DISPLAY_DATE_TIME_AMPM_PARAM);
if (StringUtils.isEmpty(minutes)) {
minutes = Integer.toString(SystemTime.asCalendar().get(Calendar.MINUTE));
}
if (StringUtils.isEmpty(hours)) {
hours = Integer.toString(SystemTime.asCalendar().get(Calendar.HOUR_OF_DAY));
}
String dateString = date + " " + hours + ":" + minutes + " " + ampm;
if (LOG.isDebugEnabled()) {
LOG.debug("Setting date/time using " + dateString);
}
Date parsedDate = CONTENT_DATE_PARSE_FORMAT.parse(dateString);
return parsedDate;
}
/**
* Sets whether or not the site can be viewed in preview mode.
* @return
*/
public Boolean getSandBoxPreviewEnabled() {
return sandBoxPreviewEnabled;
}
public void setSandBoxPreviewEnabled(Boolean sandBoxPreviewEnabled) {
this.sandBoxPreviewEnabled = sandBoxPreviewEnabled;
}
} | 1no label
| common_src_main_java_org_broadleafcommerce_common_web_BroadleafSandBoxResolverImpl.java |
4,469 | class RecoveryFileChunkRequest extends TransportRequest {
private long recoveryId;
private ShardId shardId;
private String name;
private long position;
private long length;
private String checksum;
private BytesReference content;
RecoveryFileChunkRequest() {
}
RecoveryFileChunkRequest(long recoveryId, ShardId shardId, String name, long position, long length, String checksum, BytesArray content) {
this.recoveryId = recoveryId;
this.shardId = shardId;
this.name = name;
this.position = position;
this.length = length;
this.checksum = checksum;
this.content = content;
}
public long recoveryId() {
return this.recoveryId;
}
public ShardId shardId() {
return shardId;
}
public String name() {
return name;
}
public long position() {
return position;
}
@Nullable
public String checksum() {
return this.checksum;
}
public long length() {
return length;
}
public BytesReference content() {
return content;
}
public RecoveryFileChunkRequest readFileChunk(StreamInput in) throws IOException {
RecoveryFileChunkRequest request = new RecoveryFileChunkRequest();
request.readFrom(in);
return request;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
recoveryId = in.readLong();
shardId = ShardId.readShardId(in);
name = in.readString();
position = in.readVLong();
length = in.readVLong();
checksum = in.readOptionalString();
content = in.readBytesReference();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeLong(recoveryId);
shardId.writeTo(out);
out.writeString(name);
out.writeVLong(position);
out.writeVLong(length);
out.writeOptionalString(checksum);
out.writeBytesReference(content);
}
@Override
public String toString() {
return shardId + ": name='" + name + '\'' +
", position=" + position +
", length=" + length;
}
} | 1no label
| src_main_java_org_elasticsearch_indices_recovery_RecoveryFileChunkRequest.java |
// Fire-and-forget worker thread: performs a single map read and signals the
// latch only when that read fails. 'm' and 'latch' are captured from the
// enclosing test method.
new Thread(){
    public void run() {
        try {
            m.get("ali");
        } catch (Exception ignored) {
            // Failure is the expected outcome in this scenario: count down so
            // the waiting test can proceed. The exception itself is irrelevant.
            latch.countDown();
        }
    }
}.start();
| hazelcast-client_src_test_java_com_hazelcast_client_map_ClientMapIssueTest.java |
1,474 | public class CyclicPathFilterMap {
public static final String CLASS = Tokens.makeNamespace(CyclicPathFilterMap.class) + ".class";
public enum Counters {
PATHS_FILTERED
}
public static Configuration createConfiguration(final Class<? extends Element> klass) {
final Configuration configuration = new EmptyConfiguration();
configuration.setClass(CLASS, klass, Element.class);
configuration.setBoolean(Tokens.TITAN_HADOOP_PIPELINE_TRACK_PATHS, true);
return configuration;
}
public static class Map extends Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex> {
private boolean isVertex;
private HashSet set = new HashSet();
@Override
public void setup(final Mapper.Context context) throws IOException, InterruptedException {
this.isVertex = context.getConfiguration().getClass(CLASS, Element.class, Element.class).equals(Vertex.class);
}
@Override
public void map(final NullWritable key, final FaunusVertex value, final Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex>.Context context) throws IOException, InterruptedException {
long pathsFiltered = 0l;
if (this.isVertex) {
if (value.hasPaths()) {
final Iterator<List<FaunusPathElement.MicroElement>> itty = value.getPaths().iterator();
while (itty.hasNext()) {
final List<FaunusPathElement.MicroElement> path = itty.next();
this.set.clear();
this.set.addAll(path);
if (path.size() != this.set.size()) {
itty.remove();
pathsFiltered++;
}
}
}
} else {
for (final Edge e : value.getEdges(Direction.BOTH)) {
final StandardFaunusEdge edge = (StandardFaunusEdge) e;
if (edge.hasPaths()) {
final Iterator<List<FaunusPathElement.MicroElement>> itty = edge.getPaths().iterator();
while (itty.hasNext()) {
final List<FaunusPathElement.MicroElement> path = itty.next();
this.set.clear();
this.set.addAll(path);
if (path.size() != this.set.size()) {
itty.remove();
pathsFiltered++;
}
}
}
}
}
DEFAULT_COMPAT.incrementContextCounter(context, Counters.PATHS_FILTERED, pathsFiltered);
context.write(NullWritable.get(), value);
}
}
} | 1no label
| titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_mapreduce_filter_CyclicPathFilterMap.java |
862 | public class OSecurityShared extends OSharedResourceAdaptive implements OSecurity, OCloseable {
public static final String RESTRICTED_CLASSNAME = "ORestricted";
public static final String IDENTITY_CLASSNAME = "OIdentity";
public static final String ALLOW_ALL_FIELD = "_allow";
public static final String ALLOW_READ_FIELD = "_allowRead";
public static final String ALLOW_UPDATE_FIELD = "_allowUpdate";
public static final String ALLOW_DELETE_FIELD = "_allowDelete";
public static final String ONCREATE_IDENTITY_TYPE = "onCreate.identityType";
public static final String ONCREATE_FIELD = "onCreate.fields";
public OSecurityShared() {
super(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean(), OGlobalConfiguration.STORAGE_LOCK_TIMEOUT
.getValueAsInteger(), true);
}
public OIdentifiable allowUser(final ODocument iDocument, final String iAllowFieldName, final String iUserName) {
final OUser user = ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSecurity().getUser(iUserName);
if (user == null)
throw new IllegalArgumentException("User '" + iUserName + "' not found");
return allowIdentity(iDocument, iAllowFieldName, user.getDocument().getIdentity());
}
public OIdentifiable allowRole(final ODocument iDocument, final String iAllowFieldName, final String iRoleName) {
final ORole role = ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSecurity().getRole(iRoleName);
if (role == null)
throw new IllegalArgumentException("Role '" + iRoleName + "' not found");
return allowIdentity(iDocument, iAllowFieldName, role.getDocument().getIdentity());
}
public OIdentifiable allowIdentity(final ODocument iDocument, final String iAllowFieldName, final OIdentifiable iId) {
Set<OIdentifiable> field = iDocument.field(iAllowFieldName);
if (field == null) {
field = new OMVRBTreeRIDSet(iDocument);
iDocument.field(iAllowFieldName, field);
}
field.add(iId);
return iId;
}
public OIdentifiable disallowUser(final ODocument iDocument, final String iAllowFieldName, final String iUserName) {
final OUser user = ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSecurity().getUser(iUserName);
if (user == null)
throw new IllegalArgumentException("User '" + iUserName + "' not found");
return disallowIdentity(iDocument, iAllowFieldName, user.getDocument().getIdentity());
}
public OIdentifiable disallowRole(final ODocument iDocument, final String iAllowFieldName, final String iRoleName) {
final ORole role = ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSecurity().getRole(iRoleName);
if (role == null)
throw new IllegalArgumentException("Role '" + iRoleName + "' not found");
return disallowIdentity(iDocument, iAllowFieldName, role.getDocument().getIdentity());
}
public OIdentifiable disallowIdentity(final ODocument iDocument, final String iAllowFieldName, final OIdentifiable iId) {
Set<OIdentifiable> field = iDocument.field(iAllowFieldName);
if (field != null)
field.remove(iId);
return iId;
}
public boolean isAllowed(final Set<OIdentifiable> iAllowAll, final Set<OIdentifiable> iAllowOperation) {
if (iAllowAll == null || iAllowAll.isEmpty())
return true;
final OUser currentUser = ODatabaseRecordThreadLocal.INSTANCE.get().getUser();
if (currentUser != null) {
// CHECK IF CURRENT USER IS ENLISTED
if (!iAllowAll.contains(currentUser.getDocument().getIdentity())) {
// CHECK AGAINST SPECIFIC _ALLOW OPERATION
if (iAllowOperation != null && iAllowOperation.contains(currentUser.getDocument().getIdentity()))
return true;
// CHECK IF AT LEAST ONE OF THE USER'S ROLES IS ENLISTED
for (ORole r : currentUser.getRoles()) {
// CHECK AGAINST GENERIC _ALLOW
if (iAllowAll.contains(r.getDocument().getIdentity()))
return true;
// CHECK AGAINST SPECIFIC _ALLOW OPERATION
if (iAllowOperation != null && iAllowOperation.contains(r.getDocument().getIdentity()))
return true;
}
return false;
}
}
return true;
}
public OUser authenticate(final String iUserName, final String iUserPassword) {
acquireExclusiveLock();
try {
final String dbName = getDatabase().getName();
final OUser user = getUser(iUserName);
if (user == null)
throw new OSecurityAccessException(dbName, "User or password not valid for database: '" + dbName + "'");
if (user.getAccountStatus() != STATUSES.ACTIVE)
throw new OSecurityAccessException(dbName, "User '" + iUserName + "' is not active");
if (!(getDatabase().getStorage() instanceof OStorageProxy)) {
// CHECK USER & PASSWORD
if (!user.checkPassword(iUserPassword)) {
// WAIT A BIT TO AVOID BRUTE FORCE
try {
Thread.sleep(200);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
throw new OSecurityAccessException(dbName, "User or password not valid for database: '" + dbName + "'");
}
}
return user;
} finally {
releaseExclusiveLock();
}
}
public OUser getUser(final String iUserName) {
acquireExclusiveLock();
try {
final List<ODocument> result = getDatabase().<OCommandRequest> command(
new OSQLSynchQuery<ODocument>("select from OUser where name = '" + iUserName + "' limit 1").setFetchPlan("roles:1"))
.execute();
if (result != null && !result.isEmpty())
return new OUser(result.get(0));
return null;
} finally {
releaseExclusiveLock();
}
}
public OUser createUser(final String iUserName, final String iUserPassword, final String... iRoles) {
acquireExclusiveLock();
try {
final OUser user = new OUser(iUserName, iUserPassword);
if (iRoles != null)
for (String r : iRoles) {
user.addRole(r);
}
return user.save();
} finally {
releaseExclusiveLock();
}
}
public OUser createUser(final String iUserName, final String iUserPassword, final ORole... iRoles) {
acquireExclusiveLock();
try {
final OUser user = new OUser(iUserName, iUserPassword);
if (iRoles != null)
for (ORole r : iRoles) {
user.addRole(r);
}
return user.save();
} finally {
releaseExclusiveLock();
}
}
public boolean dropUser(final String iUserName) {
acquireExclusiveLock();
try {
final Number removed = getDatabase().<OCommandRequest> command(
new OCommandSQL("delete from OUser where name = '" + iUserName + "'")).execute();
return removed != null && removed.intValue() > 0;
} finally {
releaseExclusiveLock();
}
}
public ORole getRole(final OIdentifiable iRole) {
acquireExclusiveLock();
try {
final ODocument doc = iRole.getRecord();
if ("ORole".equals(doc.getClassName()))
return new ORole(doc);
return null;
} finally {
releaseExclusiveLock();
}
}
public ORole getRole(final String iRoleName) {
acquireExclusiveLock();
try {
final List<ODocument> result = getDatabase().<OCommandRequest> command(
new OSQLSynchQuery<ODocument>("select from ORole where name = '" + iRoleName + "' limit 1")).execute();
if (result != null && !result.isEmpty())
return new ORole(result.get(0));
return null;
} catch (Exception ex) {
OLogManager.instance().error(this, "Failed to get role : " + iRoleName + " " + ex.getMessage());
return null;
} finally {
releaseExclusiveLock();
}
}
public ORole createRole(final String iRoleName, final ORole.ALLOW_MODES iAllowMode) {
return createRole(iRoleName, null, iAllowMode);
}
public ORole createRole(final String iRoleName, final ORole iParent, final ORole.ALLOW_MODES iAllowMode) {
acquireExclusiveLock();
try {
final ORole role = new ORole(iRoleName, iParent, iAllowMode);
return role.save();
} finally {
releaseExclusiveLock();
}
}
public boolean dropRole(final String iRoleName) {
acquireExclusiveLock();
try {
final Number removed = getDatabase().<OCommandRequest> command(
new OCommandSQL("delete from ORole where name = '" + iRoleName + "'")).execute();
return removed != null && removed.intValue() > 0;
} finally {
releaseExclusiveLock();
}
}
public List<ODocument> getAllUsers() {
acquireExclusiveLock();
try {
return getDatabase().<OCommandRequest> command(new OSQLSynchQuery<ODocument>("select from OUser")).execute();
} finally {
releaseExclusiveLock();
}
}
public List<ODocument> getAllRoles() {
acquireExclusiveLock();
try {
return getDatabase().<OCommandRequest> command(new OSQLSynchQuery<ODocument>("select from ORole")).execute();
} finally {
releaseExclusiveLock();
}
}
public OUser create() {
acquireExclusiveLock();
try {
if (!getDatabase().getMetadata().getSchema().getClasses().isEmpty())
return null;
final OUser adminUser = createMetadata();
final ORole readerRole = createRole("reader", ORole.ALLOW_MODES.DENY_ALL_BUT);
readerRole.addRule(ODatabaseSecurityResources.DATABASE, ORole.PERMISSION_READ);
readerRole.addRule(ODatabaseSecurityResources.SCHEMA, ORole.PERMISSION_READ);
readerRole.addRule(ODatabaseSecurityResources.CLUSTER + "." + OMetadataDefault.CLUSTER_INTERNAL_NAME, ORole.PERMISSION_READ);
readerRole.addRule(ODatabaseSecurityResources.CLUSTER + ".orole", ORole.PERMISSION_READ);
readerRole.addRule(ODatabaseSecurityResources.CLUSTER + ".ouser", ORole.PERMISSION_READ);
readerRole.addRule(ODatabaseSecurityResources.ALL_CLASSES, ORole.PERMISSION_READ);
readerRole.addRule(ODatabaseSecurityResources.ALL_CLUSTERS, ORole.PERMISSION_READ);
readerRole.addRule(ODatabaseSecurityResources.COMMAND, ORole.PERMISSION_READ);
readerRole.addRule(ODatabaseSecurityResources.RECORD_HOOK, ORole.PERMISSION_READ);
readerRole.save();
createUser("reader", "reader", new String[] { readerRole.getName() });
final ORole writerRole = createRole("writer", ORole.ALLOW_MODES.DENY_ALL_BUT);
writerRole.addRule(ODatabaseSecurityResources.DATABASE, ORole.PERMISSION_READ);
writerRole.addRule(ODatabaseSecurityResources.SCHEMA, ORole.PERMISSION_READ + ORole.PERMISSION_CREATE
+ ORole.PERMISSION_UPDATE);
writerRole.addRule(ODatabaseSecurityResources.CLUSTER + "." + OMetadataDefault.CLUSTER_INTERNAL_NAME, ORole.PERMISSION_READ);
writerRole.addRule(ODatabaseSecurityResources.CLUSTER + ".orole", ORole.PERMISSION_READ);
writerRole.addRule(ODatabaseSecurityResources.CLUSTER + ".ouser", ORole.PERMISSION_READ);
writerRole.addRule(ODatabaseSecurityResources.ALL_CLASSES, ORole.PERMISSION_ALL);
writerRole.addRule(ODatabaseSecurityResources.ALL_CLUSTERS, ORole.PERMISSION_ALL);
writerRole.addRule(ODatabaseSecurityResources.COMMAND, ORole.PERMISSION_ALL);
writerRole.addRule(ODatabaseSecurityResources.RECORD_HOOK, ORole.PERMISSION_ALL);
writerRole.save();
createUser("writer", "writer", new String[] { writerRole.getName() });
return adminUser;
} finally {
releaseExclusiveLock();
}
}
/**
* Repairs the security structure if broken by creating the ADMIN role and user with default password.
*
* @return
*/
public OUser repair() {
acquireExclusiveLock();
try {
getDatabase().getMetadata().getIndexManager().dropIndex("OUser.name");
getDatabase().getMetadata().getIndexManager().dropIndex("ORole.name");
return createMetadata();
} finally {
releaseExclusiveLock();
}
}
public OUser createMetadata() {
final ODatabaseRecord database = getDatabase();
OClass identityClass = database.getMetadata().getSchema().getClass(IDENTITY_CLASSNAME); // SINCE 1.2.0
if (identityClass == null)
identityClass = database.getMetadata().getSchema().createAbstractClass(IDENTITY_CLASSNAME);
OClass roleClass = database.getMetadata().getSchema().getClass("ORole");
if (roleClass == null)
roleClass = database.getMetadata().getSchema().createClass("ORole", identityClass);
else if (roleClass.getSuperClass() == null)
// MIGRATE AUTOMATICALLY TO 1.2.0
roleClass.setSuperClass(identityClass);
if (!roleClass.existsProperty("name")) {
roleClass.createProperty("name", OType.STRING).setMandatory(true).setNotNull(true).setCollate("ci");
roleClass.createIndex("ORole.name", INDEX_TYPE.UNIQUE, ONullOutputListener.INSTANCE, "name");
} else {
final Set<OIndex<?>> indexes = roleClass.getInvolvedIndexes("name");
if (indexes.isEmpty())
roleClass.createIndex("ORole.name", INDEX_TYPE.UNIQUE, ONullOutputListener.INSTANCE, "name");
}
if (!roleClass.existsProperty("mode"))
roleClass.createProperty("mode", OType.BYTE);
if (!roleClass.existsProperty("rules"))
roleClass.createProperty("rules", OType.EMBEDDEDMAP, OType.BYTE);
if (!roleClass.existsProperty("inheritedRole"))
roleClass.createProperty("inheritedRole", OType.LINK, roleClass);
OClass userClass = database.getMetadata().getSchema().getClass("OUser");
if (userClass == null)
userClass = database.getMetadata().getSchema().createClass("OUser", identityClass);
else if (userClass.getSuperClass() == null)
// MIGRATE AUTOMATICALLY TO 1.2.0
userClass.setSuperClass(identityClass);
if (!userClass.existsProperty("name")) {
userClass.createProperty("name", OType.STRING).setMandatory(true).setNotNull(true).setCollate("ci");
userClass.createIndex("OUser.name", INDEX_TYPE.UNIQUE, ONullOutputListener.INSTANCE, "name");
}
if (!userClass.existsProperty("password"))
userClass.createProperty("password", OType.STRING).setMandatory(true).setNotNull(true);
if (!userClass.existsProperty("roles"))
userClass.createProperty("roles", OType.LINKSET, roleClass);
if (!userClass.existsProperty("status"))
userClass.createProperty("status", OType.STRING).setMandatory(true).setNotNull(true);
// CREATE ROLES AND USERS
ORole adminRole = getRole(ORole.ADMIN);
if (adminRole == null) {
adminRole = createRole(ORole.ADMIN, ORole.ALLOW_MODES.ALLOW_ALL_BUT);
adminRole.addRule(ODatabaseSecurityResources.BYPASS_RESTRICTED, ORole.PERMISSION_ALL).save();
}
OUser adminUser = getUser(OUser.ADMIN);
if (adminUser == null)
adminUser = createUser(OUser.ADMIN, OUser.ADMIN, adminRole);
// SINCE 1.2.0
OClass restrictedClass = database.getMetadata().getSchema().getClass(RESTRICTED_CLASSNAME);
if (restrictedClass == null)
restrictedClass = database.getMetadata().getSchema().createAbstractClass(RESTRICTED_CLASSNAME);
if (!restrictedClass.existsProperty(ALLOW_ALL_FIELD))
restrictedClass.createProperty(ALLOW_ALL_FIELD, OType.LINKSET, database.getMetadata().getSchema()
.getClass(IDENTITY_CLASSNAME));
if (!restrictedClass.existsProperty(ALLOW_READ_FIELD))
restrictedClass.createProperty(ALLOW_READ_FIELD, OType.LINKSET,
database.getMetadata().getSchema().getClass(IDENTITY_CLASSNAME));
if (!restrictedClass.existsProperty(ALLOW_UPDATE_FIELD))
restrictedClass.createProperty(ALLOW_UPDATE_FIELD, OType.LINKSET,
database.getMetadata().getSchema().getClass(IDENTITY_CLASSNAME));
if (!restrictedClass.existsProperty(ALLOW_DELETE_FIELD))
restrictedClass.createProperty(ALLOW_DELETE_FIELD, OType.LINKSET,
database.getMetadata().getSchema().getClass(IDENTITY_CLASSNAME));
return adminUser;
}
public void close() {
}
public void load() {
final OClass userClass = getDatabase().getMetadata().getSchema().getClass("OUser");
if (userClass != null) {
// @COMPATIBILITY <1.3.0
if (!userClass.existsProperty("status")) {
userClass.createProperty("status", OType.STRING).setMandatory(true).setNotNull(true);
}
OProperty p = userClass.getProperty("name");
if (p == null)
p = userClass.createProperty("name", OType.STRING).setMandatory(true).setNotNull(true);
if (userClass.getInvolvedIndexes("name") == null)
p.createIndex(INDEX_TYPE.UNIQUE);
// ROLE
final OClass roleClass = getDatabase().getMetadata().getSchema().getClass("ORole");
if (!roleClass.existsProperty("inheritedRole")) {
roleClass.createProperty("inheritedRole", OType.LINK, roleClass);
}
p = roleClass.getProperty("name");
if (p == null)
p = roleClass.createProperty("name", OType.STRING).setMandatory(true).setNotNull(true);
if (roleClass.getInvolvedIndexes("name") == null)
p.createIndex(INDEX_TYPE.UNIQUE);
}
}
private ODatabaseRecord getDatabase() {
return ODatabaseRecordThreadLocal.INSTANCE.get();
}
public void createClassTrigger() {
final ODatabaseRecord db = ODatabaseRecordThreadLocal.INSTANCE.get();
OClass classTrigger = db.getMetadata().getSchema().getClass(OClassTrigger.CLASSNAME);
if (classTrigger == null)
classTrigger = db.getMetadata().getSchema().createAbstractClass(OClassTrigger.CLASSNAME);
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_metadata_security_OSecurityShared.java |
1,237 | public class UnitDependencyVisitor extends Visitor {
private final PhasedUnit phasedUnit;
private Set<Declaration> alreadyDone;
/**
 * Creates a visitor that records which other units the given phased unit
 * depends on.
 *
 * @param phasedUnit the compilation unit whose dependencies are being collected
 */
public UnitDependencyVisitor(PhasedUnit phasedUnit) {
    this.phasedUnit = phasedUnit;
    // Tracks declarations already processed so storeDependency() visits each one only once.
    alreadyDone = new HashSet<Declaration>();
}
private void storeDependency(Declaration d) {
if (d!=null && (d instanceof UnionType ||
d instanceof IntersectionType ||
!alreadyDone.contains(d))) {
if (!(d instanceof UnionType ||
d instanceof IntersectionType)) {
alreadyDone.add(d);
}
if (d instanceof TypeDeclaration) {
TypeDeclaration td = (TypeDeclaration) d;
storeDependency(td.getExtendedTypeDeclaration());
for (TypeDeclaration st: td.getSatisfiedTypeDeclarations()) {
storeDependency(st);
}
List<TypeDeclaration> caseTypes = td.getCaseTypeDeclarations();
if (caseTypes!=null) {
for (TypeDeclaration ct: caseTypes) {
storeDependency(ct);
}
}
}
if (d instanceof TypedDeclaration) {
//TODO: is this really necessary?
storeDependency(((TypedDeclaration) d).getTypeDeclaration());
}
Declaration rd = d.getRefinedDeclaration();
if (rd!=d) {
storeDependency(rd); //this one is needed for default arguments, I think
}
Unit declarationUnit = d.getUnit();
if (declarationUnit != null && ! (declarationUnit instanceof TypeFactory)) {
String moduleName = declarationUnit.getPackage().getModule().getNameAsString();
if (!moduleName.equals(Module.LANGUAGE_MODULE_NAME) &&
!JDKUtils.isJDKModule(moduleName)
&& !JDKUtils.isOracleJDKModule(moduleName)) {
Unit currentUnit = phasedUnit.getUnit();
String currentUnitPath = phasedUnit.getUnitFile().getPath();
String currentUnitName = currentUnit.getFilename();
String dependedOnUnitName = declarationUnit.getFilename();
String currentUnitPackage = currentUnit.getPackage().getNameAsString();
String dependedOnPackage = declarationUnit.getPackage().getNameAsString();
if (!dependedOnUnitName.equals(currentUnitName) ||
!dependedOnPackage.equals(currentUnitPackage)) {
// WOW : Ceylon Abstract Data types and swith case would be cool here ;)
if (declarationUnit instanceof ProjectSourceFile) {
declarationUnit.getDependentsOf().add(currentUnitPath);
}
else if (declarationUnit instanceof ICrossProjectReference) {
ProjectSourceFile originalProjectSourceFile = ((ICrossProjectReference) declarationUnit).getOriginalSourceFile();
if (originalProjectSourceFile != null) {
originalProjectSourceFile.getDependentsOf().add(currentUnitPath);
}
}
else if (declarationUnit instanceof ExternalSourceFile) {
// Don't manage them : they cannot change ... Well they might if we were using these dependencies to manage module
// removal. But since module removal triggers a classpath container update and so a full build, it's not necessary.
// Might change in the future
}
else if (declarationUnit instanceof CeylonBinaryUnit) {
declarationUnit.getDependentsOf().add(currentUnitPath);
}
else if (declarationUnit instanceof JavaCompilationUnit) {
//TODO: this does not seem to work for cross-project deps
// We should introduce a CrossProjectJavaUnit that can return
// the original JavaCompilationUnit from the original project
declarationUnit.getDependentsOf().add(currentUnitPath);
}
else if (declarationUnit instanceof JavaClassFile) {
//TODO: All the dependencies to class files are also added... It is really useful ?
// I assume in the case of the classes in the classes or exploded dirs, it might be,
// but not sure it is also used not in the case of jar-located classes
declarationUnit.getDependentsOf().add(currentUnitPath);
}
else {
assert(false);
}
}
}
}
}
}
@Override
public void visit(Tree.MemberOrTypeExpression that) {
storeDependency(that.getDeclaration());
super.visit(that);
}
@Override
public void visit(Tree.NamedArgument that) {
//TODO: is this really necessary?
storeDependency(that.getParameter());
super.visit(that);
}
@Override
public void visit(Tree.SequencedArgument that) {
//TODO: is this really necessary?
storeDependency(that.getParameter());
super.visit(that);
}
@Override
public void visit(Tree.PositionalArgument that) {
//TODO: is this really necessary?
storeDependency(that.getParameter());
super.visit(that);
}
void storeDependency(Parameter p) {
if (p!=null) {
storeDependency(p.getModel());
}
}
@Override
public void visit(Tree.Type that) {
ProducedType tm = that.getTypeModel();
if (tm!=null) {
storeDependency(tm.getDeclaration());
}
super.visit(that);
}
@Override
public void visit(Tree.ImportMemberOrType that) {
storeDependency(that.getDeclarationModel());
super.visit(that);
}
@Override
public void visit(Tree.TypeArguments that) {
//TODO: is this really necessary?
List<ProducedType> tms = that.getTypeModels();
if (tms!=null) {
for (ProducedType pt: tms) {
if (pt!=null) {
storeDependency(pt.getDeclaration());
}
}
}
super.visit(that);
}
@Override
public void visit(Tree.Term that) {
//TODO: is this really necessary?
ProducedType tm = that.getTypeModel();
if (tm!=null) {
storeDependency(tm.getDeclaration());
}
super.visit(that);
}
} | 1no label
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_core_builder_UnitDependencyVisitor.java |
(new java.security.PrivilegedExceptionAction<sun.misc.Unsafe>() {
    // Locate the sun.misc.Unsafe singleton by scanning its declared fields
    // reflectively; this works even when the caller is not loaded by the
    // bootstrap class loader (where Unsafe.getUnsafe() would throw).
    public sun.misc.Unsafe run() throws Exception {
        Class<sun.misc.Unsafe> k = sun.misc.Unsafe.class;
        for (java.lang.reflect.Field f : k.getDeclaredFields()) {
            f.setAccessible(true);
            Object x = f.get(null);
            // The singleton is the static field whose value is an Unsafe.
            if (k.isInstance(x))
                return k.cast(x);
        }
        throw new NoSuchFieldError("the Unsafe");
    }});
| src_main_java_jsr166e_CompletableFuture.java |
highlighter = new XPostingsHighlighter() {
    @Override
    protected char getMultiValuedSeparator(String field) {
        //U+2029 PARAGRAPH SEPARATOR (PS): each value holds a discrete passage for highlighting
        // (8233 is the decimal code point; the int literal narrows to char here)
        return 8233;
    }
};
| src_test_java_org_apache_lucene_search_postingshighlight_XPostingsHighlighterTests.java |
68 | public interface TitanProperty extends TitanRelation {
/**
* Returns the property key of this property
*
* @return property key of this property
* @see PropertyKey
*/
public PropertyKey getPropertyKey();
/**
* Returns the vertex on which this property is incident.
*
* @return The vertex of this property.
*/
public TitanVertex getVertex();
/**
* Returns the value of this property (possibly cast to the expected type).
*
* @return value of this property
* @throws ClassCastException if the value cannot be cast to the expected type
*/
public<O> O getValue();
} | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_core_TitanProperty.java |
/**
 * Base class for text-based command requests (e.g. SQL). Holds the command
 * text and implements the binary wire format used to ship the request and
 * its parameters between client and server.
 * <p>
 * Wire layout (written by {@link #toStream(OMemoryStream)}, read back in the
 * same order by {@link #fromStream(OMemoryStream)}):
 * command text, boolean "simple params present" flag, optional params
 * document, boolean "composite-key params present" flag, optional
 * composite-key document.
 */
@SuppressWarnings("serial")
public abstract class OCommandRequestTextAbstract extends OCommandRequestAbstract implements OCommandRequestText {
    protected String text;

    protected OCommandRequestTextAbstract() {
    }

    protected OCommandRequestTextAbstract(final String iText) {
        if (iText == null)
            throw new IllegalArgumentException("Text cannot be null");
        text = iText.trim();
    }

    /**
     * Delegates the execution to the configured command executor.
     */
    @SuppressWarnings("unchecked")
    public <RET> RET execute(final Object... iArgs) {
        setParameters(iArgs);
        return (RET) ODatabaseRecordThreadLocal.INSTANCE.get().getStorage().command(this);
    }

    public String getText() {
        return text;
    }

    public OCommandRequestText setText(final String iText) {
        this.text = iText;
        return this;
    }

    /** Deserializes this request from a raw byte array. */
    public OSerializableStream fromStream(byte[] iStream) throws OSerializationException {
        final OMemoryStream buffer = new OMemoryStream(iStream);
        fromStream(buffer);
        return this;
    }

    /** Serializes this request to a raw byte array. */
    public byte[] toStream() throws OSerializationException {
        final OMemoryStream buffer = new OMemoryStream();
        return toStream(buffer);
    }

    @Override
    public String toString() {
        return "?." + text;
    }

    protected byte[] toStream(final OMemoryStream buffer) {
        buffer.set(text);
        if (parameters == null || parameters.size() == 0) {
            // simple params are absent
            buffer.set(false);
            // composite keys are absent
            buffer.set(false);
        } else {
            // Split parameters into composite keys (binary-serialized) and
            // everything else (Strings are normalized through the string
            // serializer so types survive the round trip).
            final Map<Object, Object> params = new HashMap<Object, Object>();
            final Map<Object, byte[]> compositeKeyParams = new HashMap<Object, byte[]>();
            for (final Entry<Object, Object> paramEntry : parameters.entrySet())
                if (paramEntry.getValue() instanceof OCompositeKey) {
                    final OCompositeKey compositeKey = (OCompositeKey) paramEntry.getValue();
                    final int bufferSize = OCompositeKeySerializer.INSTANCE.getObjectSize(compositeKey);
                    final byte[] stream = new byte[bufferSize];
                    OCompositeKeySerializer.INSTANCE.serialize(compositeKey, stream, 0);
                    compositeKeyParams.put(paramEntry.getKey(), stream);
                } else if (paramEntry.getValue() instanceof String) {
                    final StringBuilder builder = new StringBuilder();
                    ORecordSerializerStringAbstract.simpleValueToStream(builder, OType.STRING, paramEntry.getValue());
                    params.put(paramEntry.getKey(), builder.toString());
                } else
                    params.put(paramEntry.getKey(), paramEntry.getValue());
            // Each group is preceded by a presence flag and, if present,
            // shipped as an embedded ODocument.
            buffer.set(!params.isEmpty());
            if (!params.isEmpty()) {
                final ODocument param = new ODocument();
                param.field("params", params);
                buffer.set(param.toStream());
            }
            buffer.set(!compositeKeyParams.isEmpty());
            if (!compositeKeyParams.isEmpty()) {
                final ODocument compositeKey = new ODocument();
                compositeKey.field("compositeKeyParams", compositeKeyParams);
                buffer.set(compositeKey.toStream());
            }
        }
        return buffer.toByteArray();
    }

    protected void fromStream(final OMemoryStream buffer) {
        text = buffer.getAsString();
        parameters = null;
        // Read order must mirror toStream(): simple params first.
        final boolean simpleParams = buffer.getAsBoolean();
        if (simpleParams) {
            final byte[] paramBuffer = buffer.getAsByteArray();
            final ODocument param = new ODocument();
            param.fromStream(paramBuffer);
            Map<String, Object> params = param.field("params");
            parameters = new HashMap<Object, Object>();
            for (Entry<String, Object> p : params.entrySet()) {
                final Object value;
                if (p.getValue() instanceof String)
                    value = ORecordSerializerStringAbstract.getTypeValue((String) p.getValue());
                else
                    value = p.getValue();
                // Numeric keys are positional parameters; restore them as ints.
                if (Character.isDigit(p.getKey().charAt(0)))
                    parameters.put(Integer.parseInt(p.getKey()), value);
                else
                    parameters.put(p.getKey(), value);
            }
        }
        // Then the composite-key params group.
        final boolean compositeKeyParamsPresent = buffer.getAsBoolean();
        if (compositeKeyParamsPresent) {
            final byte[] paramBuffer = buffer.getAsByteArray();
            final ODocument param = new ODocument();
            param.fromStream(paramBuffer);
            final Map<String, Object> compositeKeyParams = param.field("compositeKeyParams");
            if (parameters == null)
                parameters = new HashMap<Object, Object>();
            for (final Entry<String, Object> p : compositeKeyParams.entrySet()) {
                final Object value = OCompositeKeySerializer.INSTANCE
                        .deserialize(OStringSerializerHelper.getBinaryContent(p.getValue()), 0);
                if (Character.isDigit(p.getKey().charAt(0)))
                    parameters.put(Integer.parseInt(p.getKey()), value);
                else
                    parameters.put(p.getKey(), value);
            }
        }
    }
}
| core_src_main_java_com_orientechnologies_orient_core_command_OCommandRequestTextAbstract.java |
/**
 * Test-only client wrapper that forwards every call to a delegate
 * {@link InternalClient}, except that {@link #prepareSearch(String...)} forces
 * a search type picked at random at construction time. This shakes out tests
 * that implicitly depend on one particular search type.
 */
public class RandomizingClient implements InternalClient {

    // Search type chosen once per instance and applied to every prepareSearch().
    private final SearchType defaultSearchType;
    private final InternalClient delegate;

    public RandomizingClient(InternalClient client, Random random) {
        this.delegate = client;
        // we don't use the QUERY_AND_FETCH types that break quite a lot of tests
        // given that they return `size*num_shards` hits instead of `size`
        defaultSearchType = RandomPicks.randomFrom(random, Arrays.asList(
                SearchType.DFS_QUERY_THEN_FETCH,
                SearchType.QUERY_THEN_FETCH));
    }

    @Override
    public void close() {
        delegate.close();
    }

    @Override
    public AdminClient admin() {
        return delegate.admin();
    }

    @Override
    public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> ActionFuture<Response> execute(
            Action<Request, Response, RequestBuilder> action, Request request) {
        return delegate.execute(action, request);
    }

    @Override
    public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void execute(
            Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
        delegate.execute(action, request, listener);
    }

    @Override
    public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> RequestBuilder prepareExecute(
            Action<Request, Response, RequestBuilder> action) {
        return delegate.prepareExecute(action);
    }

    @Override
    public ActionFuture<IndexResponse> index(IndexRequest request) {
        return delegate.index(request);
    }

    @Override
    public void index(IndexRequest request, ActionListener<IndexResponse> listener) {
        delegate.index(request, listener);
    }

    @Override
    public IndexRequestBuilder prepareIndex() {
        return delegate.prepareIndex();
    }

    @Override
    public ActionFuture<UpdateResponse> update(UpdateRequest request) {
        return delegate.update(request);
    }

    @Override
    public void update(UpdateRequest request, ActionListener<UpdateResponse> listener) {
        delegate.update(request, listener);
    }

    @Override
    public UpdateRequestBuilder prepareUpdate() {
        return delegate.prepareUpdate();
    }

    @Override
    public UpdateRequestBuilder prepareUpdate(String index, String type, String id) {
        return delegate.prepareUpdate(index, type, id);
    }

    @Override
    public IndexRequestBuilder prepareIndex(String index, String type) {
        return delegate.prepareIndex(index, type);
    }

    @Override
    public IndexRequestBuilder prepareIndex(String index, String type, String id) {
        return delegate.prepareIndex(index, type, id);
    }

    @Override
    public ActionFuture<DeleteResponse> delete(DeleteRequest request) {
        return delegate.delete(request);
    }

    @Override
    public void delete(DeleteRequest request, ActionListener<DeleteResponse> listener) {
        delegate.delete(request, listener);
    }

    @Override
    public DeleteRequestBuilder prepareDelete() {
        return delegate.prepareDelete();
    }

    @Override
    public DeleteRequestBuilder prepareDelete(String index, String type, String id) {
        return delegate.prepareDelete(index, type, id);
    }

    @Override
    public ActionFuture<BulkResponse> bulk(BulkRequest request) {
        return delegate.bulk(request);
    }

    @Override
    public void bulk(BulkRequest request, ActionListener<BulkResponse> listener) {
        delegate.bulk(request, listener);
    }

    @Override
    public BulkRequestBuilder prepareBulk() {
        return delegate.prepareBulk();
    }

    @Override
    public ActionFuture<DeleteByQueryResponse> deleteByQuery(DeleteByQueryRequest request) {
        return delegate.deleteByQuery(request);
    }

    @Override
    public void deleteByQuery(DeleteByQueryRequest request, ActionListener<DeleteByQueryResponse> listener) {
        delegate.deleteByQuery(request, listener);
    }

    @Override
    public DeleteByQueryRequestBuilder prepareDeleteByQuery(String... indices) {
        return delegate.prepareDeleteByQuery(indices);
    }

    @Override
    public ActionFuture<GetResponse> get(GetRequest request) {
        return delegate.get(request);
    }

    @Override
    public void get(GetRequest request, ActionListener<GetResponse> listener) {
        delegate.get(request, listener);
    }

    @Override
    public GetRequestBuilder prepareGet() {
        return delegate.prepareGet();
    }

    @Override
    public GetRequestBuilder prepareGet(String index, String type, String id) {
        return delegate.prepareGet(index, type, id);
    }

    @Override
    public ActionFuture<MultiGetResponse> multiGet(MultiGetRequest request) {
        return delegate.multiGet(request);
    }

    @Override
    public void multiGet(MultiGetRequest request, ActionListener<MultiGetResponse> listener) {
        delegate.multiGet(request, listener);
    }

    @Override
    public MultiGetRequestBuilder prepareMultiGet() {
        return delegate.prepareMultiGet();
    }

    @Override
    public ActionFuture<CountResponse> count(CountRequest request) {
        return delegate.count(request);
    }

    @Override
    public void count(CountRequest request, ActionListener<CountResponse> listener) {
        delegate.count(request, listener);
    }

    @Override
    public CountRequestBuilder prepareCount(String... indices) {
        return delegate.prepareCount(indices);
    }

    @Override
    public ActionFuture<SuggestResponse> suggest(SuggestRequest request) {
        return delegate.suggest(request);
    }

    @Override
    public void suggest(SuggestRequest request, ActionListener<SuggestResponse> listener) {
        delegate.suggest(request, listener);
    }

    @Override
    public SuggestRequestBuilder prepareSuggest(String... indices) {
        return delegate.prepareSuggest(indices);
    }

    @Override
    public ActionFuture<SearchResponse> search(SearchRequest request) {
        return delegate.search(request);
    }

    @Override
    public void search(SearchRequest request, ActionListener<SearchResponse> listener) {
        delegate.search(request, listener);
    }

    // The only behavioral override: inject the randomized search type.
    @Override
    public SearchRequestBuilder prepareSearch(String... indices) {
        return delegate.prepareSearch(indices).setSearchType(defaultSearchType);
    }

    @Override
    public ActionFuture<SearchResponse> searchScroll(SearchScrollRequest request) {
        return delegate.searchScroll(request);
    }

    @Override
    public void searchScroll(SearchScrollRequest request, ActionListener<SearchResponse> listener) {
        delegate.searchScroll(request, listener);
    }

    @Override
    public SearchScrollRequestBuilder prepareSearchScroll(String scrollId) {
        return delegate.prepareSearchScroll(scrollId);
    }

    @Override
    public ActionFuture<MultiSearchResponse> multiSearch(MultiSearchRequest request) {
        return delegate.multiSearch(request);
    }

    @Override
    public void multiSearch(MultiSearchRequest request, ActionListener<MultiSearchResponse> listener) {
        delegate.multiSearch(request, listener);
    }

    @Override
    public MultiSearchRequestBuilder prepareMultiSearch() {
        return delegate.prepareMultiSearch();
    }

    @Override
    public ActionFuture<SearchResponse> moreLikeThis(MoreLikeThisRequest request) {
        return delegate.moreLikeThis(request);
    }

    @Override
    public void moreLikeThis(MoreLikeThisRequest request, ActionListener<SearchResponse> listener) {
        delegate.moreLikeThis(request, listener);
    }

    @Override
    public MoreLikeThisRequestBuilder prepareMoreLikeThis(String index, String type, String id) {
        return delegate.prepareMoreLikeThis(index, type, id);
    }

    @Override
    public ActionFuture<TermVectorResponse> termVector(TermVectorRequest request) {
        return delegate.termVector(request);
    }

    @Override
    public void termVector(TermVectorRequest request, ActionListener<TermVectorResponse> listener) {
        delegate.termVector(request, listener);
    }

    @Override
    public TermVectorRequestBuilder prepareTermVector(String index, String type, String id) {
        return delegate.prepareTermVector(index, type, id);
    }

    @Override
    public ActionFuture<MultiTermVectorsResponse> multiTermVectors(MultiTermVectorsRequest request) {
        return delegate.multiTermVectors(request);
    }

    @Override
    public void multiTermVectors(MultiTermVectorsRequest request, ActionListener<MultiTermVectorsResponse> listener) {
        delegate.multiTermVectors(request, listener);
    }

    @Override
    public MultiTermVectorsRequestBuilder prepareMultiTermVectors() {
        return delegate.prepareMultiTermVectors();
    }

    @Override
    public ActionFuture<PercolateResponse> percolate(PercolateRequest request) {
        return delegate.percolate(request);
    }

    @Override
    public void percolate(PercolateRequest request, ActionListener<PercolateResponse> listener) {
        delegate.percolate(request, listener);
    }

    @Override
    public PercolateRequestBuilder preparePercolate() {
        return delegate.preparePercolate();
    }

    @Override
    public ActionFuture<MultiPercolateResponse> multiPercolate(MultiPercolateRequest request) {
        return delegate.multiPercolate(request);
    }

    @Override
    public void multiPercolate(MultiPercolateRequest request, ActionListener<MultiPercolateResponse> listener) {
        delegate.multiPercolate(request, listener);
    }

    @Override
    public MultiPercolateRequestBuilder prepareMultiPercolate() {
        return delegate.prepareMultiPercolate();
    }

    @Override
    public ExplainRequestBuilder prepareExplain(String index, String type, String id) {
        return delegate.prepareExplain(index, type, id);
    }

    @Override
    public ActionFuture<ExplainResponse> explain(ExplainRequest request) {
        return delegate.explain(request);
    }

    @Override
    public void explain(ExplainRequest request, ActionListener<ExplainResponse> listener) {
        delegate.explain(request, listener);
    }

    @Override
    public ClearScrollRequestBuilder prepareClearScroll() {
        return delegate.prepareClearScroll();
    }

    @Override
    public ActionFuture<ClearScrollResponse> clearScroll(ClearScrollRequest request) {
        return delegate.clearScroll(request);
    }

    @Override
    public void clearScroll(ClearScrollRequest request, ActionListener<ClearScrollResponse> listener) {
        delegate.clearScroll(request, listener);
    }

    @Override
    public ThreadPool threadPool() {
        return delegate.threadPool();
    }

    @Override
    public Settings settings() {
        return delegate.settings();
    }

    @Override
    public String toString() {
        // NOTE(review): wraps super.toString() (the wrapper's identity), not
        // delegate.toString() — confirm that is the intended label.
        return "randomized(" + super.toString() + ")";
    }
}
| src_test_java_org_elasticsearch_test_client_RandomizingClient.java |
/**
 * Test-only transaction state that widens the deadlock-prone window: it
 * blocks inside {@link #commitCows()} on a {@link DoubleLatch} so a second
 * thread can be driven into the same critical section deterministically.
 */
private static class DeadlockProneTransactionState extends WritableTransactionState
{
    private final DoubleLatch latch;

    public DeadlockProneTransactionState( LockManager lockManager, NodeManager nodeManager,
            Logging logging, javax.transaction.Transaction tx, RemoteTxHook txHook, TxIdGenerator txIdGenerator, DoubleLatch latch )
    {
        super( lockManager, nodeManager, logging, tx, txHook, txIdGenerator );
        this.latch = latch;
    }

    @Override
    public void commitCows()
    {
        // Pause here until the coordinating test thread releases the latch,
        // then perform the real copy-on-write commit.
        latch.startAndAwaitFinish();
        super.commitCows();
    }
}
| community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_TestCacheUpdateDeadlock.java |
299 | public class UnavailableShardsException extends ElasticsearchException {
public UnavailableShardsException(@Nullable ShardId shardId, String message) {
super(buildMessage(shardId, message));
}
private static String buildMessage(ShardId shardId, String message) {
if (shardId == null) {
return message;
}
return "[" + shardId.index().name() + "][" + shardId.id() + "] " + message;
}
@Override
public RestStatus status() {
return RestStatus.SERVICE_UNAVAILABLE;
}
} | 0true
| src_main_java_org_elasticsearch_action_UnavailableShardsException.java |
@Component("blDynamicFieldPersistenceHandlerHelper")
public class DynamicFieldPersistenceHandlerHelper {

    /**
     * Builds all of the metadata for all of the dynamic properties within a {@link StructuredContentType}, gleaned from
     * the {@link FieldGroup}s and {@link FieldDefinition}s.
     *
     * @param fieldGroups groups that the {@link Property}s are built from
     * @param inheritedType the value that each built {@link FieldMetadata} for each property will use to notate where the
     * dynamic field actually came from (meaning {@link FieldMetadata#setAvailableToTypes(String[])} and {@link FieldMetadata#setInheritedFromType(String)}
     * @return the dynamic properties plus a synthetic hidden "id" property, sorted by order then friendly name then name
     */
    public Property[] buildDynamicPropertyList(List<FieldGroup> fieldGroups, Class<?> inheritedType) {
        List<Property> propertiesList = new ArrayList<Property>();
        // groupCount orders groups 1..n; fieldCount orders fields within a group
        // and is reset when moving to the next group.
        int groupCount = 1;
        int fieldCount = 0;
        for (FieldGroup group : fieldGroups) {
            List<FieldDefinition> definitions = group.getFieldDefinitions();
            for (FieldDefinition definition : definitions) {
                // One Property per field definition, carrying all admin metadata.
                Property property = new Property();
                property.setName(definition.getName());
                BasicFieldMetadata fieldMetadata = new BasicFieldMetadata();
                property.setMetadata(fieldMetadata);
                fieldMetadata.setFieldType(definition.getFieldType());
                fieldMetadata.setMutable(true);
                fieldMetadata.setInheritedFromType(inheritedType.getName());
                fieldMetadata.setAvailableToTypes(new String[] {inheritedType.getName()});
                fieldMetadata.setForeignKeyCollection(false);
                fieldMetadata.setMergedPropertyType(MergedPropertyType.PRIMARY);
                fieldMetadata.setLength(definition.getMaxLength());
                // Translate a field enumeration, when present, into the
                // [name, friendlyName] pairs the admin UI expects.
                if (definition.getFieldEnumeration() != null && !CollectionUtils.isEmpty(definition.getFieldEnumeration().getEnumerationItems())) {
                    int count = definition.getFieldEnumeration().getEnumerationItems().size();
                    String[][] enumItems = new String[count][2];
                    for (int j = 0; j < count; j++) {
                        FieldEnumerationItem item = definition.getFieldEnumeration().getEnumerationItems().get(j);
                        enumItems[j][0] = item.getName();
                        enumItems[j][1] = item.getFriendlyName();
                    }
                    fieldMetadata.setEnumerationValues(enumItems);
                }
                fieldMetadata.setName(definition.getName());
                fieldMetadata.setFriendlyName(definition.getFriendlyName());
                fieldMetadata.setSecurityLevel(definition.getSecurityLevel()==null?"":definition.getSecurityLevel());
                fieldMetadata.setOrder(fieldCount++);
                fieldMetadata.setVisibility(definition.getHiddenFlag()?VisibilityEnum.HIDDEN_ALL:VisibilityEnum.VISIBLE_ALL);
                fieldMetadata.setGroup(group.getName());
                fieldMetadata.setGroupOrder(groupCount);
                fieldMetadata.setTab("General");
                fieldMetadata.setTabOrder(100);
                fieldMetadata.setGroupCollapsed(group.getInitCollapsedFlag());
                fieldMetadata.setExplicitFieldType(SupportedFieldType.UNKNOWN);
                fieldMetadata.setLargeEntry(definition.getTextAreaFlag());
                fieldMetadata.setProminent(false);
                fieldMetadata.setColumnWidth(String.valueOf(definition.getColumnWidth()));
                fieldMetadata.setBroadleafEnumeration("");
                fieldMetadata.setReadOnly(false);
                // Wire up the optional regex validation rule for this field.
                if (definition.getValidationRegEx() != null) {
                    Map<String, String> itemMap = new HashMap<String, String>();
                    itemMap.put("regularExpression", definition.getValidationRegEx());
                    itemMap.put(ConfigurationItem.ERROR_MESSAGE, definition.getValidationErrorMesageKey());
                    fieldMetadata.getValidationConfigurations().put("org.broadleafcommerce.openadmin.server.service.persistence.validation.RegexPropertyValidator", itemMap);
                }
                propertiesList.add(property);
            }
            groupCount++;
            fieldCount = 0;
        }
        // Always append a synthetic, hidden, read-only "id" property so the
        // admin framework can identify records.
        Property property = new Property();
        property.setName("id");
        BasicFieldMetadata fieldMetadata = new BasicFieldMetadata();
        property.setMetadata(fieldMetadata);
        fieldMetadata.setFieldType(SupportedFieldType.ID);
        fieldMetadata.setSecondaryType(SupportedFieldType.INTEGER);
        fieldMetadata.setMutable(true);
        fieldMetadata.setInheritedFromType(inheritedType.getName());
        fieldMetadata.setAvailableToTypes(new String[] {inheritedType.getName()});
        fieldMetadata.setForeignKeyCollection(false);
        fieldMetadata.setMergedPropertyType(MergedPropertyType.PRIMARY);
        fieldMetadata.setName("id");
        fieldMetadata.setFriendlyName("ID");
        fieldMetadata.setSecurityLevel("");
        fieldMetadata.setVisibility(VisibilityEnum.HIDDEN_ALL);
        fieldMetadata.setExplicitFieldType(SupportedFieldType.UNKNOWN);
        fieldMetadata.setLargeEntry(false);
        fieldMetadata.setProminent(false);
        fieldMetadata.setColumnWidth("*");
        fieldMetadata.setBroadleafEnumeration("");
        fieldMetadata.setReadOnly(true);
        propertiesList.add(property);

        Property[] properties = new Property[propertiesList.size()];
        properties = propertiesList.toArray(properties);
        Arrays.sort(properties, new Comparator<Property>() {
            @Override
            public int compare(Property o1, Property o2) {
                /*
                 * First, compare properties based on order fields
                 */
                if (o1.getMetadata().getOrder() != null && o2.getMetadata().getOrder() != null) {
                    return o1.getMetadata().getOrder().compareTo(o2.getMetadata().getOrder());
                } else if (o1.getMetadata().getOrder() != null && o2.getMetadata().getOrder() == null) {
                    /*
                     * Always favor fields that have an order identified
                     */
                    return -1;
                } else if (o1.getMetadata().getOrder() == null && o2.getMetadata().getOrder() != null) {
                    /*
                     * Always favor fields that have an order identified
                     */
                    return 1;
                } else if (o1.getMetadata().getFriendlyName() != null && o2.getMetadata().getFriendlyName() != null) {
                    return o1.getMetadata().getFriendlyName().compareTo(o2.getMetadata().getFriendlyName());
                } else {
                    return o1.getName().compareTo(o2.getName());
                }
            }
        });
        return properties;
    }
}
| admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_admin_server_handler_DynamicFieldPersistenceHandlerHelper.java |
/** Constants used when building and signing AWS API requests. */
public interface Constants {
    // ISO-8601 UTC timestamp pattern used in request signing.
    String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss'Z'";
    // AWS EC2 API version targeted by the requests.
    String DOC_VERSION = "2011-05-15";
    // Signature Version 2 uses HMAC-SHA256.
    String SIGNATURE_METHOD = "HmacSHA256";
    String SIGNATURE_VERSION = "2";
    String GET = "GET";
}
| hazelcast-cloud_src_main_java_com_hazelcast_aws_impl_Constants.java |
/**
 * A single get item inside a multi-get request: index/type/id plus optional
 * routing, stored fields, version constraints and _source filtering. The
 * {@code readFrom}/{@code writeTo} pair defines the wire format; their
 * field order must stay in sync.
 */
public static class Item implements Streamable {
    private String index;
    private String type;
    private String id;
    private String routing;
    private String[] fields;
    private long version = Versions.MATCH_ANY;
    private VersionType versionType = VersionType.INTERNAL;
    private FetchSourceContext fetchSourceContext;

    // For deserialization via readItem(StreamInput).
    Item() {

    }

    /**
     * Constructs a single get item.
     *
     * @param index The index name
     * @param type  The type (can be null)
     * @param id    The id
     */
    public Item(String index, @Nullable String type, String id) {
        this.index = index;
        this.type = type;
        this.id = id;
    }

    public String index() {
        return this.index;
    }

    public Item index(String index) {
        this.index = index;
        return this;
    }

    public String type() {
        return this.type;
    }

    public String id() {
        return this.id;
    }

    /**
     * The routing associated with this document.
     */
    public Item routing(String routing) {
        this.routing = routing;
        return this;
    }

    public String routing() {
        return this.routing;
    }

    // The parent only influences routing, and only when no explicit
    // routing was set; it is not stored separately.
    public Item parent(String parent) {
        if (routing == null) {
            this.routing = parent;
        }
        return this;
    }

    public Item fields(String... fields) {
        this.fields = fields;
        return this;
    }

    public String[] fields() {
        return this.fields;
    }

    public long version() {
        return version;
    }

    public Item version(long version) {
        this.version = version;
        return this;
    }

    public VersionType versionType() {
        return versionType;
    }

    public Item versionType(VersionType versionType) {
        this.versionType = versionType;
        return this;
    }

    public FetchSourceContext fetchSourceContext() {
        return this.fetchSourceContext;
    }

    /**
     * Allows setting the {@link FetchSourceContext} for this request, controlling if and how _source should be returned.
     */
    public Item fetchSourceContext(FetchSourceContext fetchSourceContext) {
        this.fetchSourceContext = fetchSourceContext;
        return this;
    }

    /** Factory used when reading an Item off the wire. */
    public static Item readItem(StreamInput in) throws IOException {
        Item item = new Item();
        item.readFrom(in);
        return item;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        // Read order mirrors writeTo() exactly.
        index = in.readSharedString();
        type = in.readOptionalSharedString();
        id = in.readString();
        routing = in.readOptionalString();
        int size = in.readVInt();
        if (size > 0) {
            fields = new String[size];
            for (int i = 0; i < size; i++) {
                fields[i] = in.readString();
            }
        }
        version = in.readVLong();
        versionType = VersionType.fromValue(in.readByte());
        fetchSourceContext = FetchSourceContext.optionalReadFromStream(in);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeSharedString(index);
        out.writeOptionalSharedString(type);
        out.writeString(id);
        out.writeOptionalString(routing);
        // Fields are length-prefixed; a null array is encoded as length 0.
        if (fields == null) {
            out.writeVInt(0);
        } else {
            out.writeVInt(fields.length);
            for (String field : fields) {
                out.writeString(field);
            }
        }
        out.writeVLong(version);
        out.writeByte(versionType.getValue());
        FetchSourceContext.optionalWriteToStream(fetchSourceContext, out);
    }
}
| src_main_java_org_elasticsearch_action_get_MultiGetRequest.java |
XPostingsHighlighter highlighter = new XPostingsHighlighter(10000) {
    // Test double: bypass stored-field loading and hand back the in-memory
    // text directly for the single requested field/doc.
    @Override
    protected String[][] loadFieldValues(IndexSearcher searcher, String[] fields, int[] docids, int maxLength) throws IOException {
        assertThat(fields.length, equalTo(1));
        assertThat(docids.length, equalTo(1));
        String[][] contents = new String[1][1];
        contents[0][0] = text;
        return contents;
    }

    // Treat the whole field value as one passage instead of sentence-splitting.
    @Override
    protected BreakIterator getBreakIterator(String field) {
        return new WholeBreakIterator();
    }
};
| src_test_java_org_apache_lucene_search_postingshighlight_XPostingsHighlighterTests.java |
222 | public class Orient extends OListenerManger<OOrientListener> {
/** Environment variable pointing at the OrientDB installation directory. */
public static final String ORIENTDB_HOME = "ORIENTDB_HOME";
/** Human-readable description of the database URL syntax. */
public static final String URL_SYNTAX = "<engine>:<db-type>:<db-name>[?<db-param>=<db-value>[&]]*";

protected static final Orient instance = new Orient();
protected static boolean registerDatabaseByPath = false;

// Registered storage engines and open storages, keyed by name.
protected final Map<String, OEngine> engines = new HashMap<String, OEngine>();
protected final Map<String, OStorage> storages = new HashMap<String, OStorage>();
protected final Set<ODatabaseLifecycleListener> dbLifecycleListeners = new HashSet<ODatabaseLifecycleListener>();
protected final ODatabaseFactory databaseFactory = new ODatabaseFactory();
protected final OScriptManager scriptManager = new OScriptManager();
protected OClusterFactory clusterFactory = new ODefaultClusterFactory();
protected ORecordFactoryManager recordFactoryManager = new ORecordFactoryManager();
protected OrientShutdownHook shutdownHook;
// Daemon timer so scheduled tasks do not keep the JVM alive.
protected final Timer timer = new Timer(true);
protected final ThreadGroup threadGroup = new ThreadGroup("OrientDB");
protected final AtomicInteger serialId = new AtomicInteger();
protected OMemoryWatchDog memoryWatchDog;
// Fix: removed a stray empty declaration ("; ;") left after the initializer.
protected OProfilerMBean profiler = new OProfiler();
protected ODatabaseThreadLocalFactory databaseThreadFactory;
// Flipped by startup()/shutdown(); volatile so all threads see the state.
protected volatile boolean active = false;
protected Orient() {
super(new OAdaptiveLock(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean()));
startup();
}
public Orient startup() {
getLock().lock();
try {
if (active)
// ALREADY ACTIVE
return this;
shutdownHook = new OrientShutdownHook();
// REGISTER THE EMBEDDED ENGINE
registerEngine(new OEngineLocal());
registerEngine(new OEngineLocalPaginated());
registerEngine(new OEngineMemory());
registerEngine("com.orientechnologies.orient.client.remote.OEngineRemote");
if (OGlobalConfiguration.PROFILER_ENABLED.getValueAsBoolean())
// ACTIVATE RECORDING OF THE PROFILER
profiler.startRecording();
if (OGlobalConfiguration.ENVIRONMENT_DUMP_CFG_AT_STARTUP.getValueAsBoolean())
OGlobalConfiguration.dumpConfiguration(System.out);
memoryWatchDog = new OMemoryWatchDog();
active = true;
return this;
} finally {
getLock().unlock();
}
}
public Orient shutdown() {
getLock().lock();
try {
if (!active)
return this;
active = false;
if (memoryWatchDog != null) {
// SHUTDOWN IT AND WAIT FOR COMPLETITION
memoryWatchDog.interrupt();
try {
memoryWatchDog.join();
} catch (InterruptedException e) {
}
}
if (shutdownHook != null) {
shutdownHook.cancel();
shutdownHook = null;
}
OLogManager.instance().debug(this, "Orient Engine is shutting down...");
// CALL THE SHUTDOWN ON ALL THE LISTENERS
for (OOrientListener l : browseListeners()) {
if (l != null)
l.onShutdown();
}
// SHUTDOWN ENGINES
for (OEngine engine : engines.values())
engine.shutdown();
engines.clear();
if (databaseFactory != null)
// CLOSE ALL DATABASES
databaseFactory.shutdown();
if (storages != null) {
// CLOSE ALL THE STORAGES
final List<OStorage> storagesCopy = new ArrayList<OStorage>(storages.values());
for (OStorage stg : storagesCopy) {
OLogManager.instance().info(this, "Shutting down storage: " + stg.getName() + "...");
stg.close(true);
}
}
if (OMMapManagerLocator.getInstance() != null)
OMMapManagerLocator.getInstance().shutdown();
if (threadGroup != null)
// STOP ALL THE PENDING THREADS
threadGroup.interrupt();
resetListeners();
timer.purge();
profiler.shutdown();
OLogManager.instance().info(this, "Orient Engine shutdown complete\n");
} finally {
getLock().unlock();
}
return this;
}
public OStorage loadStorage(String iURL) {
if (iURL == null || iURL.length() == 0)
throw new IllegalArgumentException("URL missed");
if (iURL.endsWith("/"))
iURL = iURL.substring(0, iURL.length() - 1);
// SEARCH FOR ENGINE
int pos = iURL.indexOf(':');
if (pos <= 0)
throw new OConfigurationException("Error in database URL: the engine was not specified. Syntax is: " + URL_SYNTAX
+ ". URL was: " + iURL);
final String engineName = iURL.substring(0, pos);
getLock().lock();
try {
final OEngine engine = engines.get(engineName.toLowerCase());
if (engine == null)
throw new OConfigurationException("Error on opening database: the engine '" + engineName + "' was not found. URL was: "
+ iURL + ". Registered engines are: " + engines.keySet());
// SEARCH FOR DB-NAME
iURL = iURL.substring(pos + 1);
pos = iURL.indexOf('?');
Map<String, String> parameters = null;
String dbPath = null;
if (pos > 0) {
dbPath = iURL.substring(0, pos);
iURL = iURL.substring(pos + 1);
// PARSE PARAMETERS
parameters = new HashMap<String, String>();
String[] pairs = iURL.split("&");
String[] kv;
for (String pair : pairs) {
kv = pair.split("=");
if (kv.length < 2)
throw new OConfigurationException("Error on opening database: parameter has no value. Syntax is: " + URL_SYNTAX
+ ". URL was: " + iURL);
parameters.put(kv[0], kv[1]);
}
} else
dbPath = iURL;
final String dbName = registerDatabaseByPath ? dbPath : OIOUtils.getRelativePathIfAny(dbPath, null);
OStorage storage;
if (engine.isShared()) {
// SEARCH IF ALREADY USED
storage = storages.get(dbName);
if (storage == null) {
// NOT FOUND: CREATE IT
storage = engine.createStorage(dbPath, parameters);
storages.put(dbName, storage);
}
} else {
// REGISTER IT WITH A SERIAL NAME TO AVOID BEING REUSED
storage = engine.createStorage(dbPath, parameters);
storages.put(dbName + "__" + serialId.incrementAndGet(), storage);
}
for (OOrientListener l : browseListeners())
l.onStorageRegistered(storage);
return storage;
} finally {
getLock().unlock();
}
}
public OStorage registerStorage(final OStorage iStorage) throws IOException {
getLock().lock();
try {
for (OOrientListener l : browseListeners())
l.onStorageRegistered(iStorage);
if (!storages.containsKey(iStorage.getName()))
storages.put(iStorage.getName(), iStorage);
} finally {
getLock().unlock();
}
return iStorage;
}
public OStorage getStorage(final String iDbName) {
getLock().lock();
try {
return storages.get(iDbName);
} finally {
getLock().unlock();
}
}
public void registerEngine(final OEngine iEngine) {
getLock().lock();
try {
engines.put(iEngine.getName(), iEngine);
} finally {
getLock().unlock();
}
}
private void registerEngine(final String iClassName) {
try {
final Class<?> cls = Class.forName(iClassName);
registerEngine((OEngine) cls.newInstance());
} catch (Exception e) {
}
}
/**
* Returns the engine by its name.
*
* @param iEngineName
* Engine name to retrieve
* @return OEngine instance of found, otherwise null
*/
public OEngine getEngine(final String iEngineName) {
getLock().lock();
try {
return engines.get(iEngineName);
} finally {
getLock().unlock();
}
}
public Set<String> getEngines() {
getLock().lock();
try {
return Collections.unmodifiableSet(engines.keySet());
} finally {
getLock().unlock();
}
}
public void unregisterStorageByName(final String iName) {
final String dbName = registerDatabaseByPath ? iName : OIOUtils.getRelativePathIfAny(iName, null);
final OStorage stg = storages.get(dbName);
unregisterStorage(stg);
}
public void unregisterStorage(final OStorage iStorage) {
if (!active)
// SHUTDOWNING OR NOT ACTIVE: RETURN
return;
if (iStorage == null)
return;
getLock().lock();
try {
// UNREGISTER ALL THE LISTENER ONE BY ONE AVOIDING SELF-RECURSION BY REMOVING FROM THE LIST
final Iterable<OOrientListener> listenerCopy = getListenersCopy();
for (Iterator<OOrientListener> it = listenerCopy.iterator(); it.hasNext();) {
final OOrientListener l = it.next();
unregisterListener(l);
l.onStorageUnregistered(iStorage);
}
for (Entry<String, OStorage> s : storages.entrySet()) {
if (s.getValue().equals(iStorage)) {
storages.remove(s.getKey());
break;
}
}
} finally {
getLock().unlock();
}
}
public Collection<OStorage> getStorages() {
getLock().lock();
try {
return new ArrayList<OStorage>(storages.values());
} finally {
getLock().unlock();
}
}
public Timer getTimer() {
return timer;
}
public void removeShutdownHook() {
if (shutdownHook != null)
Runtime.getRuntime().removeShutdownHook(shutdownHook);
}
public Iterator<ODatabaseLifecycleListener> getDbLifecycleListeners() {
return dbLifecycleListeners.iterator();
}
public void addDbLifecycleListener(final ODatabaseLifecycleListener iListener) {
dbLifecycleListeners.add(iListener);
}
public void removeDbLifecycleListener(final ODatabaseLifecycleListener iListener) {
dbLifecycleListeners.remove(iListener);
}
public static Orient instance() {
return instance;
}
public ThreadGroup getThreadGroup() {
return threadGroup;
}
public ODatabaseThreadLocalFactory getDatabaseThreadFactory() {
return databaseThreadFactory;
}
public OMemoryWatchDog getMemoryWatchDog() {
return memoryWatchDog;
}
public ORecordFactoryManager getRecordFactoryManager() {
return recordFactoryManager;
}
public OClusterFactory getClusterFactory() {
return clusterFactory;
}
public ODatabaseFactory getDatabaseFactory() {
return databaseFactory;
}
public void setRecordFactoryManager(final ORecordFactoryManager iRecordFactoryManager) {
recordFactoryManager = iRecordFactoryManager;
}
public static String getHomePath() {
String v = System.getProperty("orient.home");
if (v == null)
v = System.getProperty(ORIENTDB_HOME);
if (v == null)
v = System.getenv(ORIENTDB_HOME);
return v;
}
public void setClusterFactory(final OClusterFactory clusterFactory) {
this.clusterFactory = clusterFactory;
}
public OProfilerMBean getProfiler() {
return profiler;
}
public void registerThreadDatabaseFactory(final ODatabaseThreadLocalFactory iDatabaseFactory) {
databaseThreadFactory = iDatabaseFactory;
}
public OScriptManager getScriptManager() {
return scriptManager;
}
/**
* Tells if to register database by path. Default is false. Setting to true allows to have multiple databases in different path
* with the same name.
*
* @see #setRegisterDatabaseByPath(boolean)
* @return
*/
public static boolean isRegisterDatabaseByPath() {
return registerDatabaseByPath;
}
/**
* Register database by path. Default is false. Setting to true allows to have multiple databases in different path with the same
* name.
*
* @param iValue
*/
public static void setRegisterDatabaseByPath(final boolean iValue) {
registerDatabaseByPath = iValue;
}
public void setProfiler(final OProfilerMBean iProfiler) {
profiler = iProfiler;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_Orient.java |
196 | public class XmlClientConfigBuilder extends AbstractXmlConfigHelper {
private static final ILogger LOGGER = Logger.getLogger(XmlClientConfigBuilder.class);
private ClientConfig clientConfig;
private InputStream in;
public XmlClientConfigBuilder(String resource) throws IOException {
URL url = ConfigLoader.locateConfig(resource);
if (url == null) {
throw new IllegalArgumentException("Could not load " + resource);
}
this.in = url.openStream();
}
public XmlClientConfigBuilder(File file) throws IOException {
if (file == null) {
throw new NullPointerException("File is null!");
}
in = new FileInputStream(file);
}
public XmlClientConfigBuilder(URL url) throws IOException {
if (url == null) {
throw new NullPointerException("URL is null!");
}
in = url.openStream();
}
public XmlClientConfigBuilder(InputStream in) {
this.in = in;
}
public XmlClientConfigBuilder() {
String configFile = System.getProperty("hazelcast.client.config");
try {
File configurationFile = null;
if (configFile != null) {
configurationFile = new File(configFile);
LOGGER.info("Using configuration file at " + configurationFile.getAbsolutePath());
if (!configurationFile.exists()) {
String msg = "Config file at '" + configurationFile.getAbsolutePath() + "' doesn't exist.";
msg += "\nHazelcast will try to use the hazelcast-client.xml config file in the working directory.";
LOGGER.warning(msg);
configurationFile = null;
}
}
if (configurationFile == null) {
configFile = "hazelcast-client.xml";
configurationFile = new File("hazelcast-client.xml");
if (!configurationFile.exists()) {
configurationFile = null;
}
}
URL configurationUrl;
if (configurationFile != null) {
LOGGER.info("Using configuration file at " + configurationFile.getAbsolutePath());
try {
in = new FileInputStream(configurationFile);
} catch (final Exception e) {
String msg = "Having problem reading config file at '" + configFile + "'.";
msg += "\nException message: " + e.getMessage();
msg += "\nHazelcast will try to use the hazelcast-client.xml config file in classpath.";
LOGGER.warning(msg);
in = null;
}
}
if (in == null) {
LOGGER.info("Looking for hazelcast-client.xml config file in classpath.");
configurationUrl = Config.class.getClassLoader().getResource("hazelcast-client.xml");
if (configurationUrl == null) {
configurationUrl = Config.class.getClassLoader().getResource("hazelcast-client-default.xml");
LOGGER.warning(
"Could not find hazelcast-client.xml in classpath."
+ "\nHazelcast will use hazelcast-client-default.xml config file in jar.");
if (configurationUrl == null) {
LOGGER.warning("Could not find hazelcast-client-default.xml in the classpath!"
+ "\nThis may be due to a wrong-packaged or corrupted jar file.");
return;
}
}
LOGGER.info("Using configuration file " + configurationUrl.getFile() + " in the classpath.");
in = configurationUrl.openStream();
if (in == null) {
throw new IllegalStateException("Cannot read configuration file, giving up.");
}
}
} catch (final Throwable e) {
LOGGER.severe("Error while creating configuration:" + e.getMessage(), e);
}
}
public ClientConfig build() {
return build(Thread.currentThread().getContextClassLoader());
}
public ClientConfig build(ClassLoader classLoader) {
final ClientConfig clientConfig = new ClientConfig();
clientConfig.setClassLoader(classLoader);
try {
parse(clientConfig);
return clientConfig;
} catch (Exception e) {
throw ExceptionUtil.rethrow(e);
}
}
private void parse(ClientConfig clientConfig) throws Exception {
this.clientConfig = clientConfig;
final DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
Document doc;
try {
doc = builder.parse(in);
} catch (final Exception e) {
throw new IllegalStateException("Could not parse configuration file, giving up.");
}
Element element = doc.getDocumentElement();
try {
element.getTextContent();
} catch (final Throwable e) {
domLevel3 = false;
}
handleConfig(element);
}
private void handleConfig(final Element docElement) throws Exception {
for (Node node : new IterableNodeList(docElement.getChildNodes())) {
final String nodeName = cleanNodeName(node.getNodeName());
if ("security".equals(nodeName)) {
handleSecurity(node);
} else if ("proxy-factories".equals(nodeName)) {
handleProxyFactories(node);
} else if ("properties".equals(nodeName)) {
fillProperties(node, clientConfig.getProperties());
} else if ("serialization".equals(nodeName)) {
handleSerialization(node);
} else if ("group".equals(nodeName)) {
handleGroup(node);
} else if ("listeners".equals(nodeName)) {
handleListeners(node);
} else if ("network".equals(nodeName)) {
handleNetwork(node);
} else if ("load-balancer".equals(nodeName)) {
handleLoadBalancer(node);
} else if ("near-cache".equals(nodeName)) {
handleNearCache(node);
} else if ("executor-pool-size".equals(nodeName)) {
final int poolSize = Integer.parseInt(getTextContent(node));
clientConfig.setExecutorPoolSize(poolSize);
}
}
}
private void handleNearCache(Node node) {
final String name = getAttribute(node, "name");
final NearCacheConfig nearCacheConfig = new NearCacheConfig();
for (Node child : new IterableNodeList(node.getChildNodes())) {
final String nodeName = cleanNodeName(child);
if ("max-size".equals(nodeName)) {
nearCacheConfig.setMaxSize(Integer.parseInt(getTextContent(child)));
} else if ("time-to-live-seconds".equals(nodeName)) {
nearCacheConfig.setTimeToLiveSeconds(Integer.parseInt(getTextContent(child)));
} else if ("max-idle-seconds".equals(nodeName)) {
nearCacheConfig.setMaxIdleSeconds(Integer.parseInt(getTextContent(child)));
} else if ("eviction-policy".equals(nodeName)) {
nearCacheConfig.setEvictionPolicy(getTextContent(child));
} else if ("in-memory-format".equals(nodeName)) {
nearCacheConfig.setInMemoryFormat(InMemoryFormat.valueOf(getTextContent(child)));
} else if ("invalidate-on-change".equals(nodeName)) {
nearCacheConfig.setInvalidateOnChange(Boolean.parseBoolean(getTextContent(child)));
} else if ("cache-local-entries".equals(nodeName)) {
nearCacheConfig.setCacheLocalEntries(Boolean.parseBoolean(getTextContent(child)));
}
}
clientConfig.addNearCacheConfig(name, nearCacheConfig);
}
private void handleLoadBalancer(Node node) {
final String type = getAttribute(node, "type");
if ("random".equals(type)) {
clientConfig.setLoadBalancer(new RandomLB());
} else if ("round-robin".equals(type)) {
clientConfig.setLoadBalancer(new RoundRobinLB());
}
}
private void handleNetwork(Node node) {
final ClientNetworkConfig clientNetworkConfig = new ClientNetworkConfig();
for (Node child : new IterableNodeList(node.getChildNodes())) {
final String nodeName = cleanNodeName(child);
if ("cluster-members".equals(nodeName)) {
handleClusterMembers(child, clientNetworkConfig);
} else if ("smart-routing".equals(nodeName)) {
clientNetworkConfig.setSmartRouting(Boolean.parseBoolean(getTextContent(child)));
} else if ("redo-operation".equals(nodeName)) {
clientNetworkConfig.setRedoOperation(Boolean.parseBoolean(getTextContent(child)));
} else if ("connection-timeout".equals(nodeName)) {
clientNetworkConfig.setConnectionTimeout(Integer.parseInt(getTextContent(child)));
} else if ("connection-attempt-period".equals(nodeName)) {
clientNetworkConfig.setConnectionAttemptPeriod(Integer.parseInt(getTextContent(child)));
} else if ("connection-attempt-limit".equals(nodeName)) {
clientNetworkConfig.setConnectionAttemptLimit(Integer.parseInt(getTextContent(child)));
} else if ("socket-options".equals(nodeName)) {
handleSocketOptions(child, clientNetworkConfig);
} else if ("socket-interceptor".equals(nodeName)) {
handleSocketInterceptorConfig(node, clientNetworkConfig);
} else if ("ssl".equals(nodeName)) {
handleSSLConfig(node, clientNetworkConfig);
}
}
clientConfig.setNetworkConfig(clientNetworkConfig);
}
private void handleSSLConfig(final org.w3c.dom.Node node, ClientNetworkConfig clientNetworkConfig) {
SSLConfig sslConfig = new SSLConfig();
final NamedNodeMap atts = node.getAttributes();
final Node enabledNode = atts.getNamedItem("enabled");
final boolean enabled = enabledNode != null && checkTrue(getTextContent(enabledNode).trim());
sslConfig.setEnabled(enabled);
for (org.w3c.dom.Node n : new IterableNodeList(node.getChildNodes())) {
final String nodeName = cleanNodeName(n.getNodeName());
if ("factory-class-name".equals(nodeName)) {
sslConfig.setFactoryClassName(getTextContent(n).trim());
} else if ("properties".equals(nodeName)) {
fillProperties(n, sslConfig.getProperties());
}
}
clientNetworkConfig.setSSLConfig(sslConfig);
}
private void handleSocketOptions(Node node, ClientNetworkConfig clientNetworkConfig) {
SocketOptions socketOptions = clientConfig.getSocketOptions();
for (Node child : new IterableNodeList(node.getChildNodes())) {
final String nodeName = cleanNodeName(child);
if ("tcp-no-delay".equals(nodeName)) {
socketOptions.setTcpNoDelay(Boolean.parseBoolean(getTextContent(child)));
} else if ("keep-alive".equals(nodeName)) {
socketOptions.setKeepAlive(Boolean.parseBoolean(getTextContent(child)));
} else if ("reuse-address".equals(nodeName)) {
socketOptions.setReuseAddress(Boolean.parseBoolean(getTextContent(child)));
} else if ("linger-seconds".equals(nodeName)) {
socketOptions.setLingerSeconds(Integer.parseInt(getTextContent(child)));
} else if ("buffer-size".equals(nodeName)) {
socketOptions.setBufferSize(Integer.parseInt(getTextContent(child)));
}
}
clientNetworkConfig.setSocketOptions(socketOptions);
}
private void handleClusterMembers(Node node, ClientNetworkConfig clientNetworkConfig) {
for (Node child : new IterableNodeList(node.getChildNodes())) {
if ("address".equals(cleanNodeName(child))) {
clientNetworkConfig.addAddress(getTextContent(child));
}
}
}
private void handleListeners(Node node) throws Exception {
for (Node child : new IterableNodeList(node.getChildNodes())) {
if ("listener".equals(cleanNodeName(child))) {
String className = getTextContent(child);
clientConfig.addListenerConfig(new ListenerConfig(className));
}
}
}
private void handleGroup(Node node) {
for (org.w3c.dom.Node n : new IterableNodeList(node.getChildNodes())) {
final String value = getTextContent(n).trim();
final String nodeName = cleanNodeName(n.getNodeName());
if ("name".equals(nodeName)) {
clientConfig.getGroupConfig().setName(value);
} else if ("password".equals(nodeName)) {
clientConfig.getGroupConfig().setPassword(value);
}
}
}
private void handleSerialization(Node node) {
SerializationConfig serializationConfig = parseSerialization(node);
clientConfig.setSerializationConfig(serializationConfig);
}
private void handleProxyFactories(Node node) throws Exception {
for (Node child : new IterableNodeList(node.getChildNodes())) {
final String nodeName = cleanNodeName(child.getNodeName());
if ("proxy-factory".equals(nodeName)) {
handleProxyFactory(child);
}
}
}
private void handleProxyFactory(Node node) throws Exception {
final String service = getAttribute(node, "service");
final String className = getAttribute(node, "class-name");
final ProxyFactoryConfig proxyFactoryConfig = new ProxyFactoryConfig(className, service);
clientConfig.addProxyFactoryConfig(proxyFactoryConfig);
}
private void handleSocketInterceptorConfig(final org.w3c.dom.Node node, ClientNetworkConfig clientNetworkConfig) {
SocketInterceptorConfig socketInterceptorConfig = parseSocketInterceptorConfig(node);
clientNetworkConfig.setSocketInterceptorConfig(socketInterceptorConfig);
}
private void handleSecurity(Node node) throws Exception {
for (Node child : new IterableNodeList(node.getChildNodes())) {
final String nodeName = cleanNodeName(child.getNodeName());
if ("login-credentials".equals(nodeName)) {
handleLoginCredentials(child);
}
}
}
private void handleLoginCredentials(Node node) {
UsernamePasswordCredentials credentials = new UsernamePasswordCredentials();
for (Node child : new IterableNodeList(node.getChildNodes())) {
final String nodeName = cleanNodeName(child.getNodeName());
if ("username".equals(nodeName)) {
credentials.setUsername(getTextContent(child));
} else if ("password".equals(nodeName)) {
credentials.setPassword(getTextContent(child));
}
}
clientConfig.setCredentials(credentials);
}
} | 1no label
| hazelcast-client_src_main_java_com_hazelcast_client_config_XmlClientConfigBuilder.java |
3,386 | public class PagedBytesIndexFieldData extends AbstractBytesIndexFieldData<PagedBytesAtomicFieldData> {
private final CircuitBreakerService breakerService;
public static class Builder implements IndexFieldData.Builder {
@Override
public IndexFieldData<PagedBytesAtomicFieldData> build(Index index, @IndexSettings Settings indexSettings, FieldMapper<?> mapper,
IndexFieldDataCache cache, CircuitBreakerService breakerService) {
return new PagedBytesIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, breakerService);
}
}
public PagedBytesIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames,
FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) {
super(index, indexSettings, fieldNames, fieldDataType, cache);
this.breakerService = breakerService;
}
@Override
public PagedBytesAtomicFieldData loadDirect(AtomicReaderContext context) throws Exception {
AtomicReader reader = context.reader();
PagedBytesEstimator estimator = new PagedBytesEstimator(context, breakerService.getBreaker());
Terms terms = reader.terms(getFieldNames().indexName());
if (terms == null) {
PagedBytesAtomicFieldData emptyData = PagedBytesAtomicFieldData.empty(reader.maxDoc());
estimator.adjustForNoTerms(emptyData.getMemorySizeInBytes());
return emptyData;
}
final PagedBytes bytes = new PagedBytes(15);
final MonotonicAppendingLongBuffer termOrdToBytesOffset = new MonotonicAppendingLongBuffer();
termOrdToBytesOffset.add(0); // first ord is reserved for missing values
final long numTerms;
if (regex == null && frequency == null) {
numTerms = terms.size();
} else {
numTerms = -1;
}
final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat(
FilterSettingFields.ACCEPTABLE_TRANSIENT_OVERHEAD_RATIO, OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
OrdinalsBuilder builder = new OrdinalsBuilder(numTerms, reader.maxDoc(), acceptableTransientOverheadRatio);
// Wrap the context in an estimator and use it to either estimate
// the entire set, or wrap the TermsEnum so it can be calculated
// per-term
PagedBytesAtomicFieldData data = null;
TermsEnum termsEnum = estimator.beforeLoad(terms);
boolean success = false;
try {
// 0 is reserved for "unset"
bytes.copyUsingLengthPrefix(new BytesRef());
DocsEnum docsEnum = null;
for (BytesRef term = termsEnum.next(); term != null; term = termsEnum.next()) {
final long termOrd = builder.nextOrdinal();
assert termOrd == termOrdToBytesOffset.size();
termOrdToBytesOffset.add(bytes.copyUsingLengthPrefix(term));
docsEnum = termsEnum.docs(null, docsEnum, DocsEnum.FLAG_NONE);
for (int docId = docsEnum.nextDoc(); docId != DocsEnum.NO_MORE_DOCS; docId = docsEnum.nextDoc()) {
builder.addDoc(docId);
}
}
final long sizePointer = bytes.getPointer();
PagedBytes.Reader bytesReader = bytes.freeze(true);
final Ordinals ordinals = builder.build(fieldDataType.getSettings());
data = new PagedBytesAtomicFieldData(bytesReader, sizePointer, termOrdToBytesOffset, ordinals);
success = true;
return data;
} finally {
if (!success) {
// If something went wrong, unwind any current estimations we've made
estimator.afterLoad(termsEnum, 0);
} else {
// Call .afterLoad() to adjust the breaker now that we have an exact size
estimator.afterLoad(termsEnum, data.getMemorySizeInBytes());
}
builder.close();
}
}
/**
* Estimator that wraps string field data by either using
* BlockTreeTermsReader, or wrapping the data in a RamAccountingTermsEnum
* if the BlockTreeTermsReader cannot be used.
*/
public class PagedBytesEstimator implements PerValueEstimator {
private final AtomicReaderContext context;
private final MemoryCircuitBreaker breaker;
private long estimatedBytes;
PagedBytesEstimator(AtomicReaderContext context, MemoryCircuitBreaker breaker) {
this.breaker = breaker;
this.context = context;
}
/**
* @return the number of bytes for the term based on the length and ordinal overhead
*/
public long bytesPerValue(BytesRef term) {
long bytes = term.length;
// 64 bytes for miscellaneous overhead
bytes += 64;
// Seems to be about a 1.5x compression per term/ord, plus 1 for some wiggle room
bytes = (long) ((double) bytes / 1.5) + 1;
return bytes;
}
/**
* @return the estimate for loading the entire term set into field data, or 0 if unavailable
*/
public long estimateStringFieldData() {
try {
AtomicReader reader = context.reader();
Terms terms = reader.terms(getFieldNames().indexName());
Fields fields = reader.fields();
final Terms fieldTerms = fields.terms(getFieldNames().indexName());
if (fieldTerms instanceof BlockTreeTermsReader.FieldReader) {
final BlockTreeTermsReader.Stats stats = ((BlockTreeTermsReader.FieldReader) fieldTerms).computeStats();
long totalTermBytes = stats.totalTermBytes;
if (logger.isTraceEnabled()) {
logger.trace("totalTermBytes: {}, terms.size(): {}, terms.getSumDocFreq(): {}",
totalTermBytes, terms.size(), terms.getSumDocFreq());
}
long totalBytes = totalTermBytes + (2 * terms.size()) + (4 * terms.getSumDocFreq());
return totalBytes;
}
} catch (Exception e) {
logger.warn("Unable to estimate memory overhead", e);
}
return 0;
}
/**
* Determine whether the BlockTreeTermsReader.FieldReader can be used
* for estimating the field data, adding the estimate to the circuit
* breaker if it can, otherwise wrapping the terms in a
* RamAccountingTermsEnum to be estimated on a per-term basis.
*
* @param terms terms to be estimated
* @return A possibly wrapped TermsEnum for the terms
* @throws IOException
*/
public TermsEnum beforeLoad(Terms terms) throws IOException {
final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat(
FilterSettingFields.ACCEPTABLE_TRANSIENT_OVERHEAD_RATIO,
OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
AtomicReader reader = context.reader();
// Check if one of the following is present:
// - The OrdinalsBuilder overhead has been tweaked away from the default
// - A field data filter is present
// - A regex filter is present
if (acceptableTransientOverheadRatio != OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO ||
fieldDataType.getSettings().getAsDouble(FilterSettingFields.FREQUENCY_MIN, 0d) != 0d ||
fieldDataType.getSettings().getAsDouble(FilterSettingFields.FREQUENCY_MAX, 0d) != 0d ||
fieldDataType.getSettings().getAsDouble(FilterSettingFields.FREQUENCY_MIN_SEGMENT_SIZE, 0d) != 0d ||
fieldDataType.getSettings().get(FilterSettingFields.REGEX_PATTERN) != null) {
if (logger.isTraceEnabled()) {
logger.trace("Filter exists, can't circuit break normally, using RamAccountingTermsEnum");
}
return new RamAccountingTermsEnum(filter(terms, reader), breaker, this);
} else {
estimatedBytes = this.estimateStringFieldData();
// If we weren't able to estimate, wrap in the RamAccountingTermsEnum
if (estimatedBytes == 0) {
return new RamAccountingTermsEnum(filter(terms, reader), breaker, this);
}
breaker.addEstimateBytesAndMaybeBreak(estimatedBytes);
return filter(terms, reader);
}
}
/**
* Adjust the circuit breaker now that terms have been loaded, getting
* the actual used either from the parameter (if estimation worked for
* the entire set), or from the TermsEnum if it has been wrapped in a
* RamAccountingTermsEnum.
*
* @param termsEnum terms that were loaded
* @param actualUsed actual field data memory usage
*/
public void afterLoad(TermsEnum termsEnum, long actualUsed) {
if (termsEnum instanceof RamAccountingTermsEnum) {
estimatedBytes = ((RamAccountingTermsEnum) termsEnum).getTotalBytes();
}
breaker.addWithoutBreaking(-(estimatedBytes - actualUsed));
}
/**
* Adjust the breaker when no terms were actually loaded, but the field
* data takes up space regardless. For instance, when ordinals are
* used.
* @param actualUsed bytes actually used
*/
public void adjustForNoTerms(long actualUsed) {
breaker.addWithoutBreaking(actualUsed);
}
}
static final class FilterSettingFields {
static final String ACCEPTABLE_TRANSIENT_OVERHEAD_RATIO = "acceptable_transient_overhead_ratio";
static final String FREQUENCY_MIN = "filter.frequency.min";
static final String FREQUENCY_MAX = "filter.frequency.max";
static final String FREQUENCY_MIN_SEGMENT_SIZE = "filter.frequency.min_segment_size";
static final String REGEX_PATTERN = "filter.regex.pattern";
}
} | 1no label
| src_main_java_org_elasticsearch_index_fielddata_plain_PagedBytesIndexFieldData.java |
1,969 | @Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_COUNTRY")
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blStandardElements")
@AdminPresentationClass(friendlyName = "CountryImpl_baseCountry")
public class CountryImpl implements Country {
private static final long serialVersionUID = 1L;
@Id
@Column(name = "ABBREVIATION")
protected String abbreviation;
@Column(name = "NAME", nullable=false)
@AdminPresentation(friendlyName = "CountryImpl_Country", order=12, group = "CountryImpl_Address", prominent = true)
protected String name;
public String getAbbreviation() {
return abbreviation;
}
public void setAbbreviation(String Abbreviation) {
this.abbreviation = Abbreviation;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
CountryImpl other = (CountryImpl) obj;
if (abbreviation == null) {
if (other.abbreviation != null)
return false;
} else if (!abbreviation.equals(other.abbreviation))
return false;
if (name == null) {
if (other.name != null)
return false;
} else if (!name.equals(other.name))
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((abbreviation == null) ? 0 : abbreviation.hashCode());
result = prime * result + ((name == null) ? 0 : name.hashCode());
return result;
}
} | 1no label
| core_broadleaf-profile_src_main_java_org_broadleafcommerce_profile_core_domain_CountryImpl.java |
1,364 | public class OStorageMemory extends OStorageEmbedded {
private final List<ODataSegmentMemory> dataSegments = new ArrayList<ODataSegmentMemory>();
private final List<OClusterMemory> clusters = new ArrayList<OClusterMemory>();
private final Map<String, OClusterMemory> clusterMap = new HashMap<String, OClusterMemory>();
private int defaultClusterId = 0;
private long positionGenerator = 0;
public OStorageMemory(final String iURL) {
super(iURL, iURL, "rw");
configuration = new OStorageConfiguration(this);
}
public void create(final Map<String, Object> iOptions) {
addUser();
lock.acquireExclusiveLock();
try {
addDataSegment(OStorage.DATA_DEFAULT_NAME);
addDataSegment(OMetadataDefault.DATASEGMENT_INDEX_NAME);
// ADD THE METADATA CLUSTER TO STORE INTERNAL STUFF
addCluster(CLUSTER_TYPE.PHYSICAL.toString(), OMetadataDefault.CLUSTER_INTERNAL_NAME, null, null, true);
// ADD THE INDEX CLUSTER TO STORE, BY DEFAULT, ALL THE RECORDS OF INDEXING IN THE INDEX DATA SEGMENT
addCluster(CLUSTER_TYPE.PHYSICAL.toString(), OMetadataDefault.CLUSTER_INDEX_NAME, null,
OMetadataDefault.DATASEGMENT_INDEX_NAME, true);
// ADD THE INDEX CLUSTER TO STORE, BY DEFAULT, ALL THE RECORDS OF INDEXING
addCluster(CLUSTER_TYPE.PHYSICAL.toString(), OMetadataDefault.CLUSTER_MANUAL_INDEX_NAME, null, null, true);
// ADD THE DEFAULT CLUSTER
defaultClusterId = addCluster(CLUSTER_TYPE.PHYSICAL.toString(), CLUSTER_DEFAULT_NAME, null, null, false);
configuration.create();
status = STATUS.OPEN;
} catch (OStorageException e) {
close();
throw e;
} catch (IOException e) {
close();
throw new OStorageException("Error on creation of storage: " + name, e);
} finally {
lock.releaseExclusiveLock();
}
}
public void open(final String iUserName, final String iUserPassword, final Map<String, Object> iOptions) {
addUser();
if (status == STATUS.OPEN)
// ALREADY OPENED: THIS IS THE CASE WHEN A STORAGE INSTANCE IS
// REUSED
return;
lock.acquireExclusiveLock();
try {
if (!exists())
throw new OStorageException("Cannot open the storage '" + name + "' because it does not exist in path: " + url);
status = STATUS.OPEN;
} finally {
lock.releaseExclusiveLock();
}
}
public void close(final boolean iForce) {
lock.acquireExclusiveLock();
try {
if (!checkForClose(iForce))
return;
status = STATUS.CLOSING;
// CLOSE ALL THE CLUSTERS
for (OClusterMemory c : clusters)
if (c != null)
c.close();
clusters.clear();
clusterMap.clear();
// CLOSE THE DATA SEGMENTS
for (ODataSegmentMemory d : dataSegments)
if (d != null)
d.close();
dataSegments.clear();
level2Cache.shutdown();
super.close(iForce);
Orient.instance().unregisterStorage(this);
status = STATUS.CLOSED;
} finally {
lock.releaseExclusiveLock();
}
}
public void delete() {
close(true);
}
@Override
public void backup(OutputStream out, Map<String, Object> options, Callable<Object> callable) throws IOException {
throw new UnsupportedOperationException("backup");
}
@Override
public void restore(InputStream in, Map<String, Object> options, Callable<Object> callable) throws IOException {
throw new UnsupportedOperationException("restore");
}
public void reload() {
}
public int addCluster(final String iClusterType, String iClusterName, final String iLocation, final String iDataSegmentName,
boolean forceListBased, final Object... iParameters) {
iClusterName = iClusterName.toLowerCase();
lock.acquireExclusiveLock();
try {
int clusterId = clusters.size();
for (int i = 0; i < clusters.size(); ++i) {
if (clusters.get(i) == null) {
clusterId = i;
break;
}
}
final OClusterMemory cluster = (OClusterMemory) Orient.instance().getClusterFactory().createCluster(OClusterMemory.TYPE);
cluster.configure(this, clusterId, iClusterName, iLocation, getDataSegmentIdByName(iDataSegmentName), iParameters);
if (clusterId == clusters.size())
// APPEND IT
clusters.add(cluster);
else
// RECYCLE THE FREE POSITION
clusters.set(clusterId, cluster);
clusterMap.put(iClusterName, cluster);
return clusterId;
} finally {
lock.releaseExclusiveLock();
}
}
public int addCluster(String iClusterType, String iClusterName, int iRequestedId, String iLocation, String iDataSegmentName,
boolean forceListBased, Object... iParameters) {
throw new UnsupportedOperationException("This operation is unsupported for " + getType()
+ " storage. If you are doing import please use parameter -preserveClusterIDs=false .");
}
/**
 * Drops the cluster registered under the given id, optionally truncating its
 * content first. The cluster's slot in the positional list is nulled (not
 * removed) so that the ids of the remaining clusters stay stable, its level-2
 * cache entries are freed and its name mapping is removed.
 *
 * @param iClusterId id of the cluster to drop
 * @param iTruncate  when {@code true} the cluster is truncated before deletion
 * @return {@code true} if an existing cluster was dropped, {@code false} if no
 *         cluster was registered under that id or an I/O error occurred
 */
public boolean dropCluster(final int iClusterId, final boolean iTruncate) {
    lock.acquireExclusiveLock();
    try {
        final OCluster c = clusters.get(iClusterId);
        if (c != null) {
            if (iTruncate)
                c.truncate();
            c.delete();
            // Null the slot instead of removing it so other cluster ids stay stable.
            clusters.set(iClusterId, null);
            getLevel2Cache().freeCluster(iClusterId);
            clusterMap.remove(c.getName());
            // BUG FIX: the original fell through and always returned false, so a
            // caller could never distinguish a successful drop from a failure.
            return true;
        }
    } catch (IOException e) {
        // NOTE(review): kept the original swallow-on-error behavior; the failure
        // is reported to the caller only through the false return value. Consider
        // logging here once a logger is available in this scope.
    } finally {
        lock.releaseExclusiveLock();
    }
    return false;
}
public boolean dropDataSegment(final String iName) {
lock.acquireExclusiveLock();
try {
final int id = getDataSegmentIdByName(iName);
final ODataSegment data = dataSegments.get(id);
if (data == null)
return false;
data.drop();
dataSegments.set(id, null);
// UPDATE CONFIGURATION
configuration.dropCluster(id);
return true;
} catch (Exception e) {
OLogManager.instance().exception("Error while removing data segment '" + iName + '\'', e, OStorageException.class);
} finally {
lock.releaseExclusiveLock();
}
return false;
}
public int addDataSegment(final String iDataSegmentName) {
lock.acquireExclusiveLock();
try {
int pos = -1;
for (int i = 0; i < dataSegments.size(); ++i) {
if (dataSegments.get(i) == null) {
pos = i;
break;
}
}
if (pos == -1)
pos = dataSegments.size();
final ODataSegmentMemory dataSegment = new ODataSegmentMemory(iDataSegmentName, pos);
if (pos == dataSegments.size())
dataSegments.add(dataSegment);
else
dataSegments.set(pos, dataSegment);
return pos;
} finally {
lock.releaseExclusiveLock();
}
}
public int addDataSegment(final String iSegmentName, final String iLocation) {
return addDataSegment(iSegmentName);
}
/**
 * Creates a new record: writes the raw content into the requested data segment
 * and registers the resulting physical position inside the target cluster.
 * For hash-based clusters a cluster position is generated (or taken from the
 * rid when the rid is not new); list-based clusters assign it on insertion.
 *
 * @param iDataSegmentId data segment to store the content in
 * @param iRid           record id; its clusterPosition is assigned on success
 * @param iContent       raw record content
 * @param iRecordVersion requested version; when its counter is &gt; 0 it
 *                       overwrites the version assigned by the cluster
 * @param iRecordType    record type byte
 * @param iMode          unused by the memory storage
 * @param iCallback      optional callback invoked with the assigned position
 * @return the physical position of the newly created record
 * @throws OStorageException if the rid already exists or on I/O failure
 */
public OStorageOperationResult<OPhysicalPosition> createRecord(final int iDataSegmentId, final ORecordId iRid,
    final byte[] iContent, ORecordVersion iRecordVersion, final byte iRecordType, final int iMode,
    ORecordCallback<OClusterPosition> iCallback) {
    final long timer = Orient.instance().getProfiler().startChrono();

    lock.acquireSharedLock();
    try {
        final ODataSegmentMemory data = getDataSegmentById(iDataSegmentId);

        // Store the raw content first; the offset becomes the record's position
        // inside the data segment.
        final long offset = data.createRecord(iContent);

        final OCluster cluster = getClusterById(iRid.clusterId);

        // ASSIGN THE POSITION IN THE CLUSTER
        final OPhysicalPosition ppos = new OPhysicalPosition(iDataSegmentId, offset, iRecordType);
        if (cluster.isHashBased()) {
            if (iRid.isNew()) {
                if (OGlobalConfiguration.USE_NODE_ID_CLUSTER_POSITION.getValueAsBoolean()) {
                    ppos.clusterPosition = OClusterPositionFactory.INSTANCE.generateUniqueClusterPosition();
                } else {
                    ppos.clusterPosition = OClusterPositionFactory.INSTANCE.valueOf(positionGenerator++);
                }
            } else {
                ppos.clusterPosition = iRid.clusterPosition;
            }
        }

        if (!cluster.addPhysicalPosition(ppos)) {
            // BUG FIX: free the content written above. The original called
            // data.readRecord(ppos.dataSegmentPos) here, which only read the bytes
            // back and leaked the freshly created entry on a duplicate rid.
            data.deleteRecord(ppos.dataSegmentPos);
            throw new OStorageException("Record with given id " + iRid + " already exists.");
        }

        iRid.clusterPosition = ppos.clusterPosition;

        if (iCallback != null)
            iCallback.call(iRid, iRid.clusterPosition);

        if (iRecordVersion.getCounter() > 0 && iRecordVersion.compareTo(ppos.recordVersion) != 0) {
            // OVERWRITE THE VERSION
            cluster.updateVersion(iRid.clusterPosition, iRecordVersion);
            ppos.recordVersion = iRecordVersion;
        }

        return new OStorageOperationResult<OPhysicalPosition>(ppos);
    } catch (IOException e) {
        throw new OStorageException("Error on create record in cluster: " + iRid.clusterId, e);
    } finally {
        lock.releaseSharedLock();
        Orient.instance().getProfiler()
            .stopChrono(PROFILER_CREATE_RECORD, "Create a record in database", timer, "db.*.data.updateHole");
    }
}
public OStorageOperationResult<ORawBuffer> readRecord(final ORecordId iRid, String iFetchPlan, boolean iIgnoreCache,
ORecordCallback<ORawBuffer> iCallback, boolean loadTombstones) {
return new OStorageOperationResult<ORawBuffer>(readRecord(getClusterById(iRid.clusterId), iRid, true, loadTombstones));
}
@Override
protected ORawBuffer readRecord(final OCluster iClusterSegment, final ORecordId iRid, final boolean iAtomicLock,
boolean loadTombstones) {
final long timer = Orient.instance().getProfiler().startChrono();
lock.acquireSharedLock();
try {
lockManager.acquireLock(Thread.currentThread(), iRid, LOCK.SHARED);
try {
final OClusterPosition lastPos = iClusterSegment.getLastPosition();
if (!iClusterSegment.isHashBased()) {
if (iRid.clusterPosition.compareTo(lastPos) > 0)
return null;
}
final OPhysicalPosition ppos = iClusterSegment.getPhysicalPosition(new OPhysicalPosition(iRid.clusterPosition));
if (ppos != null && loadTombstones && ppos.recordVersion.isTombstone())
return new ORawBuffer(null, ppos.recordVersion, ppos.recordType);
if (ppos == null || ppos.recordVersion.isTombstone())
return null;
final ODataSegmentMemory dataSegment = getDataSegmentById(ppos.dataSegmentId);
return new ORawBuffer(dataSegment.readRecord(ppos.dataSegmentPos), ppos.recordVersion, ppos.recordType);
} finally {
lockManager.releaseLock(Thread.currentThread(), iRid, LOCK.SHARED);
}
} catch (IOException e) {
throw new OStorageException("Error on read record in cluster: " + iClusterSegment.getId(), e);
} finally {
lock.releaseSharedLock();
Orient.instance().getProfiler().stopChrono(PROFILER_READ_RECORD, "Read a record from database", timer, "db.*.readRecord");
}
}
public OStorageOperationResult<ORecordVersion> updateRecord(final ORecordId iRid, final byte[] iContent,
final ORecordVersion iVersion, final byte iRecordType, final int iMode, ORecordCallback<ORecordVersion> iCallback) {
final long timer = Orient.instance().getProfiler().startChrono();
final OCluster cluster = getClusterById(iRid.clusterId);
lock.acquireSharedLock();
try {
lockManager.acquireLock(Thread.currentThread(), iRid, LOCK.EXCLUSIVE);
try {
final OPhysicalPosition ppos = cluster.getPhysicalPosition(new OPhysicalPosition(iRid.clusterPosition));
if (ppos == null || ppos.recordVersion.isTombstone()) {
final ORecordVersion v = OVersionFactory.instance().createUntrackedVersion();
if (iCallback != null) {
iCallback.call(iRid, v);
}
return new OStorageOperationResult<ORecordVersion>(v);
}
// VERSION CONTROL CHECK
switch (iVersion.getCounter()) {
// DOCUMENT UPDATE, NO VERSION CONTROL
case -1:
ppos.recordVersion.increment();
cluster.updateVersion(iRid.clusterPosition, ppos.recordVersion);
break;
// DOCUMENT UPDATE, NO VERSION CONTROL, NO VERSION UPDATE
case -2:
break;
default:
// MVCC CONTROL AND RECORD UPDATE OR WRONG VERSION VALUE
if (iVersion.getCounter() > -1) {
// MVCC TRANSACTION: CHECK IF VERSION IS THE SAME
if (!iVersion.equals(ppos.recordVersion))
if (OFastConcurrentModificationException.enabled())
throw OFastConcurrentModificationException.instance();
else
throw new OConcurrentModificationException(iRid, ppos.recordVersion, iVersion, ORecordOperation.UPDATED);
ppos.recordVersion.increment();
cluster.updateVersion(iRid.clusterPosition, ppos.recordVersion);
} else {
// DOCUMENT ROLLBACKED
iVersion.clearRollbackMode();
ppos.recordVersion.copyFrom(iVersion);
cluster.updateVersion(iRid.clusterPosition, ppos.recordVersion);
}
}
if (ppos.recordType != iRecordType)
cluster.updateRecordType(iRid.clusterPosition, iRecordType);
final ODataSegmentMemory dataSegment = getDataSegmentById(ppos.dataSegmentId);
dataSegment.updateRecord(ppos.dataSegmentPos, iContent);
if (iCallback != null)
iCallback.call(null, ppos.recordVersion);
return new OStorageOperationResult<ORecordVersion>(ppos.recordVersion);
} finally {
lockManager.releaseLock(Thread.currentThread(), iRid, LOCK.EXCLUSIVE);
}
} catch (IOException e) {
throw new OStorageException("Error on update record " + iRid, e);
} finally {
lock.releaseSharedLock();
Orient.instance().getProfiler().stopChrono(PROFILER_UPDATE_RECORD, "Update a record to database", timer, "db.*.updateRecord");
}
}
@Override
public boolean updateReplica(int dataSegmentId, ORecordId rid, byte[] content, ORecordVersion recordVersion, byte recordType)
throws IOException {
if (rid.isNew())
throw new OStorageException("Passed record with id " + rid + " is new and can not be treated as replica.");
checkOpeness();
final OCluster cluster = getClusterById(rid.clusterId);
final ODataSegmentMemory data = getDataSegmentById(dataSegmentId);
lock.acquireSharedLock();
try {
lockManager.acquireLock(Thread.currentThread(), rid, LOCK.EXCLUSIVE);
try {
OPhysicalPosition ppos = cluster.getPhysicalPosition(new OPhysicalPosition(rid.clusterPosition));
if (ppos == null) {
if (!cluster.isHashBased())
throw new OStorageException("Cluster with LH support is required.");
ppos = new OPhysicalPosition(rid.clusterPosition, recordVersion);
ppos.recordType = recordType;
ppos.dataSegmentId = data.getId();
if (!recordVersion.isTombstone()) {
ppos.dataSegmentPos = data.createRecord(content);
}
cluster.addPhysicalPosition(ppos);
return true;
} else {
if (ppos.recordType != recordType)
throw new OStorageException("Record types of provided and stored replicas are different " + recordType + ":"
+ ppos.recordType + ".");
if (ppos.recordVersion.compareTo(recordVersion) < 0) {
if (!recordVersion.isTombstone() && !ppos.recordVersion.isTombstone()) {
data.updateRecord(ppos.dataSegmentPos, content);
} else if (recordVersion.isTombstone() && !ppos.recordVersion.isTombstone()) {
data.deleteRecord(ppos.dataSegmentPos);
} else if (!recordVersion.isTombstone() && ppos.recordVersion.isTombstone()) {
ppos.dataSegmentPos = data.createRecord(content);
cluster.updateDataSegmentPosition(ppos.clusterPosition, dataSegmentId, ppos.dataSegmentPos);
}
cluster.updateVersion(ppos.clusterPosition, recordVersion);
return true;
}
}
} finally {
lockManager.releaseLock(Thread.currentThread(), rid, LOCK.EXCLUSIVE);
}
} finally {
lock.releaseSharedLock();
}
return false;
}
@Override
public <V> V callInRecordLock(Callable<V> callable, ORID rid, boolean exclusiveLock) {
lock.acquireSharedLock();
try {
lockManager.acquireLock(Thread.currentThread(), rid, exclusiveLock ? LOCK.EXCLUSIVE : LOCK.SHARED);
try {
return callable.call();
} finally {
lockManager.releaseLock(Thread.currentThread(), rid, exclusiveLock ? LOCK.EXCLUSIVE : LOCK.SHARED);
}
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new OException("Error on nested call in lock", e);
} finally {
lock.releaseSharedLock();
}
}
@Override
public OStorageOperationResult<Boolean> deleteRecord(final ORecordId iRid, final ORecordVersion iVersion, final int iMode,
ORecordCallback<Boolean> iCallback) {
return new OStorageOperationResult<Boolean>(deleteRecord(iRid, iVersion,
OGlobalConfiguration.STORAGE_USE_TOMBSTONES.getValueAsBoolean(), iCallback));
}
@Override
public boolean cleanOutRecord(ORecordId recordId, ORecordVersion recordVersion, int iMode, ORecordCallback<Boolean> callback) {
return deleteRecord(recordId, recordVersion, false, callback);
}
private boolean deleteRecord(ORecordId iRid, ORecordVersion iVersion, boolean useTombstones, ORecordCallback<Boolean> iCallback) {
final long timer = Orient.instance().getProfiler().startChrono();
final OCluster cluster = getClusterById(iRid.clusterId);
lock.acquireSharedLock();
try {
lockManager.acquireLock(Thread.currentThread(), iRid, LOCK.EXCLUSIVE);
try {
final OPhysicalPosition ppos = cluster.getPhysicalPosition(new OPhysicalPosition(iRid.clusterPosition));
if (ppos == null || (ppos.recordVersion.isTombstone() && useTombstones)) {
if (iCallback != null)
iCallback.call(iRid, false);
return false;
}
// MVCC TRANSACTION: CHECK IF VERSION IS THE SAME
if (iVersion.getCounter() > -1 && !ppos.recordVersion.equals(iVersion))
if (OFastConcurrentModificationException.enabled())
throw OFastConcurrentModificationException.instance();
else
throw new OConcurrentModificationException(iRid, ppos.recordVersion, iVersion, ORecordOperation.DELETED);
if (!ppos.recordVersion.isTombstone()) {
final ODataSegmentMemory dataSegment = getDataSegmentById(ppos.dataSegmentId);
dataSegment.deleteRecord(ppos.dataSegmentPos);
ppos.dataSegmentPos = -1;
}
if (useTombstones && cluster.hasTombstonesSupport())
cluster.convertToTombstone(iRid.clusterPosition);
else
cluster.removePhysicalPosition(iRid.clusterPosition);
if (iCallback != null)
iCallback.call(null, true);
return true;
} finally {
lockManager.releaseLock(Thread.currentThread(), iRid, LOCK.EXCLUSIVE);
}
} catch (IOException e) {
throw new OStorageException("Error on delete record " + iRid, e);
} finally {
lock.releaseSharedLock();
Orient.instance().getProfiler()
.stopChrono(PROFILER_DELETE_RECORD, "Delete a record from database", timer, "db.*.deleteRecord");
}
}
public long count(final int iClusterId) {
return count(iClusterId, false);
}
@Override
public long count(int iClusterId, boolean countTombstones) {
final OCluster cluster = getClusterById(iClusterId);
lock.acquireSharedLock();
try {
return cluster.getEntries() - (countTombstones ? 0L : cluster.getTombstonesCount());
} finally {
lock.releaseSharedLock();
}
}
public OClusterPosition[] getClusterDataRange(final int iClusterId) {
final OCluster cluster = getClusterById(iClusterId);
lock.acquireSharedLock();
try {
return new OClusterPosition[] { cluster.getFirstPosition(), cluster.getLastPosition() };
} catch (IOException ioe) {
throw new OStorageException("Can not retrieve information about data range", ioe);
} finally {
lock.releaseSharedLock();
}
}
public long count(final int[] iClusterIds) {
return count(iClusterIds, false);
}
@Override
public long count(int[] iClusterIds, boolean countTombstones) {
lock.acquireSharedLock();
try {
long tot = 0;
for (int iClusterId : iClusterIds) {
if (iClusterId > -1) {
final OCluster cluster = clusters.get(iClusterId);
if (cluster != null)
tot += cluster.getEntries() - (countTombstones ? 0L : cluster.getTombstonesCount());
}
}
return tot;
} finally {
lock.releaseSharedLock();
}
}
public OCluster getClusterByName(final String iClusterName) {
lock.acquireSharedLock();
try {
return clusterMap.get(iClusterName.toLowerCase());
} finally {
lock.releaseSharedLock();
}
}
/**
 * Resolves a cluster name to its id. Names are matched case-insensitively,
 * consistent with how {@code addCluster} registers them lower-cased.
 *
 * @param iClusterName name of the cluster to look up
 * @return the cluster id, or -1 if no cluster with that name exists
 */
public int getClusterIdByName(String iClusterName) {
    // Normalize once up front; the original lower-cased the name a second
    // time inside the map lookup, which was redundant work.
    iClusterName = iClusterName.toLowerCase();

    lock.acquireSharedLock();
    try {
        final OCluster cluster = clusterMap.get(iClusterName);
        if (cluster == null)
            return -1;
        return cluster.getId();
    } finally {
        lock.releaseSharedLock();
    }
}
public String getClusterTypeByName(final String iClusterName) {
return OClusterMemory.TYPE;
}
public String getPhysicalClusterNameById(final int iClusterId) {
lock.acquireSharedLock();
try {
for (OClusterMemory cluster : clusters) {
if (cluster != null && cluster.getId() == iClusterId)
return cluster.getName();
}
return null;
} finally {
lock.releaseSharedLock();
}
}
public Set<String> getClusterNames() {
lock.acquireSharedLock();
try {
return new HashSet<String>(clusterMap.keySet());
} finally {
lock.releaseSharedLock();
}
}
/**
 * Commits all pending record operations of the given transaction under the
 * storage's exclusive lock.
 * <p>
 * Entries are drained in rounds: committing one entry (via commitEntry) can
 * register further entries on the transaction, so the loop keeps snapshotting
 * and clearing the "current" entry list until no new entries appear. Finally
 * the caches are refreshed from all committed entries.
 */
public void commit(final OTransaction iTx, Runnable callback) {
    // NOTE(review): the callback parameter is never invoked by this
    // implementation — confirm the memory storage is meant to ignore it.
    lock.acquireExclusiveLock();
    try {
        final List<ORecordOperation> tmpEntries = new ArrayList<ORecordOperation>();

        while (iTx.getCurrentRecordEntries().iterator().hasNext()) {
            // Snapshot the current entries so that entries registered while
            // committing are picked up on the next round instead of being missed.
            for (ORecordOperation txEntry : iTx.getCurrentRecordEntries())
                tmpEntries.add(txEntry);

            iTx.clearRecordEntries();

            for (ORecordOperation txEntry : tmpEntries)
                // COMMIT ALL THE SINGLE ENTRIES ONE BY ONE
                commitEntry(iTx, txEntry);

            tmpEntries.clear();
        }

        // UPDATE THE CACHE ONLY IF THE ITERATOR ALLOWS IT
        OTransactionAbstract.updateCacheFromEntries(iTx, iTx.getAllRecordEntries(), true);

    } catch (IOException e) {
        // NOTE(review): the I/O failure is swallowed after rolling back — the
        // caller is never told the commit failed. TODO confirm this is intended.
        rollback(iTx);
    } finally {
        lock.releaseExclusiveLock();
    }
}
public void rollback(final OTransaction iTx) {
}
public void synch() {
}
public boolean exists() {
lock.acquireSharedLock();
try {
return !clusters.isEmpty();
} finally {
lock.releaseSharedLock();
}
}
public ODataSegmentMemory getDataSegmentById(int iDataId) {
lock.acquireSharedLock();
try {
if (iDataId < 0 || iDataId > dataSegments.size() - 1)
throw new IllegalArgumentException("Invalid data segment id " + iDataId + ". Range is 0-" + (dataSegments.size() - 1));
return dataSegments.get(iDataId);
} finally {
lock.releaseSharedLock();
}
}
public int getDataSegmentIdByName(final String iDataSegmentName) {
if (iDataSegmentName == null)
return 0;
lock.acquireSharedLock();
try {
for (ODataSegmentMemory d : dataSegments)
if (d != null && d.getName().equalsIgnoreCase(iDataSegmentName))
return d.getId();
throw new IllegalArgumentException("Data segment '" + iDataSegmentName + "' does not exist in storage '" + name + "'");
} finally {
lock.releaseSharedLock();
}
}
public OCluster getClusterById(int iClusterId) {
lock.acquireSharedLock();
try {
if (iClusterId == ORID.CLUSTER_ID_INVALID)
// GET THE DEFAULT CLUSTER
iClusterId = defaultClusterId;
checkClusterSegmentIndexRange(iClusterId);
return clusters.get(iClusterId);
} finally {
lock.releaseSharedLock();
}
}
public int getClusters() {
lock.acquireSharedLock();
try {
return clusterMap.size();
} finally {
lock.releaseSharedLock();
}
}
public Collection<? extends OCluster> getClusterInstances() {
lock.acquireSharedLock();
try {
return Collections.unmodifiableCollection(clusters);
} finally {
lock.releaseSharedLock();
}
}
public int getDefaultClusterId() {
return defaultClusterId;
}
public long getSize() {
long size = 0;
lock.acquireSharedLock();
try {
for (ODataSegmentMemory d : dataSegments)
if (d != null)
size += d.getSize();
} finally {
lock.releaseSharedLock();
}
return size;
}
/**
 * Checks that a physical position refers to an existing entry of the default
 * data segment (id 0): the position must be within the segment's entry count.
 *
 * @param ppos physical position to validate
 * @return true when the position is considered valid, otherwise false
 */
@Override
public boolean checkForRecordValidity(final OPhysicalPosition ppos) {
    // NOTE(review): any position stored in a secondary data segment (id > 0)
    // is treated as invalid here — confirm this is intentional and not an
    // inverted or overly strict check.
    if (ppos.dataSegmentId > 0)
        return false;

    lock.acquireSharedLock();
    try {
        final ODataSegmentMemory dataSegment = getDataSegmentById(ppos.dataSegmentId);
        // Positions at or beyond the segment's entry count do not exist.
        if (ppos.dataSegmentPos >= dataSegment.count())
            return false;

    } finally {
        lock.releaseSharedLock();
    }
    return true;
}
private void commitEntry(final OTransaction iTx, final ORecordOperation txEntry) throws IOException {
final ORecordId rid = (ORecordId) txEntry.getRecord().getIdentity();
final OCluster cluster = getClusterById(rid.clusterId);
rid.clusterId = cluster.getId();
if (txEntry.getRecord() instanceof OTxListener)
((OTxListener) txEntry.getRecord()).onEvent(txEntry, OTxListener.EVENT.BEFORE_COMMIT);
switch (txEntry.type) {
case ORecordOperation.LOADED:
break;
case ORecordOperation.CREATED:
if (rid.isNew()) {
// CHECK 2 TIMES TO ASSURE THAT IT'S A CREATE OR AN UPDATE BASED ON RECURSIVE TO-STREAM METHOD
final byte[] stream = txEntry.getRecord().toStream();
if (stream == null) {
OLogManager.instance().warn(this, "Null serialization on committing new record %s in transaction", rid);
break;
}
if (rid.isNew()) {
final ORecordId oldRID = rid.copy();
final OPhysicalPosition ppos = createRecord(txEntry.dataSegmentId, rid, stream,
OVersionFactory.instance().createVersion(), txEntry.getRecord().getRecordType(), 0, null).getResult();
txEntry.getRecord().getRecordVersion().copyFrom(ppos.recordVersion);
iTx.updateIdentityAfterCommit(oldRID, rid);
} else {
txEntry
.getRecord()
.getRecordVersion()
.copyFrom(
updateRecord(rid, stream, txEntry.getRecord().getRecordVersion(), txEntry.getRecord().getRecordType(), 0, null)
.getResult());
}
}
break;
case ORecordOperation.UPDATED:
final byte[] stream = txEntry.getRecord().toStream();
if (stream == null) {
OLogManager.instance().warn(this, "Null serialization on committing updated record %s in transaction", rid);
break;
}
txEntry
.getRecord()
.getRecordVersion()
.copyFrom(
updateRecord(rid, stream, txEntry.getRecord().getRecordVersion(), txEntry.getRecord().getRecordType(), 0, null)
.getResult());
break;
case ORecordOperation.DELETED:
deleteRecord(rid, txEntry.getRecord().getRecordVersion(), 0, null);
break;
}
txEntry.getRecord().unsetDirty();
if (txEntry.getRecord() instanceof OTxListener)
((OTxListener) txEntry.getRecord()).onEvent(txEntry, OTxListener.EVENT.AFTER_COMMIT);
}
@Override
public String getURL() {
return OEngineMemory.NAME + ":" + url;
}
public OStorageConfigurationSegment getConfigurationSegment() {
return null;
}
/**
 * Renames an existing cluster in place by mutating its NAME attribute.
 * Silently does nothing when no cluster is registered under the old name.
 *
 * @param iOldName current (case-insensitive) cluster name
 * @param iNewName new name to assign
 */
public void renameCluster(final String iOldName, final String iNewName) {
    final OClusterMemory cluster = (OClusterMemory) getClusterByName(iOldName);
    if (cluster != null)
        try {
            // NOTE(review): clusterMap still indexes the old name after this call
            // unless OClusterMemory.set() updates it internally — verify, otherwise
            // lookups by the new name will fail.
            cluster.set(com.orientechnologies.orient.core.storage.OCluster.ATTRIBUTES.NAME, iNewName);
        } catch (IOException e) {
            // NOTE(review): rename failures are silently ignored; presumably the
            // in-memory cluster cannot throw here, but confirm before relying on it.
        }
}
public void setDefaultClusterId(int defaultClusterId) {
this.defaultClusterId = defaultClusterId;
}
@Override
public String getType() {
return OEngineMemory.NAME;
}
/**
 * Validates that the given cluster id addresses an existing slot of the
 * positional cluster list.
 *
 * @param iClusterId candidate cluster id
 * @throws IllegalArgumentException if the id is negative or beyond the last slot
 */
private void checkClusterSegmentIndexRange(final int iClusterId) {
    // ROBUSTNESS FIX: also reject negative ids. Previously they slipped through
    // and surfaced later as an uninformative IndexOutOfBoundsException from
    // clusters.get(). (The -1 "invalid" sentinel is already remapped to the
    // default cluster by getClusterById before this check runs.)
    if (iClusterId < 0 || iClusterId > clusters.size() - 1)
        throw new IllegalArgumentException("Cluster segment #" + iClusterId + " does not exist in database '" + name + "'");
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_storage_impl_memory_OStorageMemory.java |
1,690 | public class OSyncCommandResultListener extends OAbstractCommandResultListener {
@Override
public boolean result(final Object iRecord) {
fetchRecord(iRecord);
return true;
}
public boolean isEmpty() {
return false;
}
} | 1no label
| server_src_main_java_com_orientechnologies_orient_server_network_protocol_binary_OSyncCommandResultListener.java |
904 | @SuppressWarnings({ "unchecked" })
public class ODocument extends ORecordSchemaAwareAbstract<Object> implements Iterable<Entry<String, Object>>, ODetachable,
Externalizable {
private static final long serialVersionUID = 1L;
public static final byte RECORD_TYPE = 'd';
protected Map<String, Object> _fieldValues;
protected Map<String, Object> _fieldOriginalValues;
protected Map<String, OType> _fieldTypes;
protected Map<String, OSimpleMultiValueChangeListener<String, Object>> _fieldChangeListeners;
protected Map<String, OMultiValueChangeTimeLine<String, Object>> _fieldCollectionChangeTimeLines;
protected boolean _trackingChanges = true;
protected boolean _ordered = true;
protected boolean _lazyLoad = true;
protected boolean _allowChainedAccess = true;
protected transient List<WeakReference<ORecordElement>> _owners = null;
protected static final String[] EMPTY_STRINGS = new String[] {};
/**
* Internal constructor used on unmarshalling.
*/
public ODocument() {
setup();
}
/**
* Creates a new instance by the raw stream usually read from the database. New instances are not persistent until {@link #save()}
* is called.
*
* @param iSource
* Raw stream
*/
public ODocument(final byte[] iSource) {
_source = iSource;
setup();
}
/**
* Creates a new instance by the raw stream usually read from the database. New instances are not persistent until {@link #save()}
* is called.
*
* @param iSource
* Raw stream as InputStream
*/
public ODocument(final InputStream iSource) throws IOException {
final ByteArrayOutputStream out = new ByteArrayOutputStream();
OIOUtils.copyStream(iSource, out, -1);
_source = out.toByteArray();
setup();
}
/**
* Creates a new instance in memory linked by the Record Id to the persistent one. New instances are not persistent until
* {@link #save()} is called.
*
* @param iRID
* Record Id
*/
public ODocument(final ORID iRID) {
setup();
_recordId = (ORecordId) iRID;
_status = STATUS.NOT_LOADED;
_dirty = false;
}
/**
* Creates a new instance in memory of the specified class, linked by the Record Id to the persistent one. New instances are not
* persistent until {@link #save()} is called.
*
* @param iClassName
* Class name
* @param iRID
* Record Id
*/
public ODocument(final String iClassName, final ORID iRID) {
this(iClassName);
_recordId = (ORecordId) iRID;
_dirty = false;
_status = STATUS.NOT_LOADED;
}
/**
* Creates a new instance in memory of the specified class. New instances are not persistent until {@link #save()} is called.
*
* @param iClassName
* Class name
*/
public ODocument(final String iClassName) {
setClassName(iClassName);
setup();
}
/**
* Creates a new instance in memory of the specified schema class. New instances are not persistent until {@link #save()} is
* called. The database reference is taken from the thread local.
*
* @param iClass
* OClass instance
*/
public ODocument(final OClass iClass) {
setup();
_clazz = iClass;
}
/**
 * Fills a document passing the field array in form of pairs of field name and
 * value.
 *
 * @param iFields
 *          Array of field pairs: {@code [name0, value0, name1, value1, ...]};
 *          may be {@code null} or empty for an empty document
 * @throws IllegalArgumentException
 *           if the array has an odd length and therefore cannot be paired
 */
public ODocument(final Object[] iFields) {
    setup();
    if (iFields != null && iFields.length > 0) {
        // ROBUSTNESS FIX: an odd-length array previously caused a raw
        // ArrayIndexOutOfBoundsException on the last iteration; fail fast with a
        // meaningful message instead.
        if (iFields.length % 2 != 0)
            throw new IllegalArgumentException(
                "Fields must be passed as name/value pairs, but an odd number of elements was received: " + iFields.length);
        for (int i = 0; i < iFields.length; i += 2) {
            field(iFields[i].toString(), iFields[i + 1]);
        }
    }
}
/**
* Fills a document passing a map of key/values where the key is the field name and the value the field's value.
*
* @param iFieldMap
* Map of Object/Object
*/
public ODocument(final Map<? extends Object, Object> iFieldMap) {
setup();
if (iFieldMap != null && !iFieldMap.isEmpty())
for (Entry<? extends Object, Object> entry : iFieldMap.entrySet()) {
field(entry.getKey().toString(), entry.getValue());
}
}
/**
* Fills a document passing the field names/values pair, where the first pair is mandatory.
*/
public ODocument(final String iFieldName, final Object iFieldValue, final Object... iFields) {
this(iFields);
field(iFieldName, iFieldValue);
}
/**
* Copies the current instance to a new one. Hasn't been choose the clone() to let ODocument return type. Once copied the new
* instance has the same identity and values but all the internal structure are totally independent by the source.
*/
public ODocument copy() {
return (ODocument) copyTo(new ODocument());
}
/**
* Copies all the fields into iDestination document.
*/
@Override
public ORecordAbstract<Object> copyTo(final ORecordAbstract<Object> iDestination) {
// TODO: REMOVE THIS
checkForFields();
ODocument destination = (ODocument) iDestination;
super.copyTo(iDestination);
destination._ordered = _ordered;
destination._clazz = _clazz;
destination._trackingChanges = _trackingChanges;
if (_owners != null)
destination._owners = new ArrayList<WeakReference<ORecordElement>>(_owners);
if (_fieldValues != null) {
destination._fieldValues = _fieldValues instanceof LinkedHashMap ? new LinkedHashMap<String, Object>()
: new HashMap<String, Object>();
for (Entry<String, Object> entry : _fieldValues.entrySet())
ODocumentHelper.copyFieldValue(destination, entry);
}
if (_fieldTypes != null)
destination._fieldTypes = new HashMap<String, OType>(_fieldTypes);
destination._fieldChangeListeners = null;
destination._fieldCollectionChangeTimeLines = null;
destination._fieldOriginalValues = null;
destination.addAllMultiValueChangeListeners();
destination._dirty = _dirty; // LEAVE IT AS LAST TO AVOID SOMETHING SET THE FLAG TO TRUE
return destination;
}
@Override
public ODocument flatCopy() {
if (isDirty())
throw new IllegalStateException("Cannot execute a flat copy of a dirty record");
final ODocument cloned = new ODocument();
cloned.setOrdered(_ordered);
cloned.fill(_recordId, _recordVersion, _source, false);
return cloned;
}
/**
 * Returns an empty record acting as a place-holder for the current one,
 * carrying only the identity. Used when a record is requested but just the
 * RID is actually needed.
 *
 * @return a NOT_LOADED, non-dirty document holding a copy of this record's id
 */
public ORecord<?> placeholder() {
    final ODocument stub = new ODocument();
    stub._source = null;
    stub._recordId = _recordId.copy();
    stub._status = STATUS.NOT_LOADED;
    stub._dirty = false;
    return stub;
}
/**
 * Detaches all the connected records: every field holding a loaded ORecord
 * with a persistent identity is replaced by its ORID, and every ODetachable
 * field value is asked to detach itself in turn. If any linked record is
 * still new (has no persistent identity yet) the detaching cannot be
 * completed and false will be returned.
 *
 * @return true if the record has been fully detached, otherwise false
 */
public boolean detach() {
    boolean fullyDetached = true;

    if (_fieldValues != null) {
        Object fieldValue;
        for (Map.Entry<String, Object> entry : _fieldValues.entrySet()) {
            fieldValue = entry.getValue();

            if (fieldValue instanceof ORecord<?>)
                if (((ORecord<?>) fieldValue).getIdentity().isNew())
                    // A new record has no persistent RID to substitute: give up on it
                    // but keep processing the remaining fields.
                    fullyDetached = false;
                else
                    // Replace the in-memory record with its lightweight identity.
                    _fieldValues.put(entry.getKey(), ((ORecord<?>) fieldValue).getIdentity());

            if (fieldValue instanceof ODetachable) {
                if (!((ODetachable) fieldValue).detach())
                    fullyDetached = false;
            }
        }
    }

    return fullyDetached;
}
/**
* Loads the record using a fetch plan. Example:
* <p>
* <code>doc.load( "*:3" ); // LOAD THE DOCUMENT BY EARLY FETCHING UP TO 3rd LEVEL OF CONNECTIONS</code>
* </p>
*
* @param iFetchPlan
* Fetch plan to use
*/
public ODocument load(final String iFetchPlan) {
return load(iFetchPlan, false);
}
/**
 * Loads the record using a fetch plan. Example:
 * <p>
 * <code>doc.load( "*:3", true ); // LOAD THE DOCUMENT BY EARLY FETCHING UP TO 3rd LEVEL OF CONNECTIONS IGNORING THE CACHE</code>
 * </p>
 *
 * @param iFetchPlan
 *          Fetch plan to use
 * @param iIgnoreCache
 *          Ignore the cache or use it
 */
public ODocument load(final String iFetchPlan, boolean iIgnoreCache) {
    Object result = null;
    try {
        result = getDatabase().load(this, iFetchPlan, iIgnoreCache);
    } catch (Exception e) {
        // NOTE(review): ANY failure (not only a missing record) is re-reported
        // here as "not found"; the original cause is preserved as the exception
        // cause. Confirm this broad remapping is intended.
        throw new ORecordNotFoundException("The record with id '" + getIdentity() + "' was not found", e);
    }

    if (result == null)
        throw new ORecordNotFoundException("The record with id '" + getIdentity() + "' was not found");

    return (ODocument) result;
}
  /**
   * Loads the record using a fetch plan, optionally returning tombstone (deleted) records.
   *
   * @param iFetchPlan
   *          Fetch plan to use
   * @param iIgnoreCache
   *          Ignore the cache or use it
   * @param loadTombstone
   *          if true, a tombstone record is returned instead of failing
   * @return this document, loaded
   * @throws ORecordNotFoundException
   *           if the record does not exist or loading fails for any reason
   */
  public ODocument load(final String iFetchPlan, boolean iIgnoreCache, boolean loadTombstone) {
    Object result = null;
    try {
      result = getDatabase().load(this, iFetchPlan, iIgnoreCache, loadTombstone);
    } catch (Exception e) {
      throw new ORecordNotFoundException("The record with id '" + getIdentity() + "' was not found", e);
    }
    if (result == null)
      throw new ORecordNotFoundException("The record with id '" + getIdentity() + "' was not found");
    return (ODocument) result;
  }
  /**
   * Reloads the record content. When lazy loading is disabled the fields are eagerly unmarshalled right away.
   */
  @Override
  public ODocument reload(String iFetchPlan, boolean iIgnoreCache) {
    super.reload(iFetchPlan, iIgnoreCache);
    if (!_lazyLoad) {
      checkForFields();
      checkForLoading();
    }
    return this;
  }
  /**
   * Compares this document's content field-by-field with another document, using the database bound to the current thread for
   * both sides.
   *
   * @param iOther
   *          document to compare against
   * @return true if both documents have the same content
   */
  public boolean hasSameContentOf(final ODocument iOther) {
    final ODatabaseRecord currentDb = ODatabaseRecordThreadLocal.INSTANCE.getIfDefined();
    return ODocumentHelper.hasSameContentOf(this, currentDb, iOther, currentDb, null);
  }
  /**
   * Marshalls the document to bytes, lazily initializing the record serializer first if needed.
   */
  @Override
  public byte[] toStream() {
    if (_recordFormat == null)
      setup();
    return super.toStream();
  }
  /**
   * Dumps the instance as string: optional class name and record id, then the fields as a {key:value} listing (collections are
   * rendered as their size only), and finally the record version.
   * <p>
   * NOTE: rendering forces field unmarshalling via checkForFields(), which may flip the dirty flag; the original dirty status is
   * restored in the finally block.
   */
  @Override
  public String toString() {
    final boolean saveDirtyStatus = _dirty;
    final StringBuilder buffer = new StringBuilder();
    try {
      checkForFields();
      if (_clazz != null)
        buffer.append(_clazz.getStreamableName());
      if (_recordId != null) {
        if (_recordId.isValid())
          buffer.append(_recordId);
      }
      boolean first = true;
      ORecord<?> record;
      for (Entry<String, Object> f : _fieldValues.entrySet()) {
        buffer.append(first ? '{' : ',');
        buffer.append(f.getKey());
        buffer.append(':');
        if (f.getValue() instanceof Collection<?>) {
          // COLLECTIONS ARE SUMMARIZED BY THEIR SIZE
          buffer.append('[');
          buffer.append(((Collection<?>) f.getValue()).size());
          buffer.append(']');
        } else if (f.getValue() instanceof ORecord<?>) {
          record = (ORecord<?>) f.getValue();
          if (record.getIdentity().isValid())
            record.getIdentity().toString(buffer);
          else
            buffer.append(record.toString());
        } else
          buffer.append(f.getValue());
        if (first)
          first = false;
      }
      if (!first)
        buffer.append('}');
      if (_recordId != null && _recordId.isValid()) {
        buffer.append(" v");
        buffer.append(_recordVersion);
      }
    } finally {
      // RESTORE THE ORIGINAL DIRTY STATUS (checkForFields MAY HAVE CHANGED IT)
      _dirty = saveDirtyStatus;
    }
    return buffer.toString();
  }
  /**
   * Fills the ODocument directly with the string representation of the document itself. Use it for faster insertion but pay
   * attention to respect the OrientDB record format.
   * <p>
   * <code>
   * record.reset();<br/>
   * record.setClassName("Account");<br/>
   * record.fromString(new String("Account@id:" + data.getCyclesDone() + ",name:'Luca',surname:'Garulli',birthDate:" + date.getTime()<br/>
   * + ",salary:" + 3000f + i));<br/>
   * record.save();<br/>
   * </code>
   * </p>
   * All previously unmarshalled field state (values, types, original values, change listeners) is discarded; fields will be
   * lazily re-parsed from the new source buffer.
   *
   * @param iValue
   *          raw document content in the OrientDB record format
   */
  public void fromString(final String iValue) {
    _dirty = true;
    _source = OBinaryProtocol.string2bytes(iValue);
    removeAllCollectionChangeListeners();
    _fieldCollectionChangeTimeLines = null;
    _fieldOriginalValues = null;
    _fieldTypes = null;
    _fieldValues = null;
  }
/**
* Returns the set of field names.
*/
public String[] fieldNames() {
checkForLoading();
checkForFields();
if (_fieldValues == null || _fieldValues.size() == 0)
return EMPTY_STRINGS;
return _fieldValues.keySet().toArray(new String[_fieldValues.size()]);
}
/**
* Returns the array of field values.
*/
public Object[] fieldValues() {
checkForLoading();
checkForFields();
return _fieldValues.values().toArray(new Object[_fieldValues.size()]);
}
  /**
   * Reads the raw field value without any lazy-load conversion. Plain field names are served straight from the value map;
   * names containing attribute ('@'), dot ('.') or index ('[') syntax are delegated to {@link ODocumentHelper}.
   *
   * @param iFieldName
   *          field name, possibly using chained/indexed notation
   * @return the raw field value, or null if undefined
   */
  public <RET> RET rawField(final String iFieldName) {
    if (iFieldName == null || iFieldName.length() == 0)
      return null;
    checkForLoading();
    if (!checkForFields(iFieldName))
      // NO FIELDS
      return null;
    // OPTIMIZATION
    if (iFieldName.charAt(0) != '@' && OStringSerializerHelper.indexOf(iFieldName, 0, '.', '[') == -1)
      return (RET) _fieldValues.get(iFieldName);
    // NOT FOUND, PARSE THE FIELD NAME
    return (RET) ODocumentHelper.getFieldValue(this, iFieldName);
  }
  /**
   * Reads the field value.
   * <p>
   * When lazy loading is enabled, an ORID value is transparently resolved to its record and cached back into the field (for
   * plain, non-chained names). If a forced type is set for the field, the value is converted to match it (e.g. String to
   * binary, Long to Date, List to Set and vice versa) and the converted value replaces the stored one.
   *
   * @param iFieldName
   *          field name
   * @return field value if defined, otherwise null
   */
  public <RET> RET field(final String iFieldName) {
    RET value = this.<RET> rawField(iFieldName);
    final OType t = fieldType(iFieldName);
    if (_lazyLoad && value instanceof ORID && t != OType.LINK && ODatabaseRecordThreadLocal.INSTANCE.isDefined()) {
      // CREATE THE DOCUMENT OBJECT IN LAZY WAY
      value = (RET) getDatabase().load((ORID) value);
      if (!iFieldName.contains(".")) {
        // CACHE THE LOADED RECORD IN PLACE OF THE ORID (PLAIN FIELD NAMES ONLY)
        removeCollectionChangeListener(iFieldName);
        removeCollectionTimeLine(iFieldName);
        _fieldValues.put(iFieldName, value);
        addCollectionChangeListener(iFieldName, value);
      }
    }
    // CHECK FOR CONVERSION
    if (t != null) {
      Object newValue = null;
      if (t == OType.BINARY && value instanceof String)
        newValue = OStringSerializerHelper.getBinaryContent(value);
      else if (t == OType.DATE && value instanceof Long)
        newValue = (RET) new Date(((Long) value).longValue());
      else if ((t == OType.EMBEDDEDSET || t == OType.LINKSET) && value instanceof List)
        // CONVERT LIST TO SET
        newValue = (RET) ODocumentHelper.convertField(this, iFieldName, Set.class, value);
      else if ((t == OType.EMBEDDEDLIST || t == OType.LINKLIST) && value instanceof Set)
        // CONVERT SET TO LIST
        newValue = (RET) ODocumentHelper.convertField(this, iFieldName, List.class, value);
      if (newValue != null) {
        // VALUE CHANGED: SET THE NEW ONE
        removeCollectionChangeListener(iFieldName);
        removeCollectionTimeLine(iFieldName);
        _fieldValues.put(iFieldName, newValue);
        addCollectionChangeListener(iFieldName, newValue);
        value = (RET) newValue;
      }
    }
    return value;
  }
  /**
   * Reads the field value forcing the return type. Use this method to force return of ORID instead of the entire document by
   * passing ORID.class as iFieldType.
   *
   * @param iFieldName
   *          field name
   * @param iFieldType
   *          Forced type.
   * @return field value converted to the requested type if defined, otherwise null
   */
  public <RET> RET field(final String iFieldName, final Class<?> iFieldType) {
    RET value = this.<RET> rawField(iFieldName);
    if (value != null)
      value = (RET) ODocumentHelper.convertField(this, iFieldName, iFieldType, value);
    return value;
  }
  /**
   * Reads the field value forcing the return type. Use this method to force return of binary data.
   * <p>
   * NOTE: this also records the forced type for the field (see {@link #setFieldType(String, OType)}), affecting later reads.
   *
   * @param iFieldName
   *          field name
   * @param iFieldType
   *          Forced type.
   * @return field value if defined, otherwise null
   */
  public <RET> RET field(final String iFieldName, final OType iFieldType) {
    setFieldType(iFieldName, iFieldType);
    return (RET) field(iFieldName);
  }
  /**
   * Writes the field value. This method sets the current document as dirty. Delegates to
   * {@link #field(String, Object, OType)} with no forced type.
   *
   * @param iFieldName
   *          field name. If contains dots (.) the change is applied to the nested documents in chain. To disable this feature call
   *          {@link #setAllowChainedAccess(boolean)} to false.
   * @param iPropertyValue
   *          field value
   * @return The Record instance itself giving a "fluent interface". Useful to call multiple methods in chain.
   */
  public ODocument field(final String iFieldName, Object iPropertyValue) {
    return field(iFieldName, iPropertyValue, null);
  }
/**
* Fills a document passing the field names/values.
*/
public ODocument fields(final String iFieldName, final Object iFieldValue, final Object... iFields) {
if (iFields != null && iFields.length % 2 != 0)
throw new IllegalArgumentException("Fields must be passed in pairs as name and value");
field(iFieldName, iFieldValue);
if (iFields != null && iFields.length > 0)
for (int i = 0; i < iFields.length; i += 2) {
field(iFields[i].toString(), iFields[i + 1]);
}
return this;
}
  /**
   * Fills a document passing the field names/values as a Map<String,Object> where the keys are the field names and the values are
   * the field values.
   *
   * @param iMap
   *          name/value pairs to set; null is tolerated and treated as a no-op
   * @return this document ("fluent interface")
   */
  public ODocument fields(final Map<String, Object> iMap) {
    if (iMap != null) {
      for (Entry<String, Object> entry : iMap.entrySet())
        field(entry.getKey(), entry.getValue());
    }
    return this;
  }
  /**
   * Writes the field value forcing the type. This method sets the current document as dirty.
   *
   * @param iFieldName
   *          field name. If contains dots (.) the change is applied to the nested documents in chain. To disable this feature call
   *          {@link #setAllowChainedAccess(boolean)} to false. The special names "@class" and "@rid" set the schema class and
   *          the record id respectively.
   * @param iPropertyValue
   *          field value
   * @param iFieldType
   *          Forced type (not auto-determined)
   * @return The Record instance itself giving a "fluent interface". Useful to call multiple methods in chain. If the updated
   *         document is another document (using the dot (.) notation) then the document returned is the changed one or NULL if no
   *         document has been found in chain
   */
  public ODocument field(String iFieldName, Object iPropertyValue, OType iFieldType) {
    // NOTE(review): a null iPropertyValue for "@class"/"@rid" throws NPE on toString() — confirm callers never pass null here
    if ("@class".equals(iFieldName)) {
      setClassName(iPropertyValue.toString());
      return this;
    } else if ("@rid".equals(iFieldName)) {
      _recordId.fromString(iPropertyValue.toString());
      return this;
    }
    final int lastSep = _allowChainedAccess ? iFieldName.lastIndexOf('.') : -1;
    if (lastSep > -1) {
      // SUB PROPERTY GET 1 LEVEL BEFORE LAST
      final Object subObject = field(iFieldName.substring(0, lastSep));
      if (subObject != null) {
        final String subFieldName = iFieldName.substring(lastSep + 1);
        if (subObject instanceof ODocument) {
          // SUB-DOCUMENT
          ((ODocument) subObject).field(subFieldName, iPropertyValue);
          return (ODocument) (((ODocument) subObject).isEmbedded() ? this : subObject);
        } else if (subObject instanceof Map<?, ?>)
          // KEY/VALUE
          ((Map<String, Object>) subObject).put(subFieldName, iPropertyValue);
        else if (OMultiValue.isMultiValue(subObject)) {
          // APPLY CHANGE TO ALL THE ITEM IN SUB-COLLECTION
          for (Object subObjectItem : OMultiValue.getMultiValueIterable(subObject)) {
            if (subObjectItem instanceof ODocument) {
              // SUB-DOCUMENT, CHECK IF IT'S NOT LINKED
              if (!((ODocument) subObjectItem).isEmbedded())
                throw new IllegalArgumentException("Property '" + iFieldName
                    + "' points to linked collection of items. You can only change embedded documents in this way");
              ((ODocument) subObjectItem).field(subFieldName, iPropertyValue);
            } else if (subObjectItem instanceof Map<?, ?>) {
              // KEY/VALUE
              ((Map<String, Object>) subObjectItem).put(subFieldName, iPropertyValue);
            }
          }
          return this;
        }
      }
      return null;
    }
    iFieldName = checkFieldName(iFieldName);
    checkForLoading();
    checkForFields();
    final boolean knownProperty = _fieldValues.containsKey(iFieldName);
    final Object oldValue = _fieldValues.get(iFieldName);
    if (knownProperty)
      // CHECK IF IS REALLY CHANGED
      if (iPropertyValue == null) {
        if (oldValue == null)
          // BOTH NULL: UNCHANGED
          return this;
      } else {
        try {
          if (iPropertyValue.equals(oldValue)) {
            if (!(iPropertyValue instanceof ORecordElement))
              // SAME BUT NOT TRACKABLE: SET THE RECORD AS DIRTY TO BE SURE IT'S SAVED
              setDirty();
            // SAVE VALUE: UNCHANGED
            return this;
          }
          if (OType.isSimpleType(iPropertyValue) && iPropertyValue.equals(oldValue))
            // SAVE VALUE: UNCHANGED
            return this;
        } catch (Exception e) {
          OLogManager.instance().warn(this, "Error on checking the value of property %s against the record %s", e, iFieldName,
              getIdentity());
        }
      }
    setFieldType(iFieldName, iFieldType);
    if (iFieldType == null && _clazz != null) {
      // SCHEMAFULL?
      final OProperty prop = _clazz.getProperty(iFieldName);
      if (prop != null)
        iFieldType = prop.getType();
    }
    if (iPropertyValue != null)
      // CHECK FOR CONVERSION
      if (iFieldType != null)
        iPropertyValue = ODocumentHelper.convertField(this, iFieldName, iFieldType.getDefaultJavaType(), iPropertyValue);
      else if (iPropertyValue instanceof Enum)
        iPropertyValue = iPropertyValue.toString();
    removeCollectionChangeListener(iFieldName);
    removeCollectionTimeLine(iFieldName);
    _fieldValues.put(iFieldName, iPropertyValue);
    addCollectionChangeListener(iFieldName, iPropertyValue);
    if (_status != STATUS.UNMARSHALLING) {
      setDirty();
      if (_trackingChanges && _recordId.isValid()) {
        // SAVE THE OLD VALUE IN A SEPARATE MAP ONLY IF TRACKING IS ACTIVE AND THE RECORD IS NOT NEW
        if (_fieldOriginalValues == null)
          _fieldOriginalValues = new HashMap<String, Object>();
        // INSERT IT ONLY IF NOT EXISTS TO AVOID LOSS OF THE ORIGINAL VALUE (FUNDAMENTAL FOR INDEX HOOK)
        if (!_fieldOriginalValues.containsKey(iFieldName))
          _fieldOriginalValues.put(iFieldName, oldValue);
      }
    }
    return this;
  }
  /**
   * Removes a field, marking the document dirty and invalidating the raw source buffer.
   *
   * @param iFieldName
   *          field to remove
   * @return the previous value of the field, or null if it was not set
   */
  public Object removeField(final String iFieldName) {
    checkForLoading();
    checkForFields();
    final boolean knownProperty = _fieldValues.containsKey(iFieldName);
    final Object oldValue = _fieldValues.get(iFieldName);
    if (knownProperty && _trackingChanges) {
      // SAVE THE OLD VALUE IN A SEPARATE MAP
      if (_fieldOriginalValues == null)
        _fieldOriginalValues = new HashMap<String, Object>();
      // INSERT IT ONLY IF NOT EXISTS TO AVOID LOSS OF THE ORIGINAL VALUE (FUNDAMENTAL FOR INDEX HOOK)
      if (!_fieldOriginalValues.containsKey(iFieldName)) {
        _fieldOriginalValues.put(iFieldName, oldValue);
      }
    }
    removeCollectionTimeLine(iFieldName);
    removeCollectionChangeListener(iFieldName);
    _fieldValues.remove(iFieldName);
    _source = null;
    setDirty();
    return oldValue;
  }
  /**
   * Merge current document with the document passed as parameter. If the field already exists then the conflicts are managed based
   * on the value of the parameter 'iUpdateOnlyMode'. The other document's schema class is adopted when this document has none.
   *
   * @param iOther
   *          Other ODocument instance to merge
   * @param iUpdateOnlyMode
   *          if true, the other document properties will always be added or overwritten. If false, the missed properties in the
   *          "other" document will be removed by original document
   * @param iMergeSingleItemsOfMultiValueFields
   *          if true, multi-value fields (maps/collections) are merged item-by-item instead of being replaced wholesale
   * @return this document ("fluent interface")
   */
  public ODocument merge(final ODocument iOther, boolean iUpdateOnlyMode, boolean iMergeSingleItemsOfMultiValueFields) {
    iOther.checkForLoading();
    iOther.checkForFields();
    if (_clazz == null && iOther.getSchemaClass() != null)
      _clazz = iOther.getSchemaClass();
    return merge(iOther._fieldValues, iUpdateOnlyMode, iMergeSingleItemsOfMultiValueFields);
  }
  /**
   * Merge current document with the field map passed as parameter. If the field already exists then the conflicts are managed based
   * on the value of the parameter 'iUpdateOnlyMode'.
   *
   * @param iOther
   *          field name/value map to merge in
   * @param iUpdateOnlyMode
   *          if true, the other document properties will always be added or overwritten. If false, the missed properties in the
   *          "other" document will be removed by original document
   * @param iMergeSingleItemsOfMultiValueFields
   *          if true, map entries are put one-by-one and collection items are replaced/appended individually instead of
   *          replacing the whole multi-value
   * @return this document ("fluent interface")
   */
  public ODocument merge(final Map<String, Object> iOther, final boolean iUpdateOnlyMode,
      boolean iMergeSingleItemsOfMultiValueFields) {
    checkForLoading();
    checkForFields();
    _source = null;
    for (String f : iOther.keySet()) {
      if (containsField(f) && iMergeSingleItemsOfMultiValueFields) {
        Object field = field(f);
        if (field instanceof Map<?, ?>) {
          final Map<String, Object> map = (Map<String, Object>) field;
          final Map<String, Object> otherMap = (Map<String, Object>) iOther.get(f);
          for (Entry<String, Object> entry : otherMap.entrySet()) {
            map.put(entry.getKey(), entry.getValue());
          }
          continue;
        } else if (field instanceof Collection<?>) {
          final Collection<Object> coll = (Collection<Object>) field;
          final Collection<Object> otherColl = (Collection<Object>) iOther.get(f);
          for (Object item : otherColl) {
            if (coll.contains(item))
              // REMOVE PREVIOUS ITEM BECAUSE THIS COULD BE UPDATED INSIDE OF IT
              coll.remove(item);
            coll.add(item);
          }
          // JUMP RAW REPLACE
          continue;
        }
      }
      // RESET THE FIELD TYPE
      setFieldType(f, null);
      // RAW SET/REPLACE
      field(f, iOther.get(f));
    }
    if (!iUpdateOnlyMode) {
      // REMOVE PROPERTIES NOT FOUND IN OTHER DOC
      for (String f : fieldNames())
        if (!iOther.containsKey(f))
          removeField(f);
    }
    return this;
  }
/**
* Returns list of changed fields. There are two types of changes:
* <ol>
* <li>Value of field itself was changed by calling of {@link #field(String, Object)} method for example.</li>
* <li>Internal state of field was changed but was not saved. This case currently is applicable for for collections only.</li>
* </ol>
*
* @return List of fields, values of which were changed.
*/
public String[] getDirtyFields() {
if ((_fieldOriginalValues == null || _fieldOriginalValues.isEmpty())
&& (_fieldCollectionChangeTimeLines == null || _fieldCollectionChangeTimeLines.isEmpty()))
return EMPTY_STRINGS;
final Set<String> dirtyFields = new HashSet<String>();
if (_fieldOriginalValues != null)
dirtyFields.addAll(_fieldOriginalValues.keySet());
if (_fieldCollectionChangeTimeLines != null)
dirtyFields.addAll(_fieldCollectionChangeTimeLines.keySet());
return dirtyFields.toArray(new String[dirtyFields.size()]);
}
  /**
   * Returns the original value of a field before it has been changed.
   *
   * @param iFieldName
   *          Property name to retrieve the original value
   * @return the original value, or null if the field was never changed (or change tracking is off)
   */
  public Object getOriginalValue(final String iFieldName) {
    return _fieldOriginalValues != null ? _fieldOriginalValues.get(iFieldName) : null;
  }
  /**
   * Returns the recorded change time line of a tracked multi-value field, or null if none was recorded.
   */
  public OMultiValueChangeTimeLine<String, Object> getCollectionTimeLine(final String iFieldName) {
    return _fieldCollectionChangeTimeLines != null ? _fieldCollectionChangeTimeLines.get(iFieldName) : null;
  }
  /**
   * Returns an iterator over the field name/value entries. The iterator supports {@link Iterator#remove()}, which also records
   * the removed original value (when change tracking is active) and detaches collection listeners/time lines for the field.
   */
  public Iterator<Entry<String, Object>> iterator() {
    checkForLoading();
    checkForFields();
    if (_fieldValues == null)
      return OEmptyIterator.INSTANCE;
    final Iterator<Entry<String, Object>> iterator = _fieldValues.entrySet().iterator();
    return new Iterator<Entry<String, Object>>() {
      private Entry<String, Object> current;
      public boolean hasNext() {
        return iterator.hasNext();
      }
      public Entry<String, Object> next() {
        current = iterator.next();
        return current;
      }
      public void remove() {
        iterator.remove();
        if (_trackingChanges) {
          // SAVE THE OLD VALUE IN A SEPARATE MAP
          if (_fieldOriginalValues == null)
            _fieldOriginalValues = new HashMap<String, Object>();
          // INSERT IT ONLY IF NOT EXISTS TO AVOID LOSS OF THE ORIGINAL VALUE (FUNDAMENTAL FOR INDEX HOOK)
          if (!_fieldOriginalValues.containsKey(current.getKey())) {
            _fieldOriginalValues.put(current.getKey(), current.getValue());
          }
        }
        removeCollectionChangeListener(current.getKey());
        removeCollectionTimeLine(current.getKey());
      }
    };
  }
  /**
   * Checks if a field exists.
   *
   * @param iFieldName
   *          field name; null is tolerated and yields false
   * @return True if exists, otherwise false.
   */
  public boolean containsField(final String iFieldName) {
    if (iFieldName == null)
      return false;
    checkForLoading();
    checkForFields(iFieldName);
    return _fieldValues.containsKey(iFieldName);
  }
  /**
   * Internal. Returns the constant record-type byte identifying documents.
   */
  public byte getRecordType() {
    return RECORD_TYPE;
  }
  /**
   * Returns true if the record has some owner.
   */
  public boolean hasOwners() {
    return _owners != null && !_owners.isEmpty();
  }
  /**
   * Internal. Registers an owning element; the owner is held weakly so it can still be garbage collected.
   *
   * @return this document ("fluent interface")
   */
  public ODocument addOwner(final ORecordElement iOwner) {
    if (_owners == null)
      _owners = new ArrayList<WeakReference<ORecordElement>>();
    this._owners.add(new WeakReference<ORecordElement>(iOwner));
    return this;
  }
public Iterable<ORecordElement> getOwners() {
if (_owners == null)
return Collections.emptyList();
final List<ORecordElement> result = new ArrayList<ORecordElement>();
for (WeakReference<ORecordElement> o : _owners)
result.add(o.get());
return result;
}
public ODocument removeOwner(final ORecordElement iRecordElement) {
if (_owners != null) {
// PROPAGATES TO THE OWNER
ORecordElement e;
for (int i = 0; i < _owners.size(); ++i) {
e = _owners.get(i).get();
if (e == iRecordElement) {
_owners.remove(i);
break;
}
}
}
return this;
}
  /**
   * Propagates the dirty status to the owner, if any. This happens when the object is embedded in another one. Fields are
   * forced to unmarshal before the raw source buffer becomes unavailable.
   */
  @Override
  public ORecordAbstract<Object> setDirty() {
    if (_owners != null) {
      // PROPAGATES TO THE OWNER
      ORecordElement e;
      for (WeakReference<ORecordElement> o : _owners) {
        e = o.get();
        if (e != null)
          e.setDirty();
      }
    }
    // THIS IS IMPORTANT TO BE SURE THAT FIELDS ARE LOADED BEFORE IT'S TOO LATE AND THE RECORD _SOURCE IS NULL
    checkForFields();
    return super.setDirty();
  }
  /**
   * Notifies all live owners before this record's identity changes. A defensive copy of the owner list is iterated so owners
   * may safely unregister themselves during the callback.
   */
  @Override
  public void onBeforeIdentityChanged(final ORID iRID) {
    if (_owners != null) {
      final List<WeakReference<ORecordElement>> temp = new ArrayList<WeakReference<ORecordElement>>(_owners);
      ORecordElement e;
      for (WeakReference<ORecordElement> o : temp) {
        e = o.get();
        if (e != null)
          e.onBeforeIdentityChanged(iRID);
      }
    }
  }
  /**
   * Notifies all live owners after this record's identity has changed. A defensive copy of the owner list is iterated so
   * owners may safely unregister themselves during the callback.
   */
  @Override
  public void onAfterIdentityChanged(final ORecord<?> iRecord) {
    super.onAfterIdentityChanged(iRecord);
    if (_owners != null) {
      final List<WeakReference<ORecordElement>> temp = new ArrayList<WeakReference<ORecordElement>>(_owners);
      ORecordElement e;
      for (WeakReference<ORecordElement> o : temp) {
        e = o.get();
        if (e != null)
          e.onAfterIdentityChanged(iRecord);
      }
    }
  }
  /**
   * Unmarshalls the document from bytes, discarding any previously cached field state (values, types, original values, change
   * listeners and time lines). When lazy loading is disabled the fields are eagerly unmarshalled right away.
   */
  @Override
  public ODocument fromStream(final byte[] iRecordBuffer) {
    removeAllCollectionChangeListeners();
    _fieldValues = null;
    _fieldTypes = null;
    _fieldOriginalValues = null;
    _fieldChangeListeners = null;
    _fieldCollectionChangeTimeLines = null;
    super.fromStream(iRecordBuffer);
    if (!_lazyLoad) {
      checkForFields();
      checkForLoading();
    }
    return (ODocument) this;
  }
  /**
   * Returns the forced field type if any.
   *
   * @param iFieldName
   *          field name
   * @return the forced type, or null when no type was forced for the field
   */
  public OType fieldType(final String iFieldName) {
    return _fieldTypes != null ? _fieldTypes.get(iFieldName) : null;
  }
  /**
   * Unloads the record, additionally clearing the document's field values and change listeners.
   */
  @Override
  public ODocument unload() {
    super.unload();
    internalReset();
    return this;
  }
  /**
   * Clears all the field values and types, and drops the owner references.
   */
  @Override
  public ODocument clear() {
    super.clear();
    internalReset();
    _owners = null;
    return this;
  }
  /**
   * Resets the record values and class type to being reused. This can be used only if no transactions are begun.
   *
   * @return this document ("fluent interface")
   * @throws IllegalStateException
   *           if a transaction is active on the current thread's database
   */
  @Override
  public ODocument reset() {
    ODatabaseRecord db = ODatabaseRecordThreadLocal.INSTANCE.getIfDefined();
    if (db != null && db.getTransaction().isActive())
      throw new IllegalStateException("Cannot reset documents during a transaction. Create a new one each time");
    super.reset();
    internalReset();
    if (_fieldOriginalValues != null)
      _fieldOriginalValues.clear();
    _owners = null;
    return this;
  }
  /**
   * Clears the in-memory field state shared by unload/clear/reset: change listeners, collection time lines and field values.
   */
  protected void internalReset() {
    removeAllCollectionChangeListeners();
    if (_fieldCollectionChangeTimeLines != null)
      _fieldCollectionChangeTimeLines.clear();
    if (_fieldValues != null)
      _fieldValues.clear();
  }
/**
* Rollbacks changes to the loaded version without reloading the document. Works only if tracking changes is enabled @see
* {@link #isTrackingChanges()} and {@link #setTrackingChanges(boolean)} methods.
*/
public ODocument undo() {
if (!_trackingChanges)
throw new OConfigurationException("Cannot undo the document because tracking of changes is disabled");
for (Entry<String, Object> entry : _fieldOriginalValues.entrySet()) {
final Object value = entry.getValue();
if (value == null)
_fieldValues.remove(entry.getKey());
else
_fieldValues.put(entry.getKey(), entry.getValue());
}
return this;
}
  /**
   * Returns whether linked records are resolved lazily on field access.
   */
  public boolean isLazyLoad() {
    return _lazyLoad;
  }
  /**
   * Sets the lazy-load mode of the document.
   *
   * @param iLazyLoad
   *          true to resolve linked records lazily on field access
   */
  public void setLazyLoad(final boolean iLazyLoad) {
    this._lazyLoad = iLazyLoad;
    if (_fieldValues != null) {
      // PROPAGATE LAZINESS TO THE FIELDS
      // NOTE(review): auto-convert is always disabled here regardless of iLazyLoad — confirm this is intended
      for (Entry<String, Object> field : _fieldValues.entrySet()) {
        if (field.getValue() instanceof ORecordLazyMultiValue)
          ((ORecordLazyMultiValue) field.getValue()).setAutoConvertToRecord(false);
      }
    }
  }
  /**
   * Returns whether field-level change tracking is enabled.
   */
  public boolean isTrackingChanges() {
    return _trackingChanges;
  }
  /**
   * Enabled or disabled the tracking of changes in the document. This is needed by some triggers like
   * {@link com.orientechnologies.orient.core.index.OClassIndexManager} to determine what fields are changed to update indexes.
   * Disabling tracking frees all the tracking state; re-enabling re-attaches multi-value change listeners.
   *
   * @param iTrackingChanges
   *          True to enable it, otherwise false
   * @return this document ("fluent interface")
   */
  public ODocument setTrackingChanges(final boolean iTrackingChanges) {
    this._trackingChanges = iTrackingChanges;
    if (!iTrackingChanges) {
      // FREE RESOURCES
      this._fieldOriginalValues = null;
      removeAllCollectionChangeListeners();
      _fieldChangeListeners = null;
      _fieldCollectionChangeTimeLines = null;
    } else {
      addAllMultiValueChangeListeners();
    }
    return this;
  }
  /**
   * Returns whether the document keeps fields in insertion order.
   */
  public boolean isOrdered() {
    return _ordered;
  }
  /**
   * Sets whether the document keeps fields in insertion order. Takes effect when the field map is (re)created.
   *
   * @return this document ("fluent interface")
   */
  public ODocument setOrdered(final boolean iOrdered) {
    this._ordered = iOrdered;
    return this;
  }
  /**
   * Equality is delegated to the superclass (record identity); two distinct instances are only considered equal when this
   * record id is valid.
   * <p>
   * NOTE(review): a document with an invalid (new) record id is equal only to itself — confirm this asymmetry is intended.
   */
  @Override
  public boolean equals(Object obj) {
    if (!super.equals(obj))
      return false;
    return this == obj || _recordId.isValid();
  }
  /**
   * Returns the number of fields in memory. Does not force unmarshalling: returns 0 for a not-yet-deserialized document.
   */
  public int fields() {
    return _fieldValues == null ? 0 : _fieldValues.size();
  }
  /**
   * Returns true when no field is present in memory. Does not force unmarshalling.
   */
  public boolean isEmpty() {
    return _fieldValues == null || _fieldValues.isEmpty();
  }
  /**
   * Returns true when this document is embedded inside another record, i.e. it has at least one owner.
   */
  public boolean isEmbedded() {
    return _owners != null && !_owners.isEmpty();
  }
  /**
   * Ensures the field map exists (ordered or not, per {@link #isOrdered()}) and lazily unmarshals the requested fields from the
   * raw source buffer when the record is loaded but not yet deserialized.
   *
   * @return true if the requested fields are available after the call
   */
  @Override
  protected boolean checkForFields(final String... iFields) {
    if (_fieldValues == null)
      _fieldValues = _ordered ? new LinkedHashMap<String, Object>() : new HashMap<String, Object>();
    if (_status == ORecordElement.STATUS.LOADED && _source != null)
      // POPULATE FIELDS LAZY
      return deserializeFields(iFields);
    return true;
  }
  /**
   * Internal. Initializes the record and binds the schema-aware CSV serializer used for (un)marshalling.
   */
  @Override
  protected void setup() {
    super.setup();
    _recordFormat = ORecordSerializerFactory.instance().getFormat(ORecordSerializerSchemaAware2CSV.NAME);
  }
/**
* Sets the field type. This overrides the schema property settings if any.
*
* @param iFieldName
* Field name
* @param iFieldType
* Type to set between OType enumaration values
*/
public ODocument setFieldType(final String iFieldName, final OType iFieldType) {
if (iFieldType != null) {
// SET THE FORCED TYPE
if (_fieldTypes == null)
_fieldTypes = new HashMap<String, OType>();
_fieldTypes.put(iFieldName, iFieldType);
} else if (_fieldTypes != null) {
// REMOVE THE FIELD TYPE
_fieldTypes.remove(iFieldName);
if (_fieldTypes.size() == 0)
// EMPTY: OPTIMIZE IT BY REMOVING THE ENTIRE MAP
_fieldTypes = null;
}
return this;
}
  /**
   * Saves the document without forcing creation.
   */
  @Override
  public ODocument save() {
    return save(false);
  }
  /**
   * Saves the document into the given cluster without forcing creation.
   */
  @Override
  public ODocument save(final String iClusterName) {
    return save(iClusterName, false);
  }
  /**
   * Saves the document: when a schema class is set, saving is redirected to the class' default cluster; otherwise multi-values
   * are converted to tracked versions, the document is validated and saved.
   */
  @Override
  public ODocument save(boolean forceCreate) {
    if (_clazz != null)
      return save(getDatabase().getClusterNameById(_clazz.getDefaultClusterId()), forceCreate);
    convertAllMultiValuesToTrackedVersions();
    validate();
    return (ODocument) super.save(forceCreate);
  }
  /**
   * Saves the document into the given cluster after converting multi-values to tracked versions and validating it.
   */
  @Override
  public ODocument save(final String iClusterName, boolean forceCreate) {
    convertAllMultiValuesToTrackedVersions();
    validate();
    return (ODocument) super.save(iClusterName, forceCreate);
  }
  /**
   * Initializes the object if it has been unserialized. Supports partial unmarshalling: when specific fields are requested,
   * only those are extracted from the raw source buffer, and the buffer is kept; a full unmarshalling discards the buffer.
   *
   * @param iFields
   *          optional list of field names to deserialize; chained/indexed names are reduced to their root field
   * @return true when the requested fields (or all fields) are available after the call
   */
  @Override
  public boolean deserializeFields(final String... iFields) {
    if (_source == null)
      // ALREADY UNMARSHALLED OR JUST EMPTY
      return true;
    if (iFields != null && iFields.length > 0) {
      // EXTRACT REAL FIELD NAMES
      for (int i = 0; i < iFields.length; ++i) {
        final String f = iFields[i];
        if (!f.startsWith("@")) {
          int pos1 = f.indexOf('[');
          int pos2 = f.indexOf('.');
          if (pos1 > -1 || pos2 > -1) {
            int pos = pos1 > -1 ? pos1 : pos2;
            if (pos2 > -1 && pos2 < pos)
              pos = pos2;
            // REPLACE THE FIELD NAME
            iFields[i] = f.substring(0, pos);
          }
        }
      }
      // CHECK IF HAS BEEN ALREADY UNMARSHALLED
      if (_fieldValues != null && !_fieldValues.isEmpty()) {
        boolean allFound = true;
        for (String f : iFields)
          if (!f.startsWith("@") && !_fieldValues.containsKey(f)) {
            allFound = false;
            break;
          }
        if (allFound)
          // ALL THE REQUESTED FIELDS HAVE BEEN LOADED BEFORE AND ARE AVAILABLE, AVOID UNMARSHALLING
          return true;
      }
    }
    if (_recordFormat == null)
      setup();
    super.deserializeFields(iFields);
    if (iFields != null && iFields.length > 0) {
      if (iFields[0].startsWith("@"))
        // ATTRIBUTE
        return true;
      // PARTIAL UNMARSHALLING
      if (_fieldValues != null && !_fieldValues.isEmpty())
        for (String f : iFields)
          if (_fieldValues.containsKey(f))
            return true;
      // NO FIELDS FOUND
      return false;
    } else if (_source != null)
      // FULL UNMARSHALLING
      _source = null;
    return true;
  }
protected String checkFieldName(final String iFieldName) {
final Character c = OSchemaShared.checkNameIfValid(iFieldName);
if (c != null)
throw new IllegalArgumentException("Invalid field name '" + iFieldName + "'. Character '" + c + "' is invalid");
return iFieldName;
}
  /**
   * Convenience overload: attaches a change listener to the current value of the given field.
   */
  private void addCollectionChangeListener(final String fieldName) {
    final Object fieldValue = _fieldValues.get(fieldName);
    addCollectionChangeListener(fieldName, fieldValue);
  }
  /**
   * Attaches a multi-value change listener to the field value, but only when the field type (forced or schema-derived) is an
   * embedded/link collection or map and the value is actually tracked. At most one listener per field is registered.
   */
  private void addCollectionChangeListener(final String fieldName, final Object fieldValue) {
    OType fieldType = fieldType(fieldName);
    if (fieldType == null && _clazz != null) {
      // FALL BACK TO THE SCHEMA PROPERTY TYPE
      final OProperty prop = _clazz.getProperty(fieldName);
      fieldType = prop != null ? prop.getType() : null;
    }
    if (fieldType == null
        || !(OType.EMBEDDEDLIST.equals(fieldType) || OType.EMBEDDEDMAP.equals(fieldType) || OType.EMBEDDEDSET.equals(fieldType)
            || OType.LINKSET.equals(fieldType) || OType.LINKLIST.equals(fieldType) || OType.LINKMAP.equals(fieldType)))
      return;
    if (!(fieldValue instanceof OTrackedMultiValue))
      return;
    final OTrackedMultiValue<String, Object> multiValue = (OTrackedMultiValue<String, Object>) fieldValue;
    if (_fieldChangeListeners == null)
      _fieldChangeListeners = new HashMap<String, OSimpleMultiValueChangeListener<String, Object>>();
    if (!_fieldChangeListeners.containsKey(fieldName)) {
      final OSimpleMultiValueChangeListener<String, Object> listener = new OSimpleMultiValueChangeListener<String, Object>(
          fieldName);
      multiValue.addChangeListener(listener);
      _fieldChangeListeners.put(fieldName, listener);
    }
  }
  /**
   * Detaches the change listener from every field value and drops the listener registry.
   */
  private void removeAllCollectionChangeListeners() {
    if (_fieldValues == null)
      return;
    for (final String fieldName : _fieldValues.keySet()) {
      removeCollectionChangeListener(fieldName);
    }
    _fieldChangeListeners = null;
  }
  /**
   * Attaches a change listener to every eligible (tracked multi-value) field.
   */
  private void addAllMultiValueChangeListeners() {
    if (_fieldValues == null)
      return;
    for (final String fieldName : _fieldValues.keySet()) {
      addCollectionChangeListener(fieldName);
    }
  }
  /**
   * Unregisters the change listener for the given field and, if the field value is a tracked multi-value, detaches it from
   * the value as well. The registry entry is removed even if the current value is no longer tracked.
   */
  private void removeCollectionChangeListener(final String fieldName) {
    if (_fieldChangeListeners == null)
      return;
    final OMultiValueChangeListener<String, Object> changeListener = _fieldChangeListeners.remove(fieldName);
    final Object fieldValue;
    if (_fieldValues == null)
      return;
    fieldValue = _fieldValues.get(fieldName);
    if (!(fieldValue instanceof OTrackedMultiValue))
      return;
    if (changeListener != null) {
      final OTrackedMultiValue<String, Object> multiValue = (OTrackedMultiValue<String, Object>) fieldValue;
      multiValue.removeRecordChangeListener(changeListener);
    }
  }
  /**
   * Discards the recorded change time line of the given field, if any.
   */
  private void removeCollectionTimeLine(final String fieldName) {
    if (_fieldCollectionChangeTimeLines == null)
      return;
    _fieldCollectionChangeTimeLines.remove(fieldName);
  }
  /**
   * Converts all non-tracked collections implementations contained in document fields to tracked ones. Only fields whose
   * (forced or schema-derived) type is an embedded/link collection or map are converted; values already tracked are left
   * untouched. After the swap, change listeners are (re)attached to every eligible field.
   *
   * @see OTrackedMultiValue
   */
  public void convertAllMultiValuesToTrackedVersions() {
    if (_fieldValues == null)
      return;
    final Map<String, Object> fieldsToUpdate = new HashMap<String, Object>();
    for (Map.Entry<String, Object> fieldEntry : _fieldValues.entrySet()) {
      final Object fieldValue = fieldEntry.getValue();
      OType fieldType = fieldType(fieldEntry.getKey());
      if (fieldType == null && _clazz != null) {
        final OProperty prop = _clazz.getProperty(fieldEntry.getKey());
        fieldType = prop != null ? prop.getType() : null;
      }
      if (fieldType == null
          || !(OType.EMBEDDEDLIST.equals(fieldType) || OType.EMBEDDEDMAP.equals(fieldType) || OType.EMBEDDEDSET.equals(fieldType)
              || OType.LINKSET.equals(fieldType) || OType.LINKLIST.equals(fieldType) || OType.LINKMAP.equals(fieldType)))
        continue;
      // WRAP THE PLAIN COLLECTION/MAP IN ITS TRACKED COUNTERPART, MATCHING THE DECLARED TYPE
      if (fieldValue instanceof List && fieldType.equals(OType.EMBEDDEDLIST) && !(fieldValue instanceof OTrackedMultiValue))
        fieldsToUpdate.put(fieldEntry.getKey(), new OTrackedList<Object>(this, (List<?>) fieldValue, null));
      else if (fieldValue instanceof Set && fieldType.equals(OType.EMBEDDEDSET) && !(fieldValue instanceof OTrackedMultiValue))
        fieldsToUpdate.put(fieldEntry.getKey(), new OTrackedSet<Object>(this, (Set<OIdentifiable>) fieldValue, null));
      else if (fieldValue instanceof Map && fieldType.equals(OType.EMBEDDEDMAP) && !(fieldValue instanceof OTrackedMultiValue))
        fieldsToUpdate
            .put(fieldEntry.getKey(), new OTrackedMap<OIdentifiable>(this, (Map<Object, OIdentifiable>) fieldValue, null));
      else if (fieldValue instanceof Set && fieldType.equals(OType.LINKSET) && !(fieldValue instanceof OTrackedMultiValue))
        fieldsToUpdate.put(fieldEntry.getKey(), new ORecordLazySet(this, (Collection<OIdentifiable>) fieldValue));
      else if (fieldValue instanceof List && fieldType.equals(OType.LINKLIST) && !(fieldValue instanceof OTrackedMultiValue))
        fieldsToUpdate.put(fieldEntry.getKey(), new ORecordLazyList(this, (List<OIdentifiable>) fieldValue));
      else if (fieldValue instanceof Map && fieldType.equals(OType.LINKMAP) && !(fieldValue instanceof OTrackedMultiValue))
        fieldsToUpdate.put(fieldEntry.getKey(), new ORecordLazyMap(this, (Map<Object, OIdentifiable>) fieldValue));
    }
    _fieldValues.putAll(fieldsToUpdate);
    addAllMultiValueChangeListeners();
  }
/**
* Perform gathering of all operations performed on tracked collection and create mapping between list of collection operations
* and field name that contains collection that was changed.
*
* @param <K>
* Value that uniquely identifies position of item in collection
* @param <V>
* Item value.
*/
private final class OSimpleMultiValueChangeListener<K, V> implements OMultiValueChangeListener<K, V> {
// Name of the ODocument field whose tracked collection this listener observes.
private final String fieldName;
private OSimpleMultiValueChangeListener(final String fieldName) {
this.fieldName = fieldName;
}
public void onAfterRecordChanged(final OMultiValueChangeEvent<K, V> event) {
// Any collection mutation dirties the owning document, except while it is being unmarshalled.
if (_status != STATUS.UNMARSHALLING)
setDirty();
// Record fine-grained change events only when change tracking is on and the record is persistent.
if (!(_trackingChanges && _recordId.isValid()) || _status == STATUS.UNMARSHALLING)
return;
// If the whole field value was already snapshotted as an "original value", per-operation
// events are redundant: the full old collection is available for rollback/diff instead.
if (_fieldOriginalValues != null && _fieldOriginalValues.containsKey(fieldName))
return;
// Lazily create the per-field time line of collection change events.
if (_fieldCollectionChangeTimeLines == null)
_fieldCollectionChangeTimeLines = new HashMap<String, OMultiValueChangeTimeLine<String, Object>>();
OMultiValueChangeTimeLine<String, Object> timeLine = _fieldCollectionChangeTimeLines.get(fieldName);
if (timeLine == null) {
timeLine = new OMultiValueChangeTimeLine<String, Object>();
_fieldCollectionChangeTimeLines.put(fieldName, timeLine);
}
timeLine.addCollectionChangeEvent((OMultiValueChangeEvent<String, Object>) event);
}
}
@Override
public void writeExternal(ObjectOutput stream) throws IOException {
// Serialization layout: [rid length][rid bytes][version][content length][content bytes][dirty flag].
// readExternal() must read the fields back in exactly this order.
final byte[] idBuffer = _recordId.toStream();
stream.writeInt(idBuffer.length);
stream.write(idBuffer);
_recordVersion.getSerializer().writeTo(stream, _recordVersion);
// toStream() marshals the document fields into the binary record format.
final byte[] content = toStream();
stream.writeInt(content.length);
stream.write(content);
stream.writeBoolean(_dirty);
}
@Override
public void readExternal(ObjectInput stream) throws IOException, ClassNotFoundException {
// Mirror of writeExternal(): [rid length][rid bytes][version][content length][content bytes][dirty flag].
final byte[] idBuffer = new byte[stream.readInt()];
stream.readFully(idBuffer);
_recordId.fromStream(idBuffer);
_recordVersion.getSerializer().readFrom(stream, _recordVersion);
final int len = stream.readInt();
final byte[] content = new byte[len];
stream.readFully(content);
// Unmarshalling may reset internal state, so the dirty flag is restored afterwards.
fromStream(content);
_dirty = stream.readBoolean();
}
/**
 * Tells whether the field() methods interpret a dot ('.') in a field name as chained access
 * to embedded sub-documents. Defaults to true; disable it when stored property names may
 * themselves contain dots.
 *
 * @return true if chained (dot-notation) access is enabled
 */
public boolean isAllowChainedAccess() {
  return this._allowChainedAccess;
}
/**
 * Enables or disables chained (dot-notation) access to embedded sub-documents through the
 * field() methods. Defaults to true; set it to false when stored property names may
 * themselves contain dots.
 *
 * @param _allowChainedAccess true to treat '.' as sub-document navigation, false to treat it literally
 * @return this document, for fluent call chaining
 */
public ODocument setAllowChainedAccess(final boolean _allowChainedAccess) {
  this._allowChainedAccess = _allowChainedAccess;
  return this;
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_record_impl_ODocument.java |
356 | future.andThen(new ExecutionCallback<Map<String, List<Integer>>>() {
@Override
public void onResponse(Map<String, List<Integer>> response) {
listenerResults.putAll(response);
semaphore.release();
}
@Override
public void onFailure(Throwable t) {
semaphore.release();
}
}); | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_mapreduce_ClientMapReduceTest.java |
5,097 | static final class FreeContextResponseHandler extends EmptyTransportResponseHandler {
    // Fire-and-forget handler for "free search context" acknowledgements: the response body is
    // empty and handled on the calling (SAME) thread; transport failures are only logged.

    private final ESLogger logger;

    FreeContextResponseHandler(ESLogger logger) {
        // Handling the empty ack is trivial, so no thread-pool hand-off is needed.
        super(ThreadPool.Names.SAME);
        this.logger = logger;
    }

    @Override
    public void handleException(TransportException exp) {
        // Releasing a search context is best-effort; a failure is not fatal, so just warn.
        logger.warn("Failed to send release search context", exp);
    }
} | 1no label
| src_main_java_org_elasticsearch_search_action_SearchServiceTransportAction.java |
1,388 | public static class Timestamp {
    /*
     * Immutable description of the "_timestamp" mapping: whether it is enabled, an optional
     * dot-separated path to extract the timestamp from the source document, and the date
     * format used to parse non-numeric values.
     */

    /**
     * Parses the given timestamp string. A purely numeric value is taken to already be epoch
     * milliseconds and is returned unchanged; anything else is parsed with the supplied
     * formatter and normalized to epoch milliseconds.
     *
     * @param timestampAsString raw timestamp value, numeric or formatted
     * @param dateTimeFormatter formatter used when the value is not numeric
     * @return the timestamp as epoch milliseconds, rendered as a string
     * @throws TimestampParsingException if the value is neither numeric nor parseable by the formatter
     */
    public static String parseStringTimestamp(String timestampAsString, FormatDateTimeFormatter dateTimeFormatter) throws TimestampParsingException {
        try {
            // If it parses as a long it is already a millisecond timestamp; return the caller's
            // exact representation instead of re-formatting it. (The parsed value itself was an
            // unused dead store in the original and is intentionally discarded.)
            Long.parseLong(timestampAsString);
            return timestampAsString;
        } catch (NumberFormatException e) {
            try {
                return Long.toString(dateTimeFormatter.parser().parseMillis(timestampAsString));
            } catch (RuntimeException e1) {
                throw new TimestampParsingException(timestampAsString);
            }
        }
    }

    /** Shared "disabled" instance using the default timestamp date format. */
    public static final Timestamp EMPTY = new Timestamp(false, null, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT);

    private final boolean enabled;
    private final String path;
    private final String format;
    // path split on '.', precomputed once for fast traversal of the parsed source
    private final String[] pathElements;
    private final FormatDateTimeFormatter dateTimeFormatter;

    public Timestamp(boolean enabled, String path, String format) {
        this.enabled = enabled;
        this.path = path;
        if (path == null) {
            pathElements = Strings.EMPTY_ARRAY;
        } else {
            pathElements = Strings.delimitedListToStringArray(path, ".");
        }
        this.format = format;
        this.dateTimeFormatter = Joda.forPattern(format);
    }

    /** @return whether the _timestamp field is enabled for the mapping */
    public boolean enabled() {
        return enabled;
    }

    /** @return true if a source path was configured to extract the timestamp from */
    public boolean hasPath() {
        return path != null;
    }

    public String path() {
        return this.path;
    }

    public String[] pathElements() {
        return this.pathElements;
    }

    public String format() {
        return this.format;
    }

    public FormatDateTimeFormatter dateTimeFormatter() {
        return this.dateTimeFormatter;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        Timestamp timestamp = (Timestamp) o;
        if (enabled != timestamp.enabled) return false;
        // NOTE(review): this compares formatter instances; equality semantics depend on
        // FormatDateTimeFormatter implementing equals() — confirm, otherwise this is identity.
        if (dateTimeFormatter != null ? !dateTimeFormatter.equals(timestamp.dateTimeFormatter) : timestamp.dateTimeFormatter != null)
            return false;
        if (format != null ? !format.equals(timestamp.format) : timestamp.format != null) return false;
        if (path != null ? !path.equals(timestamp.path) : timestamp.path != null) return false;
        if (!Arrays.equals(pathElements, timestamp.pathElements)) return false;
        return true;
    }

    @Override
    public int hashCode() {
        int result = (enabled ? 1 : 0);
        result = 31 * result + (path != null ? path.hashCode() : 0);
        result = 31 * result + (format != null ? format.hashCode() : 0);
        result = 31 * result + (pathElements != null ? Arrays.hashCode(pathElements) : 0);
        result = 31 * result + (dateTimeFormatter != null ? dateTimeFormatter.hashCode() : 0);
        return result;
    }
} | 1no label
| src_main_java_org_elasticsearch_cluster_metadata_MappingMetaData.java |
5,101 | class SearchFreeContextTransportHandler extends BaseTransportRequestHandler<SearchFreeContextRequest> {
// Transport action name under which this handler is registered.
static final String ACTION = "search/freeContext";
@Override
public SearchFreeContextRequest newInstance() {
return new SearchFreeContextRequest();
}
@Override
public void messageReceived(SearchFreeContextRequest request, TransportChannel channel) throws Exception {
// Release the server-side search context identified by the request, then ack with an empty response.
searchService.freeContext(request.id());
channel.sendResponse(TransportResponse.Empty.INSTANCE);
}
@Override
public String executor() {
// freeing the context is cheap,
// no need for fork it to another thread
return ThreadPool.Names.SAME;
}
} | 1no label
| src_main_java_org_elasticsearch_search_action_SearchServiceTransportAction.java |
2,107 | public class TryPutOperation extends BasePutOperation {
// Outcome of recordStore.tryPut(); drives the backup/response decisions and the after-run hook.
private boolean successful;
public TryPutOperation(String name, Data dataKey, Data value, long timeout) {
super(name, dataKey, value);
setWaitTimeout(timeout);
}
// Required no-arg constructor for deserialization.
public TryPutOperation() {
}
public void run() {
successful = recordStore.tryPut(dataKey, dataValue, ttl);
}
public void afterRun() {
// Fire the usual post-put bookkeeping only when the put actually happened.
if (successful)
super.afterRun();
}
public boolean shouldBackup() {
// A failed try-put changed nothing, so there is nothing to replicate.
return successful;
}
public void onWaitExpire() {
// Timed out waiting: answer false instead of running the operation.
getResponseHandler().sendResponse(false);
}
public Object getResponse() {
return successful;
}
@Override
public String toString() {
return "TryPutOperation{" + name + "}";
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_map_operation_TryPutOperation.java |
508 | public class DayOfWeekType implements Serializable, BroadleafEnumerationType {
    // Type-safe extensible enumeration of the days of the week ("1" = Sunday ... "7" = Saturday).
    // Instances self-register in TYPES on construction so getInstance() can resolve them by key.

    private static final long serialVersionUID = 1L;

    private static final Map<String, DayOfWeekType> TYPES = new LinkedHashMap<String, DayOfWeekType>();

    public static final DayOfWeekType SUNDAY = new DayOfWeekType("1", "Sunday");
    public static final DayOfWeekType MONDAY = new DayOfWeekType("2", "Monday");
    public static final DayOfWeekType TUESDAY = new DayOfWeekType("3", "Tuesday");
    public static final DayOfWeekType WEDNESDAY = new DayOfWeekType("4", "Wednesday");
    public static final DayOfWeekType THURSDAY = new DayOfWeekType("5", "Thursday");
    public static final DayOfWeekType FRIDAY = new DayOfWeekType("6", "Friday");
    public static final DayOfWeekType SATURDAY = new DayOfWeekType("7", "Saturday");

    /** Looks up a previously registered instance by its type key; null if unknown. */
    public static DayOfWeekType getInstance(final String type) {
        return TYPES.get(type);
    }

    private String type;
    private String friendlyType;

    /** No-arg constructor required by serialization frameworks; registers nothing. */
    public DayOfWeekType() {
        //do nothing
    }

    public DayOfWeekType(final String type, final String friendlyType) {
        this.friendlyType = friendlyType;
        setType(type);
    }

    public String getType() {
        return type;
    }

    public String getFriendlyType() {
        return friendlyType;
    }

    /** Stores the key and registers this instance, rejecting duplicate keys. */
    private void setType(final String type) {
        this.type = type;
        if (TYPES.containsKey(type)) {
            throw new RuntimeException("Cannot add the type: (" + type + "). It already exists as a type via " + getInstance(type).getClass().getName());
        }
        TYPES.put(type, this);
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        return prime + ((type == null) ? 0 : type.hashCode());
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        final DayOfWeekType other = (DayOfWeekType) obj;
        return (type == null) ? (other.type == null) : type.equals(other.type);
    }
} | 1no label
| common_src_main_java_org_broadleafcommerce_common_time_DayOfWeekType.java |
3,150 | public class TxnReservePollOperation extends QueueOperation implements WaitSupport {
// Id of an offer reserved earlier in the same transaction; its meaning is interpreted by
// QueueContainer.txnPollReserve().
private long reservedOfferId;
// Owning transaction id, used to bind the reservation to the right transaction context.
private String transactionId;
// Required no-arg constructor for deserialization.
public TxnReservePollOperation() {
}
public TxnReservePollOperation(String name, long timeoutMillis, long reservedOfferId, String transactionId) {
super(name, timeoutMillis);
this.reservedOfferId = reservedOfferId;
this.transactionId = transactionId;
}
@Override
public void run() throws Exception {
// Reserve an item for a transactional poll; the reserved item (or null) becomes the response.
QueueContainer container = getOrCreateContainer();
response = container.txnPollReserve(reservedOfferId, transactionId);
}
@Override
public WaitNotifyKey getWaitKey() {
QueueContainer container = getOrCreateContainer();
return container.getPollWaitNotifyKey();
}
@Override
public boolean shouldWait() {
// Park the operation only when a wait was requested (timeout != 0) and the queue is empty.
return getWaitTimeout() != 0 && getOrCreateContainer().size() == 0;
}
@Override
public void onWaitExpire() {
// Nothing became available within the timeout: answer null instead of an item.
getResponseHandler().sendResponse(null);
}
@Override
public int getId() {
return QueueDataSerializerHook.TXN_RESERVE_POLL;
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
super.writeInternal(out);
out.writeLong(reservedOfferId);
out.writeUTF(transactionId);
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
// Must mirror writeInternal() field order exactly.
super.readInternal(in);
reservedOfferId = in.readLong();
transactionId = in.readUTF();
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_queue_tx_TxnReservePollOperation.java |
141 | final class GenericClientExceptionConverter implements ClientExceptionConverter {
    // Fallback converter used when no specific conversion exists for a server-side exception:
    // it captures the exception's class name, message and full stack trace into a GenericError
    // that can be shipped back to the client.

    @Override
    public Object convert(Throwable t) {
        final StringWriter stackTraceWriter = new StringWriter();
        final PrintWriter printWriter = new PrintWriter(stackTraceWriter);
        t.printStackTrace(printWriter);
        printWriter.flush();
        return new GenericError(t.getClass().getName(), t.getMessage(), stackTraceWriter.toString(), 0);
    }
} | 0true
| hazelcast_src_main_java_com_hazelcast_client_GenericClientExceptionConverter.java |
1,539 | public class ResourceAdapterImpl implements ResourceAdapter, Serializable {
    /*
     * JCA ResourceAdapter that owns a HazelcastInstance: the instance is created from the
     * configured XML location in start() and shut down again in stop(). Inbound messaging
     * (endpoint activation) is not supported.
     */

    /**
     * Identity generator
     */
    private static final AtomicInteger ID_GEN = new AtomicInteger();

    private static final long serialVersionUID = -1727994229521767306L;

    /**
     * The hazelcast instance itself
     */
    private HazelcastInstance hazelcast;

    /**
     * The configured hazelcast configuration location
     */
    private String configurationLocation;

    /**
     * Identity. NOTE(review): transient and only assigned in the constructor, so a
     * deserialized instance ends up with id == 0 — all deserialized adapters then compare
     * equal via equals()/hashCode(); confirm this is intended.
     */
    private final transient int id;

    public ResourceAdapterImpl() {
        id = ID_GEN.incrementAndGet();
    }

    /* (non-Javadoc)
     * @see javax.resource.spi.ResourceAdapter
     * #endpointActivation(javax.resource.spi.endpoint.MessageEndpointFactory, javax.resource.spi.ActivationSpec)
     */
    public void endpointActivation(MessageEndpointFactory endpointFactory, ActivationSpec spec)
            throws ResourceException {
        // Inbound messaging is not supported by this adapter; nothing to activate.
    }

    /* (non-Javadoc)
     * @see javax.resource.spi.ResourceAdapter
     * #endpointDeactivation(javax.resource.spi.endpoint.MessageEndpointFactory, javax.resource.spi.ActivationSpec)
     */
    public void endpointDeactivation(MessageEndpointFactory endpointFactory, ActivationSpec spec) {
        // Inbound messaging is not supported by this adapter; nothing to deactivate.
    }

    /* (non-Javadoc)
     * @see javax.resource.spi.ResourceAdapter
     * #getXAResources(javax.resource.spi.ActivationSpec[])
     */
    public XAResource[] getXAResources(ActivationSpec[] specs) throws ResourceException {
        //JBoss is fine with null, weblogic requires an empty array
        return new XAResource[0];
    }

    /* (non-Javadoc)
     * @see javax.resource.spi.ResourceAdapter#start(javax.resource.spi.BootstrapContext)
     */
    public void start(BootstrapContext ctx) throws ResourceAdapterInternalException {
        // Gets/creates the hazelcast instance. (FIX: removed a stray empty statement ';'
        // that followed this call in the original.)
        ConfigBuilder config = buildConfiguration();
        setHazelcast(Hazelcast.newHazelcastInstance(config.build()));
    }

    /**
     * Creates a hazelcast configuration based on the {@link #getConfigLocation()}
     *
     * @return the created hazelcast configuration
     * @throws ResourceAdapterInternalException If there was a problem with the configuration creation
     */
    private ConfigBuilder buildConfiguration()
            throws ResourceAdapterInternalException {
        XmlConfigBuilder config;
        if (configurationLocation == null || configurationLocation.length() == 0) {
            // No explicit location configured: fall back to Hazelcast's default lookup.
            config = new XmlConfigBuilder();
        } else {
            try {
                config = new XmlConfigBuilder(configurationLocation);
            } catch (FileNotFoundException e) {
                throw new ResourceAdapterInternalException(e.getMessage(), e);
            }
        }
        return config;
    }

    /* (non-Javadoc)
     * @see javax.resource.spi.ResourceAdapter#stop()
     */
    public void stop() {
        if (getHazelcast() != null) {
            getHazelcast().getLifecycleService().shutdown();
        }
    }

    /**
     * Sets the underlying hazelcast instance
     */
    private void setHazelcast(HazelcastInstance hazelcast) {
        this.hazelcast = hazelcast;
    }

    /**
     * Provides access to the underlying hazelcast instance
     */
    HazelcastInstance getHazelcast() {
        return hazelcast;
    }

    /**
     * Called by the container
     *
     * @param configLocation Hazelcast's configuration location
     */
    public void setConfigLocation(String configLocation) {
        this.configurationLocation = configLocation;
    }

    /**
     * @return The configured hazelcast configuration location via RAR deployment descriptor
     */
    public String getConfigLocation() {
        return configurationLocation;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + id;
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        ResourceAdapterImpl other = (ResourceAdapterImpl) obj;
        if (id != other.id) {
            return false;
        }
        return true;
    }
} | 1no label
| hazelcast-ra_hazelcast-jca_src_main_java_com_hazelcast_jca_ResourceAdapterImpl.java |
4,443 | public class IndicesFieldDataCache extends AbstractComponent implements RemovalListener<IndicesFieldDataCache.Key, AtomicFieldData> {
// Node-level field data cache shared across all indices: one Guava cache keyed by
// (per-field IndexFieldCache, Lucene segment core key). Evictions/removals are routed back
// through onRemoval() so per-shard statistics can be updated.
Cache<Key, AtomicFieldData> cache;
// "size" setting as configured (memory expression); "-1" means unbounded.
private volatile String size;
private volatile long sizeInBytes;
// Optional expire-after-access; null or <= 0 disables time-based eviction.
private volatile TimeValue expire;
@Inject
public IndicesFieldDataCache(Settings settings) {
super(settings);
this.size = componentSettings.get("size", "-1");
this.sizeInBytes = componentSettings.getAsMemory("size", "-1").bytes();
this.expire = componentSettings.getAsTime("expire", null);
buildCache();
}
private void buildCache() {
CacheBuilder<Key, AtomicFieldData> cacheBuilder = CacheBuilder.newBuilder()
.removalListener(this);
if (sizeInBytes > 0) {
cacheBuilder.maximumWeight(sizeInBytes).weigher(new FieldDataWeigher());
}
// defaults to 4, but this is a busy map for all indices, increase it a bit
cacheBuilder.concurrencyLevel(16);
if (expire != null && expire.millis() > 0) {
cacheBuilder.expireAfterAccess(expire.millis(), TimeUnit.MILLISECONDS);
}
logger.debug("using size [{}] [{}], expire [{}]", size, new ByteSizeValue(sizeInBytes), expire);
cache = cacheBuilder.build();
}
public void close() {
cache.invalidateAll();
}
public IndexFieldDataCache buildIndexFieldDataCache(@Nullable IndexService indexService, Index index, FieldMapper.Names fieldNames, FieldDataType fieldDataType) {
return new IndexFieldCache(indexService, index, fieldNames, fieldDataType);
}
@Override
public void onRemoval(RemovalNotification<Key, AtomicFieldData> notification) {
Key key = notification.getKey();
if (key == null || key.listener == null) {
return; // nothing to do here really...
}
IndexFieldCache indexCache = key.indexCache;
long sizeInBytes = key.sizeInBytes;
AtomicFieldData value = notification.getValue();
// Prefer the explicit size stashed on the key; fall back to measuring the value itself.
if (sizeInBytes == -1 && value != null) {
sizeInBytes = value.getMemorySizeInBytes();
}
key.listener.onUnload(indexCache.fieldNames, indexCache.fieldDataType, notification.wasEvicted(), sizeInBytes, value);
}
public static class FieldDataWeigher implements Weigher<Key, AtomicFieldData> {
@Override
public int weigh(Key key, AtomicFieldData fieldData) {
int weight = (int) Math.min(fieldData.getMemorySizeInBytes(), Integer.MAX_VALUE);
// Guava requires a strictly positive weight; clamp empty entries to 1.
return weight == 0 ? 1 : weight;
}
}
/**
* A specific cache instance for the relevant parameters of it (index, fieldNames, fieldType).
*/
class IndexFieldCache implements IndexFieldDataCache, SegmentReader.CoreClosedListener {
@Nullable
private final IndexService indexService;
final Index index;
final FieldMapper.Names fieldNames;
final FieldDataType fieldDataType;
IndexFieldCache(@Nullable IndexService indexService, Index index, FieldMapper.Names fieldNames, FieldDataType fieldDataType) {
this.indexService = indexService;
this.index = index;
this.fieldNames = fieldNames;
this.fieldDataType = fieldDataType;
}
@Override
public <FD extends AtomicFieldData, IFD extends IndexFieldData<FD>> FD load(final AtomicReaderContext context, final IFD indexFieldData) throws Exception {
final Key key = new Key(this, context.reader().getCoreCacheKey());
//noinspection unchecked
return (FD) cache.get(key, new Callable<AtomicFieldData>() {
@Override
public AtomicFieldData call() throws Exception {
// Invalidate this entry automatically when the segment's core reader is closed.
SegmentReaderUtils.registerCoreListener(context.reader(), IndexFieldCache.this);
AtomicFieldData fieldData = indexFieldData.loadDirect(context);
if (indexService != null) {
ShardId shardId = ShardUtils.extractShardId(context.reader());
if (shardId != null) {
IndexShard shard = indexService.shard(shardId.id());
if (shard != null) {
// Attach the shard's stats listener so load/unload are accounted per shard.
key.listener = shard.fieldData();
}
}
}
if (key.listener != null) {
key.listener.onLoad(fieldNames, fieldDataType, fieldData);
}
return fieldData;
}
});
}
@Override
public void onClose(Object coreKey) {
cache.invalidate(new Key(this, coreKey));
}
@Override
public void clear() {
// Drops every cached entry belonging to this cache's index.
for (Key key : cache.asMap().keySet()) {
if (key.indexCache.index.equals(index)) {
cache.invalidate(key);
}
}
}
@Override
public void clear(String fieldName) {
for (Key key : cache.asMap().keySet()) {
if (key.indexCache.index.equals(index)) {
if (key.indexCache.fieldNames.fullName().equals(fieldName)) {
cache.invalidate(key);
}
}
}
}
@Override
public void clear(Object coreCacheKey) {
cache.invalidate(new Key(this, coreCacheKey));
}
}
public static class Key {
public final IndexFieldCache indexCache;
public final Object readerKey;
@Nullable
public IndexFieldDataCache.Listener listener; // optional stats listener
long sizeInBytes = -1; // optional size in bytes (we keep it here in case the values are soft references)
Key(IndexFieldCache indexCache, Object readerKey) {
this.indexCache = indexCache;
this.readerKey = readerKey;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
// NOTE(review): no null/type check before the cast below — equals(null) or a foreign type
// throws instead of returning false. Safe only while Keys are compared exclusively against
// other Keys inside this cache; confirm before wider use.
Key key = (Key) o;
if (!indexCache.equals(key.indexCache)) return false;
if (!readerKey.equals(key.readerKey)) return false;
return true;
}
@Override
public int hashCode() {
int result = indexCache.hashCode();
result = 31 * result + readerKey.hashCode();
return result;
}
}
} | 1no label
| src_main_java_org_elasticsearch_indices_fielddata_cache_IndicesFieldDataCache.java |
1,469 | public static class Builder {
// Builder for an IndexShardRoutingTable: collects the routing entries of one shard.
private ShardId shardId;
private final List<ShardRouting> shards;
// True once a primary has been allocated after the index-creation API call completed.
private boolean primaryAllocatedPostApi;
public Builder(IndexShardRoutingTable indexShard) {
this.shardId = indexShard.shardId;
this.shards = newArrayList(indexShard.shards);
this.primaryAllocatedPostApi = indexShard.primaryAllocatedPostApi();
}
public Builder(ShardId shardId, boolean primaryAllocatedPostApi) {
this.shardId = shardId;
this.shards = newArrayList();
this.primaryAllocatedPostApi = primaryAllocatedPostApi;
}
public Builder addShard(ImmutableShardRouting shardEntry) {
for (ShardRouting shard : shards) {
// don't add two that map to the same node id
// we rely on the fact that a node does not have primary and backup of the same shard
if (shard.assignedToNode() && shardEntry.assignedToNode()
&& shard.currentNodeId().equals(shardEntry.currentNodeId())) {
return this;
}
}
shards.add(shardEntry);
return this;
}
public Builder removeShard(ShardRouting shardEntry) {
shards.remove(shardEntry);
return this;
}
public IndexShardRoutingTable build() {
// we can automatically set allocatedPostApi to true if the primary is active
if (!primaryAllocatedPostApi) {
for (ShardRouting shardRouting : shards) {
if (shardRouting.primary() && shardRouting.active()) {
primaryAllocatedPostApi = true;
}
}
}
return new IndexShardRoutingTable(shardId, ImmutableList.copyOf(shards), primaryAllocatedPostApi);
}
public static IndexShardRoutingTable readFrom(StreamInput in) throws IOException {
String index = in.readString();
return readFromThin(in, index);
}
// Deserializes a routing table whose index name was already read by the caller.
// Must mirror writeToThin(): shard id, allocatedPostApi flag, entry count, entries.
public static IndexShardRoutingTable readFromThin(StreamInput in, String index) throws IOException {
int iShardId = in.readVInt();
boolean allocatedPostApi = in.readBoolean();
Builder builder = new Builder(new ShardId(index, iShardId), allocatedPostApi);
int size = in.readVInt();
for (int i = 0; i < size; i++) {
ImmutableShardRouting shard = ImmutableShardRouting.readShardRoutingEntry(in, index, iShardId);
builder.addShard(shard);
}
return builder.build();
}
public static void writeTo(IndexShardRoutingTable indexShard, StreamOutput out) throws IOException {
out.writeString(indexShard.shardId().index().name());
writeToThin(indexShard, out);
}
// Serialization mirror of readFromThin().
public static void writeToThin(IndexShardRoutingTable indexShard, StreamOutput out) throws IOException {
out.writeVInt(indexShard.shardId.id());
out.writeBoolean(indexShard.primaryAllocatedPostApi());
out.writeVInt(indexShard.shards.size());
for (ShardRouting entry : indexShard) {
entry.writeToThin(out);
}
}
} | 1no label
| src_main_java_org_elasticsearch_cluster_routing_IndexShardRoutingTable.java |
287 | public class OScriptManager {
  /*
   * Registry of script engines, formatters and variable injections used to run server-side
   * functions and scripts (SQL, JavaScript, Ruby, ...). Engines whose factory advertises
   * multi-threading support are cached and shared between threads; the others are created on
   * demand per request.
   */

  protected final String DEF_LANGUAGE = "javascript";
  protected ScriptEngineManager scriptEngineManager;
  // Per-request engine factories, keyed by lower-case language name.
  protected Map<String, ScriptEngineFactory> engines = new HashMap<String, ScriptEngineFactory>();
  // Thread-safe engines, cached and shared across threads, keyed by lower-case language name.
  protected Map<String, ScriptEngine> sharedEngines = new HashMap<String, ScriptEngine>();
  protected String defaultLanguage = DEF_LANGUAGE;
  protected Map<String, OScriptFormatter> formatters = new HashMap<String, OScriptFormatter>();
  protected List<OScriptInjection> injections = new ArrayList<OScriptInjection>();
  protected static final Object[] EMPTY_PARAMS = new Object[] {};
  // Number of source lines printed around the failing line when reporting script errors.
  protected static final int LINES_AROUND_ERROR = 5;

  public OScriptManager() {
    scriptEngineManager = new ScriptEngineManager();
    registerSharedEngine(OSQLScriptEngine.NAME, new OSQLScriptEngineFactory().getScriptEngine());
    for (ScriptEngineFactory f : scriptEngineManager.getEngineFactories()) {
      if (f.getParameter("THREADING") != null)
        // MULTI-THREAD: CACHE IT AS SHARED
        registerSharedEngine(f.getLanguageName().toLowerCase(), f.getScriptEngine());
      else
        registerEngine(f.getLanguageName().toLowerCase(), f);
      // NOTE(review): defaultLanguage is initialized to DEF_LANGUAGE above, so this branch
      // can never fire — kept for behavioral parity with the original.
      if (defaultLanguage == null)
        defaultLanguage = f.getLanguageName();
    }
    if (!existsEngine(DEF_LANGUAGE)) {
      final ScriptEngine defEngine = scriptEngineManager.getEngineByName(DEF_LANGUAGE);
      if (defEngine == null) {
        OLogManager.instance().warn(this, "Cannot find default script language for %s", DEF_LANGUAGE);
      } else {
        // GET DIRECTLY THE LANGUAGE BY NAME (DON'T KNOW WHY SOMETIMES DOESN'T RETURN IT WITH getEngineFactories() ABOVE!
        registerEngine(DEF_LANGUAGE, defEngine.getFactory());
        defaultLanguage = DEF_LANGUAGE;
      }
    }
    registerFormatter(OSQLScriptEngine.NAME, new OSQLScriptFormatter());
    registerFormatter(DEF_LANGUAGE, new OJSScriptFormatter());
    registerFormatter("ruby", new ORubyScriptFormatter());
  }

  /** Renders the full definition of a stored function in its language's syntax. */
  public String getFunctionDefinition(final OFunction iFunction) {
    final OScriptFormatter formatter = formatters.get(iFunction.getLanguage().toLowerCase());
    if (formatter == null)
      throw new IllegalArgumentException("Cannot find script formatter for the language '" + iFunction.getLanguage() + "'");
    return formatter.getFunctionDefinition(iFunction);
  }

  /** Renders an invocation expression for a stored function with the given arguments. */
  public String getFunctionInvoke(final OFunction iFunction, final Object[] iArgs) {
    final OScriptFormatter formatter = formatters.get(iFunction.getLanguage().toLowerCase());
    if (formatter == null)
      throw new IllegalArgumentException("Cannot find script formatter for the language '" + iFunction.getLanguage() + "'");
    return formatter.getFunctionInvoke(iFunction, iArgs);
  }

  /**
   * Format the library of functions for a language.
   *
   * @param db
   *          Current database instance
   * @param iLanguage
   *          Language as filter
   * @return String containing all the functions
   */
  public String getLibrary(final ODatabaseComplex<?> db, final String iLanguage) {
    if (db == null)
      // NO DB = NO LIBRARY
      return null;
    final StringBuilder code = new StringBuilder();
    final Set<String> functions = db.getMetadata().getFunctionLibrary().getFunctionNames();
    for (String fName : functions) {
      final OFunction f = db.getMetadata().getFunctionLibrary().getFunction(fName);
      if (f.getLanguage() == null)
        throw new OConfigurationException("Database function '" + fName + "' has no language");
      if (f.getLanguage().equalsIgnoreCase(iLanguage)) {
        final String def = getFunctionDefinition(f);
        if (def != null) {
          code.append(def);
          code.append("\n");
        }
      }
    }
    return code.length() == 0 ? null : code.toString();
  }

  /** Tells whether an engine (shared or per-request) is registered for the given language. */
  public boolean existsEngine(String iLanguage) {
    if (iLanguage == null)
      return false;
    iLanguage = iLanguage.toLowerCase();
    return sharedEngines.containsKey(iLanguage) || engines.containsKey(iLanguage);
  }

  /**
   * Returns a script engine for the language: the shared instance when the engine is
   * thread-safe, otherwise a brand new engine from the registered factory.
   *
   * @throws OCommandScriptException if no language is given or no engine supports it
   */
  public ScriptEngine getEngine(final String iLanguage) {
    if (iLanguage == null)
      throw new OCommandScriptException("No language was specified");
    final String lang = iLanguage.toLowerCase();
    ScriptEngine scriptEngine = sharedEngines.get(lang);
    if (scriptEngine == null) {
      final ScriptEngineFactory scriptEngineFactory = engines.get(lang);
      if (scriptEngineFactory == null)
        throw new OCommandScriptException("Unsupported language: " + iLanguage + ". Supported languages are: "
            + getSupportedLanguages());
      scriptEngine = scriptEngineFactory.getScriptEngine();
    }
    return scriptEngine;
  }

  public Iterable<String> getSupportedLanguages() {
    final HashSet<String> result = new HashSet<String>();
    result.addAll(sharedEngines.keySet());
    result.addAll(engines.keySet());
    return result;
  }

  /**
   * Populates the bindings with the fixed variables (db, gdb, orient, util), the registered
   * injections, the command context (ctx + its variables) and the invocation parameters.
   */
  public Bindings bind(final Bindings binding, final ODatabaseRecordTx db, final OCommandContext iContext,
      final Map<Object, Object> iArgs) {
    if (db != null) {
      // BIND FIXED VARIABLES
      binding.put("db", new OScriptDocumentDatabaseWrapper(db));
      binding.put("gdb", new OScriptGraphDatabaseWrapper(db));
      binding.put("orient", new OScriptOrientWrapper(db));
    }
    binding.put("util", new OFunctionUtilWrapper(null));
    for (OScriptInjection i : injections)
      i.bind(binding);
    // BIND CONTEXT VARIABLE INTO THE SCRIPT
    if (iContext != null) {
      binding.put("ctx", iContext);
      for (Entry<String, Object> a : iContext.getVariables().entrySet())
        binding.put(a.getKey(), a.getValue());
    }
    // BIND PARAMETERS INTO THE SCRIPT
    if (iArgs != null) {
      for (Entry<Object, Object> a : iArgs.entrySet())
        binding.put(a.getKey().toString(), a.getValue());
      binding.put("params", iArgs.values().toArray());
    } else
      binding.put("params", EMPTY_PARAMS);
    return binding;
  }

  /**
   * Builds a readable error report for a script failure: locates the failing line (working
   * around Rhino sometimes embedding it in the message text), prints LINES_AROUND_ERROR lines
   * of context and the name of the enclosing function. Always throws OCommandScriptException.
   */
  public String getErrorMessage(final ScriptException e, final String lib) {
    int errorLineNumber = e.getLineNumber();
    if (errorLineNumber <= 0) {
      // FIX TO RHINO: SOMETIMES HAS THE LINE NUMBER INSIDE THE TEXT :-(
      final String excMessage = e.toString();
      final int pos = excMessage.indexOf("<Unknown Source>#");
      if (pos > -1) {
        final int end = excMessage.indexOf(')', pos + "<Unknown Source>#".length());
        String lineNumberAsString = excMessage.substring(pos + "<Unknown Source>#".length(), end);
        errorLineNumber = Integer.parseInt(lineNumberAsString);
      }
    }
    if (errorLineNumber <= 0) {
      throw new OCommandScriptException("Error on evaluation of the script library. Error: " + e.getMessage()
          + "\nScript library was:\n" + lib);
    } else {
      final StringBuilder code = new StringBuilder();
      final Scanner scanner = new Scanner(lib);
      try {
        scanner.useDelimiter("\n");
        String currentLine = null;
        String lastFunctionName = "unknown";
        for (int currentLineNumber = 1; scanner.hasNext(); currentLineNumber++) {
          currentLine = scanner.next();
          int pos = currentLine.indexOf("function");
          if (pos > -1) {
            final String[] words = OStringParser.getWords(currentLine.substring(pos + "function".length() + 1), " \r\n\t");
            // FIX: was 'words[0] != "("' — a reference comparison that is always true for
            // substrings, so anonymous functions ("function (...)") polluted the reported
            // function name. Compare by value instead.
            if (words.length > 0 && !"(".equals(words[0]))
              lastFunctionName = words[0];
          }
          if (currentLineNumber == errorLineNumber)
            // APPEND X LINES BEFORE
            code.append(String.format("%4d: >>> %s\n", currentLineNumber, currentLine));
          else if (Math.abs(currentLineNumber - errorLineNumber) <= LINES_AROUND_ERROR)
            // AROUND: APPEND IT
            code.append(String.format("%4d: %s\n", currentLineNumber, currentLine));
        }
        code.insert(0, String.format("ScriptManager: error %s.\nFunction %s:\n\n", e.getMessage(), lastFunctionName));
      } finally {
        scanner.close();
      }
      throw new OCommandScriptException(code.toString());
    }
  }

  /**
   * Unbinds variables
   *
   * @param binding
   */
  public void unbind(Bindings binding) {
    for (OScriptInjection i : injections)
      i.unbind(binding);
  }

  public void registerInjection(final OScriptInjection iInj) {
    if (!injections.contains(iInj))
      injections.add(iInj);
  }

  public void unregisterInjection(final OScriptInjection iInj) {
    injections.remove(iInj);
  }

  public List<OScriptInjection> getInjections() {
    return injections;
  }

  public OScriptManager registerEngine(final String iLanguage, final ScriptEngineFactory iEngine) {
    engines.put(iLanguage, iEngine);
    return this;
  }

  /**
   * Registers multi-thread engines can be cached and shared between threads.
   *
   * @param iLanguage
   *          Language name
   * @param iEngine
   *          Engine instance
   */
  public OScriptManager registerSharedEngine(final String iLanguage, final ScriptEngine iEngine) {
    sharedEngines.put(iLanguage.toLowerCase(), iEngine);
    return this;
  }

  public OScriptManager registerFormatter(final String iLanguage, final OScriptFormatter iFormatterImpl) {
    formatters.put(iLanguage.toLowerCase(), iFormatterImpl);
    return this;
  }
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_command_script_OScriptManager.java |
4,481 | public class RecoveryStatus {
// Mutable progress/state holder for one ongoing peer recovery of a shard (target side).
public static enum Stage {
INIT,
INDEX,
TRANSLOG,
FINALIZE,
DONE
}
final ShardId shardId;
final long recoveryId;
final InternalIndexShard indexShard;
public RecoveryStatus(long recoveryId, InternalIndexShard indexShard) {
this.recoveryId = recoveryId;
this.indexShard = indexShard;
this.shardId = indexShard.shardId();
}
// Thread currently executing the recovery, if any.
volatile Thread recoveryThread;
private volatile boolean canceled;
volatile boolean sentCanceledToSource;
// Files currently being written for this recovery, keyed by transfer key; nulled out on cancel.
private volatile ConcurrentMap<String, IndexOutput> openIndexOutputs = ConcurrentCollections.newConcurrentMap();
ConcurrentMap<String, String> checksums = ConcurrentCollections.newConcurrentMap();
final long startTime = System.currentTimeMillis();
long time;
// Phase 1 (file copy) bookkeeping: names/sizes of files to transfer vs. files reused locally.
List<String> phase1FileNames;
List<Long> phase1FileSizes;
List<String> phase1ExistingFileNames;
List<Long> phase1ExistingFileSizes;
long phase1TotalSize;
long phase1ExistingTotalSize;
volatile Stage stage = Stage.INIT;
volatile long currentTranslogOperations = 0;
AtomicLong currentFilesSize = new AtomicLong();
public long startTime() {
return startTime;
}
public long time() {
return this.time;
}
public long phase1TotalSize() {
return phase1TotalSize;
}
public long phase1ExistingTotalSize() {
return phase1ExistingTotalSize;
}
public Stage stage() {
return stage;
}
public long currentTranslogOperations() {
return currentTranslogOperations;
}
public long currentFilesSize() {
return currentFilesSize.get();
}
public boolean isCanceled() {
return canceled;
}
// synchronized so cancellation cannot interleave with openAndPutIndexOutput() below.
public synchronized void cancel() {
canceled = true;
}
public IndexOutput getOpenIndexOutput(String key) {
// Snapshot the volatile field once: cancel may null it out concurrently.
final ConcurrentMap<String, IndexOutput> outputs = openIndexOutputs;
if (canceled || outputs == null) {
return null;
}
return outputs.get(key);
}
// NOTE(review): method name is misspelled ("cancle") but kept for API compatibility.
public synchronized Set<Entry<String, IndexOutput>> cancleAndClearOpenIndexInputs() {
cancel();
final ConcurrentMap<String, IndexOutput> outputs = openIndexOutputs;
openIndexOutputs = null;
if (outputs == null) {
return null;
}
Set<Entry<String, IndexOutput>> entrySet = outputs.entrySet();
return entrySet;
}
public IndexOutput removeOpenIndexOutputs(String name) {
final ConcurrentMap<String, IndexOutput> outputs = openIndexOutputs;
if (outputs == null) {
return null;
}
return outputs.remove(name);
}
// synchronized against cancel(): prevents opening a new output after the map was cleared.
public synchronized IndexOutput openAndPutIndexOutput(String key, String name, Store store) throws IOException {
if (isCanceled()) {
return null;
}
final ConcurrentMap<String, IndexOutput> outputs = openIndexOutputs;
IndexOutput indexOutput = store.createOutputRaw(name);
outputs.put(key, indexOutput);
return indexOutput;
}
} | 1no label
| src_main_java_org_elasticsearch_indices_recovery_RecoveryStatus.java |
322 | static class SimpleMapStore implements MapStore<String, String>, MapLoader<String, String> {
public static final int MAX_KEYS = 30;
public static final int DELAY_SECONDS_PER_KEY = 1;
@Override
public String load(String key) {
sleepSeconds(DELAY_SECONDS_PER_KEY);
return key + "value";
}
@Override
public Map<String, String> loadAll(Collection<String> keys) {
Map<String, String> map = new HashMap<String, String>();
for (String key : keys) {
map.put(key, load(key));
}
return map;
}
@Override
public Set<String> loadAllKeys() {
Set<String> keys = new HashSet<String>();
for (int k = 0; k < MAX_KEYS; k++) { keys.add("key" + k); }
return keys;
}
@Override
public void delete(String key) {
sleepSeconds(DELAY_SECONDS_PER_KEY);
}
@Override
public void deleteAll(Collection<String> keys) {
for (String key : keys) {
delete(key);
}
}
@Override
public void store(String key, String value) {
sleepSeconds(DELAY_SECONDS_PER_KEY);
}
@Override
public void storeAll(Map<String, String> entries) {
for (Map.Entry<String, String> e : entries.entrySet()) {
store(e.getKey(), e.getValue());
}
}
} | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_map_ClientMapStoreTest.java |
/**
 * Cluster listener that triggers a re-election whenever a member leaves
 * the cluster, by demoting the departed instance from all of its roles.
 */
public class ClusterLeaveReelectionListener
    extends ClusterListener.Adapter
{
    private final Election election;
    private final StringLogger logger;

    /**
     * @param election election service used to demote the departed member
     * @param logger   logger for the demotion warning
     */
    public ClusterLeaveReelectionListener( Election election, StringLogger logger )
    {
        this.election = election;
        this.logger = logger;
    }

    @Override
    public void leftCluster( InstanceId member )
    {
        logger.warn( "Demoting member " + member + " because it left the cluster" );
        // Suggest reelection for all roles of this node
        election.demote( member );
    }
}
| enterprise_cluster_src_main_java_org_neo4j_cluster_protocol_election_ClusterLeaveReelectionListener.java |
45 | public class TouchCommandParser extends TypeAwareCommandParser {
public TouchCommandParser(TextCommandType type) {
super(type);
}
public TextCommand parser(SocketTextReader socketTextReader, String cmd, int space) {
StringTokenizer st = new StringTokenizer(cmd);
st.nextToken();
String key = null;
int expiration = 0;
boolean noReply = false;
if (st.hasMoreTokens()) {
key = st.nextToken();
} else {
return new ErrorCommand(ERROR_CLIENT);
}
if (st.hasMoreTokens()) {
expiration = Integer.parseInt(st.nextToken());
} else {
return new ErrorCommand(ERROR_CLIENT);
}
if (st.hasMoreTokens()) {
noReply = "noreply".equals(st.nextToken());
}
return new TouchCommand(type, key, expiration, noReply);
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_ascii_memcache_TouchCommandParser.java |
96 | @RunWith(HazelcastSerialClassRunner.class)
@Category(QuickTest.class)
public class ClientIssueTest extends HazelcastTestSupport {
@After
@Before
public void cleanup() throws Exception {
HazelcastClient.shutdownAll();
Hazelcast.shutdownAll();
}
@Test
public void testInitialMemberListener() throws InterruptedException {
final HazelcastInstance instance1 = Hazelcast.newHazelcastInstance();
final HazelcastInstance instance2 = Hazelcast.newHazelcastInstance();
final ClientConfig clientConfig = new ClientConfig();
final CountDownLatch latch1 = new CountDownLatch(1);
clientConfig.addListenerConfig(new ListenerConfig().setImplementation(new StaticMemberListener(latch1)));
final HazelcastInstance client = HazelcastClient.newHazelcastClient(clientConfig);
assertTrue("Before starting", latch1.await(5, TimeUnit.SECONDS));
final CountDownLatch latch2 = new CountDownLatch(1);
client.getCluster().addMembershipListener(new StaticMemberListener(latch2));
assertTrue("After starting", latch2.await(5, TimeUnit.SECONDS));
}
static class StaticMemberListener implements MembershipListener, InitialMembershipListener {
final CountDownLatch latch;
StaticMemberListener(CountDownLatch latch) {
this.latch = latch;
}
public void init(InitialMembershipEvent event) {
latch.countDown();
}
public void memberAdded(MembershipEvent membershipEvent) {
}
public void memberRemoved(MembershipEvent membershipEvent) {
}
public void memberAttributeChanged(MemberAttributeEvent memberAttributeEvent) {
}
}
@Test
public void testClientPortConnection() {
final Config config1 = new Config();
config1.getGroupConfig().setName("foo");
config1.getNetworkConfig().setPort(5701);
final HazelcastInstance instance1 = Hazelcast.newHazelcastInstance(config1);
instance1.getMap("map").put("key", "value");
final Config config2 = new Config();
config2.getGroupConfig().setName("bar");
config2.getNetworkConfig().setPort(5702);
final HazelcastInstance instance2 = Hazelcast.newHazelcastInstance(config2);
final ClientConfig clientConfig = new ClientConfig();
clientConfig.getGroupConfig().setName("bar");
final HazelcastInstance client = HazelcastClient.newHazelcastClient(clientConfig);
final IMap<Object, Object> map = client.getMap("map");
assertNull(map.put("key", "value"));
assertEquals(1, map.size());
}
/**
* Test for issues #267 and #493
*/
@Test
public void testIssue493() throws Exception {
final HazelcastInstance hz1 = Hazelcast.newHazelcastInstance();
final HazelcastInstance hz2 = Hazelcast.newHazelcastInstance();
ClientConfig clientConfig = new ClientConfig();
clientConfig.setRedoOperation(true);
HazelcastInstance client = HazelcastClient.newHazelcastClient(clientConfig);
final ILock lock = client.getLock("lock");
for (int k = 0; k < 10; k++) {
lock.lock();
try {
Thread.sleep(100);
} finally {
lock.unlock();
}
}
lock.lock();
hz1.shutdown();
lock.unlock();
}
@Test
@Category(ProblematicTest.class)
public void testOperationRedo() throws Exception {
final HazelcastInstance hz1 = Hazelcast.newHazelcastInstance();
final HazelcastInstance hz2 = Hazelcast.newHazelcastInstance();
ClientConfig clientConfig = new ClientConfig();
clientConfig.setRedoOperation(true);
HazelcastInstance client = HazelcastClient.newHazelcastClient(clientConfig);
final Thread thread = new Thread() {
public void run() {
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
hz1.getLifecycleService().terminate();
}
};
final IMap map = client.getMap("m");
thread.start();
int expected = 1000;
for (int i = 0; i < expected; i++) {
map.put(i, "item" + i);
}
thread.join();
assertEquals(expected, map.size());
}
@Test
public void testOperationRedo_smartRoutingDisabled() throws Exception {
final HazelcastInstance hz1 = Hazelcast.newHazelcastInstance();
final HazelcastInstance hz2 = Hazelcast.newHazelcastInstance();
ClientConfig clientConfig = new ClientConfig();
clientConfig.setRedoOperation(true);
clientConfig.setSmartRouting(false);
HazelcastInstance client = HazelcastClient.newHazelcastClient(clientConfig);
final Thread thread = new Thread() {
public void run() {
try {
Thread.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
}
hz1.getLifecycleService().terminate();
}
};
final IMap map = client.getMap("m");
thread.start();
int expected = 1000;
for (int i = 0; i < expected; i++) {
map.put(i, i);
}
thread.join();
assertEquals(expected, map.size());
}
@Test
public void testGetDistributedObjectsIssue678() {
final HazelcastInstance hz = Hazelcast.newHazelcastInstance();
hz.getQueue("queue");
hz.getMap("map");
hz.getSemaphore("s");
final HazelcastInstance instance = HazelcastClient.newHazelcastClient();
final Collection<DistributedObject> distributedObjects = instance.getDistributedObjects();
assertEquals(3, distributedObjects.size());
}
@Test
public void testMapDestroyIssue764() throws Exception {
final HazelcastInstance instance = Hazelcast.newHazelcastInstance();
HazelcastInstance client = HazelcastClient.newHazelcastClient();
assertEquals(0, client.getDistributedObjects().size());
IMap map = client.getMap("m");
assertEquals(1, client.getDistributedObjects().size());
map.destroy();
assertEquals(0, instance.getDistributedObjects().size());
assertEquals(0, client.getDistributedObjects().size());
}
/**
* Client hangs at map.get after shutdown
*/
@Test
public void testIssue821() {
final HazelcastInstance instance = Hazelcast.newHazelcastInstance();
final HazelcastInstance client = HazelcastClient.newHazelcastClient();
final IMap<Object, Object> map = client.getMap("default");
map.put("key1", "value1");
instance.shutdown();
try {
map.get("key1");
fail();
} catch (HazelcastException ignored) {
}
assertFalse(instance.getLifecycleService().isRunning());
}
@Test
public void testClientConnectionEvents() throws InterruptedException {
final LinkedList<LifecycleState> list = new LinkedList<LifecycleState>();
list.offer(LifecycleState.STARTING);
list.offer(LifecycleState.STARTED);
list.offer(LifecycleState.CLIENT_CONNECTED);
list.offer(LifecycleState.CLIENT_DISCONNECTED);
list.offer(LifecycleState.CLIENT_CONNECTED);
final HazelcastInstance instance = Hazelcast.newHazelcastInstance();
final CountDownLatch latch = new CountDownLatch(list.size());
LifecycleListener listener = new LifecycleListener() {
public void stateChanged(LifecycleEvent event) {
final LifecycleState state = list.poll();
if (state != null && state.equals(event.getState())) {
latch.countDown();
}
}
};
final ListenerConfig listenerConfig = new ListenerConfig(listener);
final ClientConfig clientConfig = new ClientConfig();
clientConfig.addListenerConfig(listenerConfig);
clientConfig.getNetworkConfig().setConnectionAttemptLimit(100);
final HazelcastInstance client = HazelcastClient.newHazelcastClient(clientConfig);
Thread.sleep(100);
instance.shutdown();
Thread.sleep(800);
Hazelcast.newHazelcastInstance();
assertTrue(latch.await(10, TimeUnit.SECONDS));
}
/**
* add membership listener
*/
@Test
public void testIssue1181() throws InterruptedException {
final CountDownLatch latch = new CountDownLatch(1);
Hazelcast.newHazelcastInstance();
final ClientConfig clientConfig = new ClientConfig();
clientConfig.addListenerConfig(new ListenerConfig().setImplementation(new InitialMembershipListener() {
public void init(InitialMembershipEvent event) {
for (int i = 0; i < event.getMembers().size(); i++) {
latch.countDown();
}
}
public void memberAdded(MembershipEvent membershipEvent) {
}
public void memberRemoved(MembershipEvent membershipEvent) {
}
public void memberAttributeChanged(MemberAttributeEvent memberAttributeEvent) {
}
}));
HazelcastClient.newHazelcastClient(clientConfig);
assertTrue(latch.await(10, TimeUnit.SECONDS));
}
@Test
public void testInterceptor() throws InterruptedException {
final HazelcastInstance instance = Hazelcast.newHazelcastInstance();
final HazelcastInstance client = HazelcastClient.newHazelcastClient();
final IMap<Object, Object> map = client.getMap("map");
final MapInterceptorImpl interceptor = new MapInterceptorImpl();
final String id = map.addInterceptor(interceptor);
assertNotNull(id);
map.put("key1", "value");
assertEquals("value", map.get("key1"));
map.put("key1", "value1");
assertEquals("getIntercepted", map.get("key1"));
assertFalse(map.replace("key1", "getIntercepted", "val"));
assertTrue(map.replace("key1", "value1", "val"));
assertEquals("val", map.get("key1"));
map.put("key2", "oldValue");
assertEquals("oldValue", map.get("key2"));
map.put("key2", "newValue");
assertEquals("putIntercepted", map.get("key2"));
map.put("key3", "value2");
assertEquals("value2", map.get("key3"));
assertEquals("removeIntercepted", map.remove("key3"));
}
static class MapInterceptorImpl implements MapInterceptor {
MapInterceptorImpl() {
}
public Object interceptGet(Object value) {
if ("value1".equals(value)) {
return "getIntercepted";
}
return null;
}
public void afterGet(Object value) {
}
public Object interceptPut(Object oldValue, Object newValue) {
if ("oldValue".equals(oldValue) && "newValue".equals(newValue)) {
return "putIntercepted";
}
return null;
}
public void afterPut(Object value) {
}
public Object interceptRemove(Object removedValue) {
if ("value2".equals(removedValue)) {
return "removeIntercepted";
}
return null;
}
public void afterRemove(Object value) {
}
}
@Test
public void testClientPortableWithoutRegisteringToNode() {
Hazelcast.newHazelcastInstance();
final SerializationConfig serializationConfig = new SerializationConfig();
serializationConfig.addPortableFactory(5, new PortableFactory() {
public Portable create(int classId) {
return new SamplePortable();
}
});
final ClientConfig clientConfig = new ClientConfig();
clientConfig.setSerializationConfig(serializationConfig);
final HazelcastInstance client = HazelcastClient.newHazelcastClient(clientConfig);
final IMap<Integer, SamplePortable> sampleMap = client.getMap(randomString());
sampleMap.put(1, new SamplePortable(666));
final SamplePortable samplePortable = sampleMap.get(1);
assertEquals(666, samplePortable.a);
}
@Test
public void testCredentials() {
final Config config = new Config();
config.getGroupConfig().setName("foo").setPassword("bar");
final HazelcastInstance instance = Hazelcast.newHazelcastInstance(config);
final ClientConfig clientConfig = new ClientConfig();
final ClientSecurityConfig securityConfig = clientConfig.getSecurityConfig();
securityConfig.setCredentialsClassname(MyCredentials.class.getName());
final HazelcastInstance client = HazelcastClient.newHazelcastClient(clientConfig);
}
public static class MyCredentials extends UsernamePasswordCredentials {
public MyCredentials() {
super("foo", "bar");
}
}
public void testListenerReconnect() throws InterruptedException {
final HazelcastInstance instance1 = Hazelcast.newHazelcastInstance();
final HazelcastInstance client = HazelcastClient.newHazelcastClient();
final CountDownLatch latch = new CountDownLatch(2);
final IMap<Object, Object> m = client.getMap("m");
final String id = m.addEntryListener(new EntryAdapter() {
public void entryAdded(EntryEvent event) {
latch.countDown();
}
@Override
public void entryUpdated(EntryEvent event) {
latch.countDown();
}
}, true);
m.put("key1", "value1");
final HazelcastInstance instance2 = Hazelcast.newHazelcastInstance();
instance1.shutdown();
final Thread thread = new Thread() {
@Override
public void run() {
while (!isInterrupted()) {
m.put("key2", "value2");
try {
Thread.sleep(100);
} catch (InterruptedException ignored) {
}
}
}
};
thread.start();
assertTrueEventually(new AssertTask() {
public void run() {
try {
assertTrue(latch.await(10, TimeUnit.SECONDS));
} catch (InterruptedException e) {
e.printStackTrace();
}
}
});
thread.interrupt();
assertTrue(m.removeEntryListener(id));
assertFalse(m.removeEntryListener("foo"));
}
static class SamplePortable implements Portable {
public int a;
public SamplePortable(int a) {
this.a = a;
}
public SamplePortable() {
}
public int getFactoryId() {
return 5;
}
public int getClassId() {
return 6;
}
public void writePortable(PortableWriter writer) throws IOException {
writer.writeInt("a", a);
}
public void readPortable(PortableReader reader) throws IOException {
a = reader.readInt("a");
}
}
@Test
public void testNearCache_WhenRegisteredNodeIsDead() {
final HazelcastInstance instance = Hazelcast.newHazelcastInstance();
final ClientConfig clientConfig = new ClientConfig();
final String mapName = randomMapName();
NearCacheConfig nearCacheConfig = new NearCacheConfig();
nearCacheConfig.setName(mapName);
nearCacheConfig.setInvalidateOnChange(true);
clientConfig.addNearCacheConfig(nearCacheConfig);
final HazelcastInstance client = HazelcastClient.newHazelcastClient(clientConfig);
final IMap<Object, Object> map = client.getMap(mapName);
map.put("a", "b");
map.get("a"); //put to nearCache
instance.shutdown();
Hazelcast.newHazelcastInstance();
assertEquals(null, map.get("a"));
}
} | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_ClientIssueTest.java |
523 | public class TypesExistsResponse extends ActionResponse {
private boolean exists;
TypesExistsResponse() {
}
public TypesExistsResponse(boolean exists) {
this.exists = exists;
}
public boolean isExists() {
return this.exists;
}
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
exists = in.readBoolean();
}
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeBoolean(exists);
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_indices_exists_types_TypesExistsResponse.java |
/**
 * Field persistence provider that extracts display/persisted values for Sku
 * fields. Runs immediately after {@code SkuPricingPersistenceProvider}
 * (order = its ORDER + 1) so pricing extraction happens first.
 */
@Scope("prototype")
@Component("blSkuFieldsPersistenceProvider")
public class SkuFieldsPersistenceProvider extends FieldPersistenceProviderAdapter {

    @Override
    public int getOrder() {
        return SkuPricingPersistenceProvider.ORDER + 1;
    }

    @Override
    public FieldProviderResponse extractValue(ExtractValueRequest extractValueRequest, Property property) {
        if (!canHandleExtraction(extractValueRequest, property)) {
            return FieldProviderResponse.NOT_HANDLED;
        }
        Object actualValue = extractValueRequest.getRequestedValue();
        String value = extractValueRequest.getRecordHelper().formatValue(actualValue);
        String displayValue = value;
        if (displayValue == null) {
            // Fall back to reading the property straight off the entity; a
            // successful read means the value is derived rather than stored.
            try {
                displayValue = extractValueRequest.getRecordHelper().getStringValueFromGetter(extractValueRequest.getEntity(), property.getName());
                ((BasicFieldMetadata)property.getMetadata()).setDerived(true);
            } catch (Exception e) {
                //swallow all exceptions because null is fine for the display value
            }
        }
        property.setValue(value);
        property.setDisplayValue(displayValue);
        return FieldProviderResponse.HANDLED;
    }

    /**
     * Handles only Sku/SkuImpl targets, excluding map-field pseudo-properties
     * (those containing the map-field separator in their name).
     */
    protected boolean canHandleExtraction(ExtractValueRequest extractValueRequest, Property property) {
        return (
                extractValueRequest.getMetadata().getTargetClass().equals(SkuImpl.class.getName()) ||
                extractValueRequest.getMetadata().getTargetClass().equals(Sku.class.getName())
               )
               && !property.getName().contains(FieldManager.MAPFIELDSEPARATOR);
    }
}
| admin_broadleaf-admin-module_src_main_java_org_broadleafcommerce_admin_server_service_persistence_module_provider_SkuFieldsPersistenceProvider.java |
/**
 * Runs the shared binary-converter test suite against
 * {@link OSafeBinaryConverter}.
 *
 * <p>NOTE(review): every override simply delegates to super — presumably so
 * the TestNG reporter attributes each inherited case to this subclass;
 * confirm before removing them.
 */
@Test
public class SafeConverterTest extends AbstractConverterTest {

    // Install the converter under test before the inherited cases run.
    @BeforeClass
    public void beforeClass() {
        converter = new OSafeBinaryConverter();
    }

    @Override
    public void testPutIntBigEndian() {
        super.testPutIntBigEndian();
    }

    @Override
    public void testPutIntLittleEndian() {
        super.testPutIntLittleEndian();
    }

    @Override
    public void testPutLongBigEndian() {
        super.testPutLongBigEndian();
    }

    @Override
    public void testPutLongLittleEndian() {
        super.testPutLongLittleEndian();
    }

    @Override
    public void testPutShortBigEndian() {
        super.testPutShortBigEndian();
    }

    @Override
    public void testPutShortLittleEndian() {
        super.testPutShortLittleEndian();
    }

    @Override
    public void testPutCharBigEndian() {
        super.testPutCharBigEndian();
    }

    @Override
    public void testPutCharLittleEndian() {
        super.testPutCharLittleEndian();
    }
}
| commons_src_test_java_com_orientechnologies_common_serialization_SafeConverterTest.java |
/**
 * A recipient specification for an outgoing email: the primary address plus
 * optional CC and BCC address lists.
 */
public interface EmailTarget extends Serializable {

    /** @return the primary recipient address */
    String getEmailAddress();

    void setEmailAddress(String emailAddress);

    /** @return carbon-copy addresses, possibly null or empty */
    String[] getCCAddresses();

    void setCCAddresses(String[] ccAddresses);

    /** @return blind-carbon-copy addresses, possibly null or empty */
    String[] getBCCAddresses();

    // FIX: parameter renamed "BCCAddresses" -> "bccAddresses" for
    // lowerCamelCase consistency with setCCAddresses; parameter names in an
    // interface are documentation only, so this is fully compatible.
    void setBCCAddresses(String[] bccAddresses);
}
| common_src_main_java_org_broadleafcommerce_common_email_domain_EmailTarget.java |
/**
 * DTO describing a single entity property for the admin platform: its name,
 * raw/formatted/display values, dirty flag, and associated field metadata.
 *
 * <p>Identity (equals/hashCode) is based on the property name plus the
 * merged-property type of {@link BasicFieldMetadata}, when present.
 */
public class Property implements Serializable {

    private static final long serialVersionUID = 1L;

    protected String name;
    protected String value;
    protected String displayValue;
    // Defaults to basic metadata; may be replaced with CollectionMetadata.
    protected FieldMetadata metadata = new BasicFieldMetadata();
    protected boolean isAdvancedCollection = false;
    protected Boolean isDirty = false;
    protected String unHtmlEncodedValue;
    protected String rawValue;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    public FieldMetadata getMetadata() {
        return metadata;
    }

    public void setMetadata(FieldMetadata metadata) {
        this.metadata = metadata;
    }

    public String getDisplayValue() {
        return displayValue;
    }

    public void setDisplayValue(String displayValue) {
        this.displayValue = displayValue;
    }

    public Boolean getIsDirty() {
        return isDirty;
    }

    public void setIsDirty(Boolean isDirty) {
        this.isDirty = isDirty;
    }

    public String getUnHtmlEncodedValue() {
        return unHtmlEncodedValue;
    }

    public void setUnHtmlEncodedValue(String unHtmlEncodedValue) {
        this.unHtmlEncodedValue = unHtmlEncodedValue;
    }

    public String getRawValue() {
        return rawValue;
    }

    public void setRawValue(String rawValue) {
        this.rawValue = rawValue;
    }

    public boolean isAdvancedCollection() {
        return isAdvancedCollection;
    }

    public void setAdvancedCollection(boolean advancedCollection) {
        isAdvancedCollection = advancedCollection;
    }

    @Override
    public String toString() {
        return getName() + ": " + getValue();
    }

    // Hash combines the merged-property type (only meaningful on
    // BasicFieldMetadata) with the name; CollectionMetadata contributes 0.
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((metadata == null || metadata instanceof CollectionMetadata || ((BasicFieldMetadata) metadata).getMergedPropertyType() == null) ? 0 : ((BasicFieldMetadata) metadata).getMergedPropertyType().hashCode());
        result = prime * result + ((name == null) ? 0 : name.hashCode());
        return result;
    }

    // NOTE(review): when this side has BasicFieldMetadata with a non-null
    // merged type but the other side has CollectionMetadata, the metadata
    // comparison falls through and only names are compared — which can make
    // equals() true while hashCode() differs. Confirm whether that mixed
    // case can occur before relying on hash-based collections here.
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        Property other = (Property) obj;
        if (metadata == null || metadata instanceof CollectionMetadata || ((BasicFieldMetadata) metadata).getMergedPropertyType() == null) {
            if (other.metadata != null && other.metadata instanceof BasicFieldMetadata && ((BasicFieldMetadata) other.metadata).getMergedPropertyType() != null)
                return false;
        } else if (metadata instanceof BasicFieldMetadata && other.metadata instanceof BasicFieldMetadata && !((BasicFieldMetadata) metadata).getMergedPropertyType().equals(((BasicFieldMetadata) other.metadata).getMergedPropertyType()))
            return false;
        if (name == null) {
            if (other.name != null)
                return false;
        } else if (!name.equals(other.name))
            return false;
        return true;
    }
}
| admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_dto_Property.java |
/**
 * Test-configuration factory for the BerkeleyJE storage backend.
 */
public class BerkeleyStorageSetup extends StorageSetup {

    /**
     * Builds a BerkeleyJE configuration rooted at the given directory.
     */
    public static ModifiableConfiguration getBerkeleyJEConfiguration(String dir) {
        return buildConfiguration()
                .set(STORAGE_BACKEND,"berkeleyje")
                .set(STORAGE_DIRECTORY, dir);
    }

    /** Same as above, but rooted at the default test home directory. */
    public static ModifiableConfiguration getBerkeleyJEConfiguration() {
        return getBerkeleyJEConfiguration(getHomeDir());
    }

    /** Default configuration converted to a graph {@code WriteConfiguration}. */
    public static WriteConfiguration getBerkeleyJEGraphConfiguration() {
        return getBerkeleyJEConfiguration().getConfiguration();
    }

    /**
     * Performance-oriented variant: transactions off, larger tx cache.
     */
    public static ModifiableConfiguration getBerkeleyJEPerformanceConfiguration() {
        return getBerkeleyJEConfiguration()
                .set(STORAGE_TRANSACTIONAL,false)
                .set(TX_CACHE_SIZE,1000);
    }
}
| titan-berkeleyje_src_test_java_com_thinkaurelius_titan_BerkeleyStorageSetup.java |
/**
 * Transport handler for rejoin requests: when another node asks this node to
 * rejoin the cluster, schedules an urgent cluster-state update that performs
 * the rejoin.
 */
class RejoinClusterRequestHandler extends BaseTransportRequestHandler<RejoinClusterRequest> {

    static final String ACTION = "discovery/zen/rejoin";

    @Override
    public RejoinClusterRequest newInstance() {
        return new RejoinClusterRequest();
    }

    @Override
    public void messageReceived(final RejoinClusterRequest request, final TransportChannel channel) throws Exception {
        clusterService.submitStateUpdateTask("received a request to rejoin the cluster from [" + request.fromNodeId + "]", Priority.URGENT, new ClusterStateUpdateTask() {
            @Override
            public ClusterState execute(ClusterState currentState) {
                // Ack the sender before rejoining; the rejoin may tear down
                // the connection the response would otherwise travel on.
                try {
                    channel.sendResponse(TransportResponse.Empty.INSTANCE);
                } catch (Exception e) {
                    logger.warn("failed to send response on rejoin cluster request handling", e);
                }
                return rejoin(currentState, "received a request to rejoin the cluster from [" + request.fromNodeId + "]");
            }

            @Override
            public void onFailure(String source, Throwable t) {
                logger.error("unexpected failure during [{}]", t, source);
            }
        });
    }

    // Runs on the calling thread; the real work is deferred to the
    // cluster-state update task above.
    @Override
    public String executor() {
        return ThreadPool.Names.SAME;
    }
}
| src_main_java_org_elasticsearch_discovery_zen_ZenDiscovery.java |
// Adapts portable entry events from the cluster into typed EntryEvents and
// dispatches them to the registered EntryListener. Key/value deserialization
// is deferred and values are only materialized when includeValue is set.
return new EventHandler<PortableEntryEvent>() {
    public void handle(PortableEntryEvent event) {
        V value = null;
        V oldValue = null;
        if (includeValue) {
            value = toObject(event.getValue());
            oldValue = toObject(event.getOldValue());
        }
        K key = toObject(event.getKey());
        // Resolve the originating member from its uuid (may be null if the
        // member already left — listener implementations should tolerate that).
        Member member = getContext().getClusterService().getMember(event.getUuid());
        EntryEvent<K, V> entryEvent = new EntryEvent<K, V>(name, member,
                event.getEventType().getType(), key, oldValue, value);
        switch (event.getEventType()) {
            case ADDED:
                listener.entryAdded(entryEvent);
                break;
            case REMOVED:
                listener.entryRemoved(entryEvent);
                break;
            case UPDATED:
                listener.entryUpdated(entryEvent);
                break;
            case EVICTED:
                listener.entryEvicted(entryEvent);
                break;
            default:
                throw new IllegalArgumentException("Not a known event type " + event.getEventType());
        }
    }

    @Override
    public void onListenerRegister() {
        // No state to reset on (re-)registration.
    }
};
| hazelcast-client_src_main_java_com_hazelcast_client_proxy_ClientMapProxy.java |
144 | static final class ThreadHashCode extends ThreadLocal<HashCode> {
public HashCode initialValue() { return new HashCode(); }
} | 0true
| src_main_java_jsr166e_Striped64.java |
120 | public final class ClientPrincipal implements Portable {
private String uuid;
private String ownerUuid;
public ClientPrincipal() {
}
public ClientPrincipal(String uuid, String ownerUuid) {
this.uuid = uuid;
this.ownerUuid = ownerUuid;
}
public String getUuid() {
return uuid;
}
public String getOwnerUuid() {
return ownerUuid;
}
@Override
public int getFactoryId() {
return ClientPortableHook.ID;
}
@Override
public int getClassId() {
return ClientPortableHook.PRINCIPAL;
}
@Override
public void writePortable(PortableWriter writer) throws IOException {
writer.writeUTF("uuid", uuid);
writer.writeUTF("ownerUuid", ownerUuid);
}
@Override
public void readPortable(PortableReader reader) throws IOException {
uuid = reader.readUTF("uuid");
ownerUuid = reader.readUTF("ownerUuid");
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ClientPrincipal that = (ClientPrincipal) o;
if (ownerUuid != null ? !ownerUuid.equals(that.ownerUuid) : that.ownerUuid != null) {
return false;
}
if (uuid != null ? !uuid.equals(that.uuid) : that.uuid != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = uuid != null ? uuid.hashCode() : 0;
result = 31 * result + (ownerUuid != null ? ownerUuid.hashCode() : 0);
return result;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder("ClientPrincipal{");
sb.append("uuid='").append(uuid).append('\'');
sb.append(", ownerUuid='").append(ownerUuid).append('\'');
sb.append('}');
return sb.toString();
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_client_ClientPrincipal.java |
1,272 | public class FaunusSerializer {
// This is volatile to support double-checked locking
private static volatile Serializer standardSerializer;
private final FaunusSchemaManager types;
private final boolean trackState;
private final boolean trackPaths;
private final Configuration configuration;
private static final Logger log =
LoggerFactory.getLogger(FaunusSerializer.class);
public FaunusSerializer(final Configuration configuration) {
Preconditions.checkNotNull(configuration);
this.types = FaunusSchemaManager.getTypeManager(configuration);
this.configuration = configuration;
this.trackState = configuration.get(TitanHadoopConfiguration.PIPELINE_TRACK_STATE);
this.trackPaths = configuration.get(TitanHadoopConfiguration.PIPELINE_TRACK_PATHS);
}
public void writeVertex(final FaunusVertex vertex, final DataOutput out) throws IOException {
//Need to write the id up front for the comparator
WritableUtils.writeVLong(out, vertex.id);
Schema schema = new Schema();
vertex.updateSchema(schema);
schema.writeSchema(out);
writePathElement(vertex, schema, out);
writeEdges(vertex, vertex.inAdjacency, out, Direction.IN, schema);
FaunusVertexLabel vl = (FaunusVertexLabel)vertex.getVertexLabel();
out.writeUTF(vl.isDefault()?"":vl.getName());
}
public void readVertex(final FaunusVertex vertex, final DataInput in) throws IOException {
WritableUtils.readVLong(in);
Schema schema = readSchema(in);
readPathElement(vertex, schema, in);
vertex.inAdjacency = readEdges(vertex, in, Direction.IN, schema);
String labelName = in.readUTF();
vertex.setVertexLabel(StringUtils.isBlank(labelName)?FaunusVertexLabel.DEFAULT_VERTEXLABEL:
types.getVertexLabel(labelName));
}
public void writeEdge(final StandardFaunusEdge edge, final DataOutput out) throws IOException {
writePathElement(edge, out);
WritableUtils.writeVLong(out, edge.inVertex);
WritableUtils.writeVLong(out, edge.outVertex);
writeFaunusType(edge.getType(), out);
}
public void readEdge(final StandardFaunusEdge edge, final DataInput in) throws IOException {
readPathElement(edge, in);
edge.inVertex = WritableUtils.readVLong(in);
edge.outVertex = WritableUtils.readVLong(in);
edge.setLabel((FaunusEdgeLabel)readFaunusType(in));
}
public void writeProperty(final StandardFaunusProperty property, final DataOutput out) throws IOException {
writePathElement(property, out);
WritableUtils.writeVLong(out, property.vertexid);
serializeObject(out,property.getValue());
writeFaunusType(property.getType(), out);
}
public void readProperty(final StandardFaunusProperty property, final DataInput in) throws IOException {
readPathElement(property, in);
property.vertexid = WritableUtils.readVLong(in);
property.value = deserializeObject(in);
property.setKey((FaunusPropertyKey)readFaunusType(in));
}
// Convenience overload: no schema table, so types are encoded inline by name.
private void readPathElement(final FaunusPathElement element, final DataInput in) throws IOException {
    readPathElement(element, null, in);
}

private void writePathElement(final FaunusPathElement element, final DataOutput out) throws IOException {
    writePathElement(element, null, out);
}

/**
 * Reads element state plus either the full provenance paths (when path
 * tracking is enabled) or just the aggregate path counter.
 *
 * @param schema optional type-id dictionary; when null, types are read inline
 */
private void readPathElement(final FaunusPathElement element, Schema schema, final DataInput in) throws IOException {
    readElement(element, schema, in);
    if (trackPaths) {
        List<List<MicroElement>> paths = readElementPaths(in);
        element.tracker = new FaunusPathElement.Tracker(paths,
                (element instanceof FaunusVertex) ? new FaunusVertex.MicroVertex(element.id) : new StandardFaunusEdge.MicroEdge(element.id));
        log.trace("readPathElement element={} paths={}", element, paths);
    } else {
        // Without tracking only the number of paths survives serialization.
        element.pathCounter = WritableUtils.readVLong(in);
        element.tracker = FaunusPathElement.DEFAULT_TRACK;
    }
}

// Write-side counterpart: must remain in lock-step with readPathElement above.
private void writePathElement(final FaunusPathElement element, final Schema schema, final DataOutput out) throws IOException {
    writeElement(element, schema, out);
    if (trackPaths)
        writeElementPaths(element.tracker.paths, out);
    else
        WritableUtils.writeVLong(out, element.pathCounter);
}

// Core element state: id, optional lifecycle byte, then the OUT adjacency.
private void readElement(final FaunusElement element, Schema schema, final DataInput in) throws IOException {
    element.id = WritableUtils.readVLong(in);
    if (trackState) element.setLifeCycle(in.readByte());
    element.outAdjacency = readEdges(element,in,Direction.OUT,schema);
}

private void writeElement(final FaunusElement element, final Schema schema, final DataOutput out) throws IOException {
    // Removed elements may only be serialized when their lifecycle is tracked.
    Preconditions.checkArgument(trackState || !element.isRemoved());
    WritableUtils.writeVLong(out, element.id);
    if (trackState) out.writeByte(element.getLifeCycle());
    writeEdges(element, element.outAdjacency, out, Direction.OUT, schema);
}
// Serializes an arbitrary value with Titan's serializer, length-prefixed so
// the reader can allocate an exact-size buffer.
private void serializeObject(final DataOutput out, Object value) throws IOException {
    final com.thinkaurelius.titan.graphdb.database.serialize.DataOutput o = getStandardSerializer().getDataOutput(40);
    o.writeClassAndObject(value);
    final StaticBuffer buffer = o.getStaticBuffer();
    WritableUtils.writeVInt(out, buffer.length());
    out.write(buffer.as(StaticBuffer.ARRAY_FACTORY));
}

// Inverse of serializeObject: vint length, then the serialized payload.
private Object deserializeObject(final DataInput in) throws IOException {
    int byteLength = WritableUtils.readVInt(in);
    byte[] bytes = new byte[byteLength];
    in.readFully(bytes);
    final ReadBuffer buffer = new ReadArrayBuffer(bytes);
    return getStandardSerializer().readClassAndObject(buffer);
}

/**
 * Return the StandardSerializer singleton shared between all instances of FaunusSerializer.
 *
 * If it has not yet been initialized, then the singleton is created using the maximum
 * Kryo buffer size configured in the calling FaunusSerializer.
 *
 * Uses double-checked locking; correctness relies on the field being
 * volatile (per the in-line note below).
 *
 * @return the shared serializer instance
 */
private Serializer getStandardSerializer() {
    if (null == standardSerializer) { // N.B. standardSerializer is volatile
        synchronized (FaunusSerializer.class) {
            if (null == standardSerializer) {
                int maxOutputBufSize = configuration.get(KRYO_MAX_OUTPUT_SIZE);
                standardSerializer = new StandardSerializer(true, maxOutputBufSize);
            }
        }
    }
    // TODO consider checking whether actual output buffer size matches config, create new StandardSerializer if mismatched? Might not be worth it
    return standardSerializer;
}
/**
 * Returns the given relations, lazily skipping removed ones unless lifecycle
 * state is being tracked (in which case removed relations must survive
 * serialization and nothing is filtered).
 */
private <T extends FaunusRelation> Iterable<T> filterDeletedRelations(Iterable<T> relations) {
    if (trackState) {
        return relations;
    }
    return Iterables.filter(relations, new Predicate<T>() {
        @Override
        public boolean apply(@Nullable T relation) {
            return !relation.isRemoved();
        }
    });
}
/**
 * Reads the adjacency written by {@link #writeEdges}. For full vertices each
 * incident relation carries complete path-element state; for lightweight
 * elements only an optional lifecycle byte plus the bare payload is read.
 */
private SetMultimap<FaunusRelationType, FaunusRelation> readEdges(final FaunusElement element, final DataInput in, final Direction direction, final Schema schema) throws IOException {
    final SetMultimap<FaunusRelationType, FaunusRelation> adjacency = HashMultimap.create();
    int numTypes = WritableUtils.readVInt(in);
    for (int i = 0; i < numTypes; i++) {
        FaunusRelationType type;
        if (schema == null) type = readFaunusType(in);
        else type = schema.getType(WritableUtils.readVLong(in));
        final int size = WritableUtils.readVInt(in);
        for (int j = 0; j < size; j++) {
            FaunusRelation relation;
            if (element instanceof FaunusVertex) {
                if (type.isEdgeLabel()) {
                    final StandardFaunusEdge edge = new StandardFaunusEdge(configuration);
                    edge.setLabel((FaunusEdgeLabel)type);
                    readPathElement(edge, schema, in);
                    long otherId = WritableUtils.readVLong(in);
                    // The stored id is always the *other* endpoint; which slot
                    // it fills depends on the direction being deserialized.
                    switch (direction) {
                        case IN:
                            edge.inVertex = element.getLongId();
                            edge.outVertex = otherId;
                            break;
                        case OUT:
                            edge.outVertex = element.getLongId();
                            edge.inVertex = otherId;
                            break;
                        default:
                            throw ExceptionFactory.bothIsNotSupported();
                    }
                    relation = edge;
                    log.trace("readEdges edge={} paths={}", edge, edge.tracker.paths);
                } else {
                    // Properties only ever appear in the OUT adjacency.
                    assert type.isPropertyKey() && direction==Direction.OUT;
                    final StandardFaunusProperty property = new StandardFaunusProperty(configuration);
                    property.setKey((FaunusPropertyKey) type);
                    readPathElement(property, schema, in);
                    property.value = deserializeObject(in);
                    relation = property;
                }
            } else {
                // Lightweight (non-vertex) element: lifecycle byte was written
                // only when state is tracked, followed by the payload.
                byte lifecycle = trackState?in.readByte():-1;
                if (type.isEdgeLabel()) {
                    relation = new SimpleFaunusEdge((FaunusEdgeLabel)type,new FaunusVertex(configuration,WritableUtils.readVLong(in)));
                } else {
                    assert type.isPropertyKey() && direction==Direction.OUT;
                    relation = new SimpleFaunusProperty((FaunusPropertyKey)type,deserializeObject(in));
                }
                if (trackState) relation.setLifeCycle(lifecycle);
            }
            adjacency.put(type, relation);
        }
    }
    // Share the canonical empty multimap instead of retaining a fresh one.
    if (adjacency.isEmpty()) return FaunusElement.EMPTY_ADJACENCY;
    return adjacency;
}
/**
 * Writes the adjacency map. Relations deleted while state tracking is off are
 * skipped, and types whose surviving-relation count is zero are omitted so
 * the reader never encounters an empty type bucket.
 */
private void writeEdges(final FaunusElement element, final SetMultimap<FaunusRelationType, FaunusRelation> edges, final DataOutput out, final Direction direction, final Schema schema) throws IOException {
    Map<FaunusRelationType, Integer> counts = Maps.newHashMap();
    int typeCount = 0;
    // First pass: count surviving relations per type.
    for (FaunusRelationType type : edges.keySet()) {
        int count = IterablesUtil.size(filterDeletedRelations(edges.get(type)));
        counts.put(type, count);
        if (count > 0) typeCount++;
    }

    WritableUtils.writeVInt(out, typeCount);
    for (FaunusRelationType type : edges.keySet()) {
        if (counts.get(type) == 0) continue;
        if (schema == null) writeFaunusType(type, out);
        else WritableUtils.writeVLong(out, schema.getTypeId(type));
        WritableUtils.writeVInt(out, counts.get(type));
        Iterable<FaunusRelation> subset = filterDeletedRelations(edges.get(type));
        for (final FaunusRelation rel : subset) {
            if (element instanceof FaunusVertex) {
                assert rel instanceof StandardFaunusRelation;
                writePathElement((StandardFaunusRelation)rel,schema,out);
            } else {
                assert rel instanceof SimpleFaunusRelation;
                if (trackState) out.writeByte(((SimpleFaunusRelation)rel).getLifeCycle());
            }
            // Payload: other endpoint id for edges, serialized value for properties.
            if (rel.isEdge()) {
                WritableUtils.writeVLong(out, ((FaunusEdge)rel).getVertexId(direction.opposite()));
            } else {
                serializeObject(out,((FaunusProperty)rel).getValue());
            }
        }
    }
}
/**
 * Serializes provenance paths: path count, then per path its length followed
 * by (tag char, vlong id) pairs. A null list is written exactly like an empty
 * one: a zero count.
 */
private void writeElementPaths(final List<List<MicroElement>> paths, final DataOutput out) throws IOException {
    final int numPaths = (paths == null) ? 0 : paths.size();
    WritableUtils.writeVInt(out, numPaths);
    if (paths == null)
        return;
    for (final List<MicroElement> path : paths) {
        WritableUtils.writeVInt(out, path.size());
        for (final MicroElement hop : path) {
            // One-char tag distinguishes vertex ('v') from edge ('e') hops.
            out.writeChar(hop instanceof FaunusVertex.MicroVertex ? 'v' : 'e');
            WritableUtils.writeVLong(out, hop.getId());
        }
    }
}
/**
 * Inverse of {@link #writeElementPaths}: reads the path count, then each path
 * as a length followed by (tag char, vlong id) pairs.
 */
private List<List<MicroElement>> readElementPaths(final DataInput in) throws IOException {
    final int numPaths = WritableUtils.readVInt(in);
    final List<List<MicroElement>> paths = new ArrayList<List<MicroElement>>(numPaths);
    for (int p = 0; p < numPaths; p++) {
        final int length = WritableUtils.readVInt(in);
        final List<MicroElement> path = new ArrayList<MicroElement>(length);
        for (int h = 0; h < length; h++) {
            final char tag = in.readChar();
            final long id = WritableUtils.readVLong(in);
            if (tag == 'v')
                path.add(new FaunusVertex.MicroVertex(id));
            else
                path.add(new StandardFaunusEdge.MicroEdge(id));
        }
        paths.add(path);
    }
    return paths;
}
/**
 * Writes a relation type inline: a tag byte (0 = property key, 1 = edge
 * label) followed by the UTF-encoded type name.
 */
private void writeFaunusType(final FaunusRelationType type, final DataOutput out) throws IOException {
    if (type.isPropertyKey())
        out.writeByte(0);
    else
        out.writeByte(1);
    out.writeUTF(type.getName());
}

/**
 * Inverse of {@link #writeFaunusType}: tag byte, then UTF name, resolved
 * (or created) through the type manager.
 */
private FaunusRelationType readFaunusType(final DataInput in) throws IOException {
    final int tag = in.readByte();
    final String name = in.readUTF();
    assert tag == 0 || tag == 1;
    if (tag == 0)
        return types.getOrCreatePropertyKey(name);
    return types.getOrCreateEdgeLabel(name);
}
/**
 * Per-serialization dictionary mapping relation types to small local ids so
 * repeated types are written as vlongs instead of full names. Ids are
 * assigned starting at 1.
 */
class Schema {

    private final BiMap<FaunusRelationType, Long> localTypes;
    private long count = 1;

    private Schema() {
        this(8);
    }

    private Schema(int size) {
        this.localTypes = HashBiMap.create(size);
    }

    void add(String type) {
        this.add(types.getRelationType(type));
    }

    void add(FaunusRelationType type) {
        // Idempotent: a type already present keeps its original id.
        if (!localTypes.containsKey(type)) localTypes.put(type, count++);
    }

    void addAll(Iterable<FaunusRelationType> types) {
        for (FaunusRelationType type : types) add(type);
    }

    long getTypeId(FaunusRelationType type) {
        Long id = localTypes.get(type);
        Preconditions.checkArgument(id != null, "Type is not part of the schema: " + type);
        return id;
    }

    FaunusRelationType getType(long id) {
        FaunusRelationType type = localTypes.inverse().get(id);
        Preconditions.checkArgument(type != null, "Type is not part of the schema: " + id);
        return type;
    }

    // Used when rebuilding a schema from its serialized form.
    // NOTE(review): sets count to index+1 of the *last* entry added; assumes
    // entries arrive in increasing index order, otherwise count may lag the
    // maximum id — confirm callers never mix add(type) with out-of-order loads.
    private void add(FaunusRelationType type, long index) {
        Preconditions.checkArgument(!localTypes.containsValue(index));
        localTypes.put(type, index);
        count = index + 1;
    }

    private void writeSchema(final DataOutput out) throws IOException {
        WritableUtils.writeVInt(out, localTypes.size());
        for (Map.Entry<FaunusRelationType, Long> entry : localTypes.entrySet()) {
            writeFaunusType(entry.getKey(), out);
            WritableUtils.writeVLong(out, entry.getValue());
        }
    }
}

// Inverse of Schema.writeSchema: entry count, then (type, id) pairs.
private Schema readSchema(final DataInput in) throws IOException {
    int size = WritableUtils.readVInt(in);
    Schema schema = new Schema(size);
    for (int i = 0; i < size; i++) {
        schema.add(readFaunusType(in), WritableUtils.readVLong(in));
    }
    return schema;
}
static {
    // Register the raw-byte comparator for path elements with Hadoop.
    WritableComparator.define(FaunusPathElement.class, new Comparator());
}

/**
 * Hadoop comparator ordering FaunusPathElements by their leading
 * vlong-encoded id, both at the raw-byte level and on deserialized
 * instances. Avoids Long boxing in this hot sort path (the original used
 * Long.valueOf(..).compareTo(..)).
 */
public static class Comparator extends WritableComparator {

    public Comparator() {
        super(FaunusPathElement.class);
    }

    @Override
    public int compare(final byte[] element1, final int start1, final int length1, final byte[] element2, final int start2, final int length2) {
        try {
            final long id1 = readVLong(element1, start1);
            final long id2 = readVLong(element2, start2);
            return id1 < id2 ? -1 : (id1 == id2 ? 0 : 1);
        } catch (IOException e) {
            // NOTE(review): returning -1 on a decode failure violates the
            // comparator contract (not antisymmetric); kept for backward
            // compatibility with existing sort behavior.
            return -1;
        }
    }

    @Override
    public int compare(final WritableComparable a, final WritableComparable b) {
        if (a instanceof FaunusElement && b instanceof FaunusElement) {
            final long id1 = ((FaunusElement) a).getLongId();
            final long id2 = ((FaunusElement) b).getLongId();
            return id1 < id2 ? -1 : (id1 == id2 ? 0 : 1);
        }
        return super.compare(a, b);
    }
}
//################################################
// Serialization for vanilla Blueprints
//################################################
/**
* All graph element identifiers must be of the long data type. Implementations of this
* interface makes it possible to control the conversion of the identifier in the
* VertexToHadoopBinary utility class.
*
* @author Stephen Mallette (http://stephen.genoprime.com)
*/
// public static interface ElementIdHandler {
// long convertIdentifier(final Element element);
// }
//
// public void writeVertex(final Vertex vertex, final ElementIdHandler elementIdHandler, final DataOutput out) throws IOException {
// Schema schema = new Schema();
// //Convert properties and update schema
// Multimap<HadoopType, FaunusProperty> properties = getProperties(vertex);
// for (HadoopType type : properties.keySet()) schema.add(type);
// for (Edge edge : vertex.getEdges(Direction.BOTH)) {
// schema.add(edge.getLabel());
// for (String key : edge.getPropertyKeys()) schema.add(key);
// }
//
// WritableUtils.writeVLong(out, elementIdHandler.convertIdentifier(vertex));
// schema.writeSchema(out);
// WritableUtils.writeVLong(out, elementIdHandler.convertIdentifier(vertex));
// if (trackState) out.writeByte(ElementState.NEW.getByteValue());
// writeProperties(properties, schema, out);
// out.writeBoolean(false);
// WritableUtils.writeVLong(out, 0);
// writeEdges(vertex, Direction.IN, elementIdHandler, schema, out);
// writeEdges(vertex, Direction.OUT, elementIdHandler, schema, out);
//
// }
//
// private Multimap<HadoopType, FaunusProperty> getProperties(Element element) {
// Multimap<HadoopType, FaunusProperty> properties = HashMultimap.create();
// for (String key : element.getPropertyKeys()) {
// HadoopType type = types.get(key);
// properties.put(type, new FaunusProperty(type, element.getProperty(key)));
// }
// return properties;
// }
//
// private void writeEdges(final Vertex vertex, final Direction direction, final ElementIdHandler elementIdHandler,
// final Schema schema, final DataOutput out) throws IOException {
// final Multiset<String> labelCount = HashMultiset.create();
// for (final Edge edge : vertex.getEdges(direction)) {
// labelCount.add(edge.getLabel());
// }
// WritableUtils.writeVInt(out, labelCount.elementSet().size());
// for (String label : labelCount.elementSet()) {
// HadoopType type = types.get(label);
// WritableUtils.writeVLong(out, schema.getTypeId(type));
// WritableUtils.writeVInt(out, labelCount.count(label));
// for (final Edge edge : vertex.getEdges(direction, label)) {
// WritableUtils.writeVLong(out, elementIdHandler.convertIdentifier(edge));
// if (trackState) out.writeByte(ElementState.NEW.getByteValue());
// writeProperties(getProperties(edge), schema, out);
// out.writeBoolean(false);
// WritableUtils.writeVLong(out, 0);
// WritableUtils.writeVLong(out, elementIdHandler.convertIdentifier(edge.getVertex(direction.opposite())));
// }
// }
// }
} | 1no label
| titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_FaunusSerializer.java |
4,235 | static class CustomRAMDirectory extends RAMDirectory {
public synchronized void renameTo(String from, String to) throws IOException {
RAMFile fromFile = fileMap.get(from);
if (fromFile == null)
throw new FileNotFoundException(from);
RAMFile toFile = fileMap.get(to);
if (toFile != null) {
sizeInBytes.addAndGet(-fileLength(from));
fileMap.remove(from);
}
fileMap.put(to, fromFile);
}
@Override
public String toString() {
return "ram";
}
} | 1no label
| src_main_java_org_elasticsearch_index_store_ram_RamDirectoryService.java |
/**
 * A request to update a single document, either via a script or a partial
 * doc, optionally creating the document from an upsert source when missing.
 */
public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest> {

    private String type;
    private String id;
    @Nullable
    private String routing;

    // Script-based update: source, language, and parameters.
    @Nullable
    String script;
    @Nullable
    String scriptLang;
    @Nullable
    Map<String, Object> scriptParams;

    // Fields to return with the response; null means return nothing.
    private String[] fields;

    private long version = Versions.MATCH_ANY;
    private VersionType versionType = VersionType.INTERNAL;
    private int retryOnConflict = 0;

    private boolean refresh = false;

    private ReplicationType replicationType = ReplicationType.DEFAULT;
    private WriteConsistencyLevel consistencyLevel = WriteConsistencyLevel.DEFAULT;

    // Document used to create the target when it does not exist yet.
    private IndexRequest upsertRequest;

    private boolean docAsUpsert = false;

    // Partial document merged into the existing one (alternative to script).
    @Nullable
    private IndexRequest doc;

    public UpdateRequest() {
    }

    public UpdateRequest(String index, String type, String id) {
        this.index = index;
        this.type = type;
        this.id = id;
    }
    /**
     * Validates the request: type/id must be present, version and
     * retry_on_conflict may not be combined, and exactly one of script or doc
     * must be provided (doc_as_upsert additionally requires a doc).
     */
    @Override
    public ActionRequestValidationException validate() {
        ActionRequestValidationException validationException = super.validate();
        if (type == null) {
            validationException = addValidationError("type is missing", validationException);
        }
        if (id == null) {
            validationException = addValidationError("id is missing", validationException);
        }
        if (version != Versions.MATCH_ANY && retryOnConflict > 0) {
            validationException = addValidationError("can't provide both retry_on_conflict and a specific version", validationException);
        }
        if (script == null && doc == null) {
            validationException = addValidationError("script or doc is missing", validationException);
        }
        if (script != null && doc != null) {
            validationException = addValidationError("can't provide both script and doc", validationException);
        }
        if (doc == null && docAsUpsert) {
            validationException = addValidationError("doc must be specified if doc_as_upsert is enabled", validationException);
        }
        return validationException;
    }
    /**
     * The type of the indexed document.
     */
    public String type() {
        return type;
    }

    /**
     * Sets the type of the indexed document.
     */
    public UpdateRequest type(String type) {
        this.type = type;
        return this;
    }

    /**
     * The id of the indexed document.
     */
    public String id() {
        return id;
    }

    /**
     * Sets the id of the indexed document.
     */
    public UpdateRequest id(String id) {
        this.id = id;
        return this;
    }

    /**
     * Controls the shard routing of the request. Using this value to hash the shard
     * and not the id.
     */
    public UpdateRequest routing(String routing) {
        // Normalize empty routing to null so it is treated as "not set".
        if (routing != null && routing.length() == 0) {
            this.routing = null;
        } else {
            this.routing = routing;
        }
        return this;
    }

    /**
     * Sets the parent id of this document. Will simply set the routing to this value, as it is only
     * used for routing with delete requests.
     */
    public UpdateRequest parent(String parent) {
        // Parent only supplies routing; an explicitly set routing wins.
        if (routing == null) {
            routing = parent;
        }
        return this;
    }

    /**
     * Controls the shard routing of the request. Using this value to hash the shard
     * and not the id.
     */
    public String routing() {
        return this.routing;
    }

    int shardId() {
        return this.shardId;
    }
    public String script() {
        return this.script;
    }

    public Map<String, Object> scriptParams() {
        return this.scriptParams;
    }

    /**
     * The script to execute. Note, make sure not to send different script each times and instead
     * use script params if possible with the same (automatically compiled) script.
     */
    public UpdateRequest script(String script) {
        this.script = script;
        return this;
    }

    /**
     * The language of the script to execute.
     */
    public UpdateRequest scriptLang(String scriptLang) {
        this.scriptLang = scriptLang;
        return this;
    }

    public String scriptLang() {
        return scriptLang;
    }

    /**
     * Add a script parameter.
     */
    public UpdateRequest addScriptParam(String name, Object value) {
        // Lazily allocate the params map on first use.
        if (scriptParams == null) {
            scriptParams = Maps.newHashMap();
        }
        scriptParams.put(name, value);
        return this;
    }

    /**
     * Sets the script parameters to use with the script. Merges into any
     * previously set parameters rather than replacing them.
     */
    public UpdateRequest scriptParams(Map<String, Object> scriptParams) {
        if (this.scriptParams == null) {
            this.scriptParams = scriptParams;
        } else {
            this.scriptParams.putAll(scriptParams);
        }
        return this;
    }
/**
* The script to execute. Note, make sure not to send different script each times and instead
* use script params if possible with the same (automatically compiled) script.
*/
public UpdateRequest script(String script, @Nullable Map<String, Object> scriptParams) {
this.script = script;
if (this.scriptParams != null) {
this.scriptParams.putAll(scriptParams);
} else {
this.scriptParams = scriptParams;
}
return this;
}
/**
* The script to execute. Note, make sure not to send different script each times and instead
* use script params if possible with the same (automatically compiled) script.
*
* @param script The script to execute
* @param scriptLang The script language
* @param scriptParams The script parameters
*/
public UpdateRequest script(String script, @Nullable String scriptLang, @Nullable Map<String, Object> scriptParams) {
this.script = script;
this.scriptLang = scriptLang;
if (this.scriptParams != null) {
this.scriptParams.putAll(scriptParams);
} else {
this.scriptParams = scriptParams;
}
return this;
}
    /**
     * Explicitly specify the fields that will be returned. By default, nothing is returned.
     */
    public UpdateRequest fields(String... fields) {
        this.fields = fields;
        return this;
    }

    /**
     * Get the fields to be returned.
     */
    public String[] fields() {
        return this.fields;
    }

    /**
     * Sets the number of retries when a version conflict occurs because the document was updated between
     * getting it and updating it. Defaults to 0.
     */
    public UpdateRequest retryOnConflict(int retryOnConflict) {
        this.retryOnConflict = retryOnConflict;
        return this;
    }

    public int retryOnConflict() {
        return this.retryOnConflict;
    }

    /**
     * Sets the version, which will cause the index operation to only be performed if a matching
     * version exists and no changes happened on the doc since then.
     */
    public UpdateRequest version(long version) {
        this.version = version;
        return this;
    }

    public long version() {
        return this.version;
    }

    /**
     * Sets the versioning type. Defaults to {@link VersionType#INTERNAL}.
     */
    public UpdateRequest versionType(VersionType versionType) {
        this.versionType = versionType;
        return this;
    }

    public VersionType versionType() {
        return this.versionType;
    }

    /**
     * Should a refresh be executed post this update operation causing the operation to
     * be searchable. Note, heavy indexing should not set this to <tt>true</tt>. Defaults
     * to <tt>false</tt>.
     */
    public UpdateRequest refresh(boolean refresh) {
        this.refresh = refresh;
        return this;
    }

    public boolean refresh() {
        return this.refresh;
    }

    /**
     * The replication type.
     */
    public ReplicationType replicationType() {
        return this.replicationType;
    }

    /**
     * Sets the replication type.
     */
    public UpdateRequest replicationType(ReplicationType replicationType) {
        this.replicationType = replicationType;
        return this;
    }

    public WriteConsistencyLevel consistencyLevel() {
        return this.consistencyLevel;
    }

    /**
     * Sets the consistency level of write. Defaults to {@link org.elasticsearch.action.WriteConsistencyLevel#DEFAULT}
     */
    public UpdateRequest consistencyLevel(WriteConsistencyLevel consistencyLevel) {
        this.consistencyLevel = consistencyLevel;
        return this;
    }
    /**
     * Sets the doc to use for updates when a script is not specified.
     */
    public UpdateRequest doc(IndexRequest doc) {
        this.doc = doc;
        return this;
    }

    /**
     * Sets the doc to use for updates when a script is not specified.
     */
    public UpdateRequest doc(XContentBuilder source) {
        safeDoc().source(source);
        return this;
    }

    /**
     * Sets the doc to use for updates when a script is not specified.
     */
    public UpdateRequest doc(Map source) {
        safeDoc().source(source);
        return this;
    }

    /**
     * Sets the doc to use for updates when a script is not specified.
     */
    public UpdateRequest doc(Map source, XContentType contentType) {
        safeDoc().source(source, contentType);
        return this;
    }

    /**
     * Sets the doc to use for updates when a script is not specified.
     */
    public UpdateRequest doc(String source) {
        safeDoc().source(source);
        return this;
    }

    /**
     * Sets the doc to use for updates when a script is not specified.
     */
    public UpdateRequest doc(byte[] source) {
        safeDoc().source(source);
        return this;
    }

    /**
     * Sets the doc to use for updates when a script is not specified.
     */
    public UpdateRequest doc(byte[] source, int offset, int length) {
        safeDoc().source(source, offset, length);
        return this;
    }

    /**
     * Sets the doc to use for updates when a script is not specified, the doc provided
     * is a field and value pairs.
     */
    public UpdateRequest doc(Object... source) {
        safeDoc().source(source);
        return this;
    }

    /**
     * Sets the doc to use for updates when a script is not specified.
     */
    public UpdateRequest doc(String field, Object value) {
        safeDoc().source(field, value);
        return this;
    }

    public IndexRequest doc() {
        return this.doc;
    }

    // Lazily creates the partial-doc request so the overloads above can
    // delegate their source(...) calls to it.
    private IndexRequest safeDoc() {
        if (doc == null) {
            doc = new IndexRequest();
        }
        return doc;
    }
    /**
     * Sets the index request to be used if the document does not exists. Otherwise, a {@link org.elasticsearch.index.engine.DocumentMissingException}
     * is thrown.
     */
    public UpdateRequest upsert(IndexRequest upsertRequest) {
        this.upsertRequest = upsertRequest;
        return this;
    }

    /**
     * Sets the doc source of the update request to be used when the document does not exists.
     */
    public UpdateRequest upsert(XContentBuilder source) {
        safeUpsertRequest().source(source);
        return this;
    }

    /**
     * Sets the doc source of the update request to be used when the document does not exists.
     */
    public UpdateRequest upsert(Map source) {
        safeUpsertRequest().source(source);
        return this;
    }

    /**
     * Sets the doc source of the update request to be used when the document does not exists.
     */
    public UpdateRequest upsert(Map source, XContentType contentType) {
        safeUpsertRequest().source(source, contentType);
        return this;
    }

    /**
     * Sets the doc source of the update request to be used when the document does not exists.
     */
    public UpdateRequest upsert(String source) {
        safeUpsertRequest().source(source);
        return this;
    }

    /**
     * Sets the doc source of the update request to be used when the document does not exists.
     */
    public UpdateRequest upsert(byte[] source) {
        safeUpsertRequest().source(source);
        return this;
    }

    /**
     * Sets the doc source of the update request to be used when the document does not exists.
     */
    public UpdateRequest upsert(byte[] source, int offset, int length) {
        safeUpsertRequest().source(source, offset, length);
        return this;
    }

    /**
     * Sets the doc source of the update request to be used when the document does not exists. The doc
     * includes field and value pairs.
     */
    public UpdateRequest upsert(Object... source) {
        safeUpsertRequest().source(source);
        return this;
    }

    public IndexRequest upsertRequest() {
        return this.upsertRequest;
    }

    // Lazily creates the upsert request so the overloads above can delegate
    // their source(...) calls to it.
    private IndexRequest safeUpsertRequest() {
        if (upsertRequest == null) {
            upsertRequest = new IndexRequest();
        }
        return upsertRequest;
    }
    public UpdateRequest source(XContentBuilder source) throws Exception {
        return source(source.bytes());
    }

    public UpdateRequest source(byte[] source) throws Exception {
        return source(source, 0, source.length);
    }

    public UpdateRequest source(byte[] source, int offset, int length) throws Exception {
        return source(new BytesArray(source, offset, length));
    }

    /**
     * Fills this request from a REST-style request body. Recognized top-level
     * fields: script, params, lang, upsert, doc, doc_as_upsert; any other
     * top-level field is silently ignored.
     */
    public UpdateRequest source(BytesReference source) throws Exception {
        XContentType xContentType = XContentFactory.xContentType(source);
        XContentParser parser = XContentFactory.xContent(xContentType).createParser(source);
        try {
            XContentParser.Token t = parser.nextToken();
            if (t == null) {
                return this;
            }
            String currentFieldName = null;
            while ((t = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (t == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if ("script".equals(currentFieldName)) {
                    script = parser.textOrNull();
                } else if ("params".equals(currentFieldName)) {
                    scriptParams = parser.map();
                } else if ("lang".equals(currentFieldName)) {
                    scriptLang = parser.text();
                } else if ("upsert".equals(currentFieldName)) {
                    // Copy the whole sub-object verbatim into the upsert request.
                    XContentBuilder builder = XContentFactory.contentBuilder(xContentType);
                    builder.copyCurrentStructure(parser);
                    safeUpsertRequest().source(builder);
                } else if ("doc".equals(currentFieldName)) {
                    // Copy the whole sub-object verbatim into the partial doc.
                    XContentBuilder docBuilder = XContentFactory.contentBuilder(xContentType);
                    docBuilder.copyCurrentStructure(parser);
                    safeDoc().source(docBuilder);
                } else if ("doc_as_upsert".equals(currentFieldName)) {
                    docAsUpsert(parser.booleanValue());
                }
            }
        } finally {
            parser.close();
        }
        return this;
    }

    public boolean docAsUpsert() {
        return this.docAsUpsert;
    }

    public void docAsUpsert(boolean shouldUpsertDoc) {
        this.docAsUpsert = shouldUpsertDoc;
    }
    /**
     * Deserializes this request from the transport stream; the field order
     * must stay in lock-step with {@link #writeTo(StreamOutput)}.
     */
    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        replicationType = ReplicationType.fromId(in.readByte());
        consistencyLevel = WriteConsistencyLevel.fromId(in.readByte());
        type = in.readSharedString();
        id = in.readString();
        routing = in.readOptionalString();
        script = in.readOptionalString();
        scriptLang = in.readOptionalString();
        scriptParams = in.readMap();
        retryOnConflict = in.readVInt();
        refresh = in.readBoolean();
        if (in.readBoolean()) {
            doc = new IndexRequest();
            doc.readFrom(in);
        }
        // -1 encodes "no fields requested".
        int size = in.readInt();
        if (size >= 0) {
            fields = new String[size];
            for (int i = 0; i < size; i++) {
                fields[i] = in.readString();
            }
        }
        if (in.readBoolean()) {
            upsertRequest = new IndexRequest();
            upsertRequest.readFrom(in);
        }
        docAsUpsert = in.readBoolean();
        version = in.readLong();
        versionType = VersionType.fromValue(in.readByte());
    }
    /**
     * Serializes this request to the transport stream; see
     * {@link #readFrom(StreamInput)} for the matching read order. Nested doc
     * and upsert requests are stamped with this request's index/type/id
     * before being written.
     */
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeByte(replicationType.id());
        out.writeByte(consistencyLevel.id());
        out.writeSharedString(type);
        out.writeString(id);
        out.writeOptionalString(routing);
        out.writeOptionalString(script);
        out.writeOptionalString(scriptLang);
        out.writeMap(scriptParams);
        out.writeVInt(retryOnConflict);
        out.writeBoolean(refresh);
        if (doc == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            // make sure the basics are set
            doc.index(index);
            doc.type(type);
            doc.id(id);
            doc.writeTo(out);
        }
        if (fields == null) {
            out.writeInt(-1);
        } else {
            out.writeInt(fields.length);
            for (String field : fields) {
                out.writeString(field);
            }
        }
        if (upsertRequest == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            // make sure the basics are set
            upsertRequest.index(index);
            upsertRequest.type(type);
            upsertRequest.id(id);
            upsertRequest.writeTo(out);
        }
        out.writeBoolean(docAsUpsert);
        out.writeLong(version);
        out.writeByte(versionType.getValue());
    }
}
| src_main_java_org_elasticsearch_action_update_UpdateRequest.java |
// Registers a one-shot listener that completes the client call once the
// restore for this specific snapshot finishes, then unregisters itself.
restoreService.addListener(new RestoreService.RestoreCompletionListener() {
    SnapshotId snapshotId = new SnapshotId(request.repository(), request.snapshot());

    @Override
    public void onRestoreCompletion(SnapshotId snapshotId, RestoreInfo snapshot) {
        // The parameter shadows the field, hence the explicit this.snapshotId:
        // only react to the snapshot this request started.
        if (this.snapshotId.equals(snapshotId)) {
            listener.onResponse(new RestoreSnapshotResponse(snapshot));
            restoreService.removeListener(this);
        }
    }
});
// Adapts the paging predicate's comparator and iteration type into a
// Map.Entry comparator by delegating to SortingUtil.compare.
return new Comparator<Map.Entry>() {
    public int compare(Map.Entry entry1, Map.Entry entry2) {
        return SortingUtil.compare(pagingPredicate.getComparator(),
                pagingPredicate.getIterationType(), entry1, entry2);
    }
};
// Dispatches the scroll query phase on the SEARCH thread pool for every
// scroll context whose shard target lives on the local node.
threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
    @Override
    public void run() {
        Tuple<String, Long>[] context1 = scrollId.getContext();
        for (int i = 0; i < context1.length; i++) {
            Tuple<String, Long> target = context1[i];
            DiscoveryNode node = nodes.get(target.v1());
            // Skip targets whose node has left the cluster or is not local.
            if (node != null && nodes.localNodeId().equals(node.id())) {
                executeQueryPhase(i, counter, node, target.v2());
            }
        }
    }
});
/**
 * Renders collections of OrientDB records as a fixed-width ASCII table on a
 * console. Column widths are computed up-front by parseColumns (defined
 * elsewhere in this class).
 */
public class OTableFormatter {
    // Suffix appended when a value is truncated to fit its column width.
    protected final static String MORE = "...";

    protected final OConsoleApplication out;
    protected int minColumnSize = 4;
    protected int maxWidthSize = 132;

    // NOTE(review): static mutable state shared by every formatter instance
    // and mutated by the *instance* method hideRID() below — confirm that
    // cross-instance visibility is intended.
    protected final static Set<String> prefixedColumns = new HashSet<String>(Arrays.asList(new String[] { "#", "@RID" }));

    // Fallback datetime format used when no database-configured format is
    // available (see getFieldValue). SimpleDateFormat is not thread-safe;
    // this instance field presumably assumes single-threaded console use.
    protected final SimpleDateFormat DEF_DATEFORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");

    public OTableFormatter(final OConsoleApplication iConsole) {
        this.out = iConsole;
    }

    // hideRID(true) removes the @RID pseudo-column; hideRID(false) restores
    // it. Affects all instances because prefixedColumns is static.
    public OTableFormatter hideRID(final boolean iValue) {
        if (iValue)
            prefixedColumns.remove("@RID");
        else
            prefixedColumns.add("@RID");
        return this;
    }

    public void writeRecords(final Collection<OIdentifiable> resultSet, final int limit) {
        writeRecords(resultSet, limit, null);
    }

    /**
     * Dumps up to {@code limit} records (-1 = unlimited), invoking the
     * optional callback after each row; prints a warning line when more
     * records exist than the limit allows.
     */
    public void writeRecords(final Collection<OIdentifiable> resultSet, final int limit,
        final OCallable<Object, OIdentifiable> iAfterDump) {
        final Map<String, Integer> columns = parseColumns(resultSet, limit);

        int fetched = 0;
        for (OIdentifiable record : resultSet) {
            dumpRecordInTable(fetched++, record, columns);
            if (iAfterDump != null)
                iAfterDump.call(record);

            if (limit > -1 && fetched >= limit) {
                printHeaderLine(columns);
                out.message("\nLIMIT EXCEEDED: resultset contains more items not displayed (limit=" + limit + ")");
                return;
            }
        }

        // Close the table only if at least one row was printed.
        if (fetched > 0)
            printHeaderLine(columns);
    }

    public int getMaxWidthSize() {
        return maxWidthSize;
    }

    public OTableFormatter setMaxWidthSize(int maxWidthSize) {
        this.maxWidthSize = maxWidthSize;
        return this;
    }
    /**
     * Prints a single record as one table row; the header is printed first
     * when this is row 0. Values longer than their column width are truncated
     * with "...".
     */
    public void dumpRecordInTable(final int iIndex, final OIdentifiable iRecord, final Map<String, Integer> iColumns) {
        if (iIndex == 0)
            printHeader(iColumns);

        // FORMAT THE LINE DYNAMICALLY
        List<Object> vargs = new ArrayList<Object>();
        try {
            if (iRecord instanceof ODocument)
                ((ODocument) iRecord).setLazyLoad(false);

            final StringBuilder format = new StringBuilder(maxWidthSize);
            for (Entry<String, Integer> col : iColumns.entrySet()) {
                if (format.length() > 0)
                    format.append('|');
                // Left-aligned, padded to the column width.
                format.append("%-" + col.getValue() + "s");

                Object value = getFieldValue(iIndex, iRecord, col.getKey());
                if (value != null) {
                    value = value.toString();
                    if (((String) value).length() > col.getValue()) {
                        // APPEND ...
                        value = ((String) value).substring(0, col.getValue() - 3) + MORE;
                    }
                }
                vargs.add(value);
            }
            out.message(format.toString() + "\n", vargs.toArray());

        } catch (Throwable t) {
            // NOTE(review): catching Throwable keeps one bad record from
            // aborting the whole dump, but also swallows serious errors.
            out.message("%3d|%9s|%s\n", iIndex, iRecord.getIdentity(), "Error on loading record dued to: " + t);
        }
    }
    /**
     * Resolves the value to display for one cell: pseudo-columns ("#" row
     * number and "@RID") first, then document fields, with collections,
     * linked records, dates, and byte arrays rendered in compact form.
     */
    private Object getFieldValue(final int iIndex, final OIdentifiable iRecord, final String iColumnName) {
        Object value = null;

        if (iColumnName.equals("#"))
            // RECORD NUMBER
            value = iIndex;
        else if (iColumnName.equals("@RID"))
            // RID
            value = iRecord.getIdentity().toString();
        else if (iRecord instanceof ORecordSchemaAwareAbstract<?>)
            value = ((ORecordSchemaAwareAbstract<?>) iRecord).field(iColumnName);
        else if (iRecord instanceof ORecordBytes)
            value = "<binary> (size=" + ((ORecordBytes) iRecord).toStream().length + " bytes)";

        // Collections and linked records are summarized instead of expanded.
        if (value instanceof OMultiCollectionIterator<?>)
            value = "[" + ((OMultiCollectionIterator<?>) value).size() + "]";
        else if (value instanceof Collection<?>)
            value = "[" + ((Collection<?>) value).size() + "]";
        else if (value instanceof ORecord<?>) {
            if (((ORecord<?>) value).getIdentity().equals(ORecordId.EMPTY_RECORD_ID)) {
                value = ((ORecord<?>) value).toString();
            } else {
                value = ((ORecord<?>) value).getIdentity().toString();
            }
        } else if (value instanceof Date) {
            // Prefer the database-configured datetime format when available,
            // falling back to the formatter's default pattern.
            final ODatabaseRecord db = ODatabaseRecordThreadLocal.INSTANCE.getIfDefined();
            if (db != null)
                value = db.getStorage().getConfiguration().getDateTimeFormatInstance().format((Date) value);
            else {
                value = DEF_DATEFORMAT.format((Date) value);
            }
        } else if (value instanceof byte[])
            value = "byte[" + ((byte[]) value).length + "]";

        return value;
    }
/**
 * Prints the table header: a separator line, the column names (each padded or
 * truncated to its column width), and a second separator line.
 *
 * @param iColumns ordered map of column name to column width
 */
private void printHeader(final Map<String, Integer> iColumns) {
  printHeaderLine(iColumns);

  final StringBuilder row = new StringBuilder();
  boolean firstColumn = true;
  for (Entry<String, Integer> column : iColumns.entrySet()) {
    if (!firstColumn)
      row.append('|');
    firstColumn = false;

    String colName = column.getKey();
    if (colName.length() > column.getValue())
      // the name itself may not overflow the column
      colName = colName.substring(0, column.getValue());
    row.append(String.format("%-" + column.getValue() + "s", colName));
  }
  row.append("\n");

  out.message(row.toString());
  printHeaderLine(iColumns);
}
/**
 * Prints a horizontal separator line: one run of '-' per column, the runs
 * joined by '+', surrounded by newlines.
 *
 * @param iColumns ordered map of column name to column width
 */
private void printHeaderLine(final Map<String, Integer> iColumns) {
  final StringBuilder line = new StringBuilder("\n");

  boolean firstColumn = true;
  for (Entry<String, Integer> col : iColumns.entrySet()) {
    if (!firstColumn)
      line.append("+");
    firstColumn = false;

    // one dash per character of the column's width
    for (int cell = 0; cell < col.getValue(); ++cell)
      line.append("-");
  }

  line.append("\n");
  out.message(line.toString());
}
/**
 * Builds the column map (name -> width) by scanning up to {@code limit}+1
 * records, sizing every column to the widest value seen (never below
 * {@code minColumnSize}). When the cumulative width exceeds
 * {@code maxWidthSize}, the widest columns are shrunk by 10% per pass until
 * the table fits or every column has reached its minimum size.
 *
 * @param resultSet records to scan for column names and widths
 * @param limit     maximum number of rows to consider, or -1 for no limit
 * @return ordered map of column name to column width, in characters
 */
private Map<String, Integer> parseColumns(final Collection<OIdentifiable> resultSet, final int limit) {
    // LinkedHashMap keeps insertion order: synthetic columns first, then fields
    final Map<String, Integer> columns = new LinkedHashMap<String, Integer>();

    for (String c : prefixedColumns)
        columns.put(c, minColumnSize);

    int fetched = 0;
    for (OIdentifiable id : resultSet) {
        ORecord<?> rec = id.getRecord();

        for (String c : prefixedColumns)
            columns.put(c, getColumnSize(fetched, rec, c, columns.get(c)));

        if (rec instanceof ODocument) {
            ((ODocument) rec).setLazyLoad(false);
            // PARSE ALL THE DOCUMENT'S FIELDS
            ODocument doc = (ODocument) rec;
            for (String fieldName : doc.fieldNames()) {
                columns.put(fieldName, getColumnSize(fetched, doc, fieldName, columns.get(fieldName)));
            }
        } else if (rec instanceof ORecordBytes) {
            // UNIQUE BINARY FIELD
            columns.put("value", maxWidthSize - 15);
        }

        if (limit > -1 && fetched++ >= limit)
            break;
    }

    // COMPUTE MAXIMUM WIDTH
    int width = 0;
    for (Entry<String, Integer> col : columns.entrySet())
        width += col.getValue();

    if (width > maxWidthSize) {
        // SCALE COLUMNS AUTOMATICALLY
        final List<Map.Entry<String, Integer>> orderedColumns = new ArrayList<Map.Entry<String, Integer>>();
        orderedColumns.addAll(columns.entrySet());
        Collections.sort(orderedColumns, new Comparator<Map.Entry<String, Integer>>() {

            public int compare(Map.Entry<String, Integer> o1, Map.Entry<String, Integer> o2) {
                return o1.getValue().compareTo(o2.getValue());
            }
        });

        // START CUTTING THE BIGGEST ONES
        Collections.reverse(orderedColumns);
        while (width > maxWidthSize) {
            int oldWidth = width;

            for (Map.Entry<String, Integer> entry : orderedColumns) {
                // shrink each column by 10% of its current width
                final int redux = entry.getValue() * 10 / 100;

                if (entry.getValue() - redux < minColumnSize)
                    // RESTART FROM THE LARGEST COLUMN
                    break;

                entry.setValue(entry.getValue() - redux);

                width -= redux;
                if (width <= maxWidthSize)
                    break;
            }

            if (width == oldWidth)
                // REACHED THE MINIMUM: no column could be shrunk this pass
                break;
        }

        // POPULATE THE COLUMNS WITH THE REDUXED VALUES
        columns.clear();
        for (String c : prefixedColumns)
            columns.put(c, minColumnSize);
        Collections.reverse(orderedColumns);
        for (Entry<String, Integer> col : orderedColumns)
            // if (!col.getKey().equals("#") && !col.getKey().equals("@RID"))
            columns.put(col.getKey(), col.getValue());
    }

    return columns;
}
/**
 * Computes the updated width of a column after seeing one more record.
 * The result is the widest of: the previously recorded width, the field
 * name (header) length, the rendered value's length, and the configured
 * minimum column size.
 *
 * @param iIndex    zero-based row number, forwarded to {@code getFieldValue}
 * @param iRecord   record whose field is measured
 * @param fieldName column/field name
 * @param origSize  previously recorded width, or null on first sight
 * @return the new column width, never below {@code minColumnSize}
 */
private Integer getColumnSize(final Integer iIndex, final ORecord<?> iRecord, final String fieldName, final Integer origSize) {
  // start from the header width, widened by any previously recorded size
  int size = origSize == null ? fieldName.length() : Math.max(origSize, fieldName.length());

  final Object fieldValue = getFieldValue(iIndex, iRecord, fieldName);
  if (fieldValue != null) {
    size = Math.max(size, fieldValue.toString().length());
  }

  // never shrink below the configured minimum column width
  return Math.max(size, minColumnSize);
}
} | 0true
| tools_src_main_java_com_orientechnologies_orient_console_OTableFormatter.java |
/**
 * Default {@link Node} implementation. The constructor wires the complete
 * Guice module graph for a node (settings, plugins, transport, indices, etc.)
 * and creates the injector; {@link #start()}, {@link #stop()} and
 * {@link #close()} then drive the lifecycle of every node-level service in a
 * fixed order.
 */
public final class InternalNode implements Node {

    // tracks the node's lifecycle state: initialized -> started -> stopped -> closed
    private final Lifecycle lifecycle = new Lifecycle();

    private final Injector injector;

    private final Settings settings;

    private final Environment environment;

    private final PluginsService pluginsService;

    private final Client client;

    /** Creates a node from the default (empty) settings, loading config from disk. */
    public InternalNode() throws ElasticsearchException {
        this(ImmutableSettings.Builder.EMPTY_SETTINGS, true);
    }

    /**
     * Builds the node: prepares and merges settings, loads plugins, assembles
     * the Guice modules and creates the injector. No services are started
     * here; see {@link #start()}.
     *
     * @param pSettings          base settings supplied by the caller
     * @param loadConfigSettings whether to also load settings from the config file
     */
    public InternalNode(Settings pSettings, boolean loadConfigSettings) throws ElasticsearchException {
        // merge caller settings with config-file settings, then let the tribe
        // feature rewrite them when tribe.* settings are present
        Tuple<Settings, Environment> tuple = InternalSettingsPreparer.prepareSettings(pSettings, loadConfigSettings);
        tuple = new Tuple<Settings, Environment>(TribeService.processSettings(tuple.v1()), tuple.v2());

        Version version = Version.CURRENT;

        ESLogger logger = Loggers.getLogger(Node.class, tuple.v1().get("name"));
        logger.info("version[{}], pid[{}], build[{}/{}]", version, JvmInfo.jvmInfo().pid(), Build.CURRENT.hashShort(), Build.CURRENT.timestamp());

        logger.info("initializing ...");

        if (logger.isDebugEnabled()) {
            Environment env = tuple.v2();
            logger.debug("using home [{}], config [{}], data [{}], logs [{}], work [{}], plugins [{}]",
                    env.homeFile(), env.configFile(), Arrays.toString(env.dataFiles()), env.logsFile(),
                    env.workFile(), env.pluginsFile());
        }

        // plugins may contribute or override settings, so re-read them here
        this.pluginsService = new PluginsService(tuple.v1(), tuple.v2());
        this.settings = pluginsService.updatedSettings();
        // create the environment based on the finalized (processed) view of the settings
        this.environment = new Environment(this.settings());

        CompressorFactory.configure(settings);

        NodeEnvironment nodeEnvironment = new NodeEnvironment(this.settings, this.environment);

        // register every node-level module; dependency resolution is left to Guice
        ModulesBuilder modules = new ModulesBuilder();
        modules.add(new Version.Module(version));
        modules.add(new CacheRecyclerModule(settings));
        modules.add(new PageCacheRecyclerModule(settings));
        modules.add(new PluginsModule(settings, pluginsService));
        modules.add(new SettingsModule(settings));
        modules.add(new NodeModule(this));
        modules.add(new NetworkModule());
        modules.add(new ScriptModule(settings));
        modules.add(new EnvironmentModule(environment));
        modules.add(new NodeEnvironmentModule(nodeEnvironment));
        modules.add(new ClusterNameModule(settings));
        modules.add(new ThreadPoolModule(settings));
        modules.add(new DiscoveryModule(settings));
        modules.add(new ClusterModule(settings));
        modules.add(new RestModule(settings));
        modules.add(new TransportModule(settings));
        if (settings.getAsBoolean("http.enabled", true)) {
            // the HTTP layer is optional and can be disabled via "http.enabled"
            modules.add(new HttpServerModule(settings));
        }
        modules.add(new RiversModule(settings));
        modules.add(new IndicesModule(settings));
        modules.add(new SearchModule());
        modules.add(new ActionModule(false));
        modules.add(new MonitorModule(settings));
        modules.add(new GatewayModule(settings));
        modules.add(new NodeClientModule());
        modules.add(new BulkUdpModule());
        modules.add(new ShapeModule());
        modules.add(new PercolatorModule());
        modules.add(new ResourceWatcherModule());
        modules.add(new RepositoriesModule());
        modules.add(new TribeModule());

        injector = modules.createInjector();

        client = injector.getInstance(Client.class);

        logger.info("initialized");
    }

    @Override
    public Settings settings() {
        return this.settings;
    }

    @Override
    public Client client() {
        return client;
    }

    /**
     * Starts all node services. Idempotent: returns immediately when the node
     * is already started. Note the ordering constraints expressed inline
     * (e.g. gateway after discovery).
     */
    public Node start() {
        if (!lifecycle.moveToStarted()) {
            return this;
        }

        ESLogger logger = Loggers.getLogger(Node.class, settings.get("name"));
        logger.info("starting ...");

        // hack around dependency injection problem (for now...)
        injector.getInstance(Discovery.class).setAllocationService(injector.getInstance(AllocationService.class));

        // plugin services start before the core services
        for (Class<? extends LifecycleComponent> plugin : pluginsService.services()) {
            injector.getInstance(plugin).start();
        }

        injector.getInstance(IndicesService.class).start();
        injector.getInstance(IndexingMemoryController.class).start();
        injector.getInstance(IndicesClusterStateService.class).start();
        injector.getInstance(IndicesTTLService.class).start();
        injector.getInstance(RiversManager.class).start();
        injector.getInstance(ClusterService.class).start();
        injector.getInstance(RoutingService.class).start();
        injector.getInstance(SearchService.class).start();
        injector.getInstance(MonitorService.class).start();
        injector.getInstance(RestController.class).start();
        injector.getInstance(TransportService.class).start();
        DiscoveryService discoService = injector.getInstance(DiscoveryService.class).start();

        // gateway should start after disco, so it can try and recovery from gateway on "start"
        injector.getInstance(GatewayService.class).start();

        if (settings.getAsBoolean("http.enabled", true)) {
            injector.getInstance(HttpServer.class).start();
        }
        injector.getInstance(BulkUdpService.class).start();
        injector.getInstance(ResourceWatcherService.class).start();
        injector.getInstance(TribeService.class).start();

        logger.info("started");

        return this;
    }

    /**
     * Stops all node services, roughly in reverse start order. Idempotent:
     * returns immediately when the node is not started.
     */
    @Override
    public Node stop() {
        if (!lifecycle.moveToStopped()) {
            return this;
        }
        ESLogger logger = Loggers.getLogger(Node.class, settings.get("name"));
        logger.info("stopping ...");

        injector.getInstance(TribeService.class).stop();
        injector.getInstance(BulkUdpService.class).stop();
        injector.getInstance(ResourceWatcherService.class).stop();
        if (settings.getAsBoolean("http.enabled", true)) {
            injector.getInstance(HttpServer.class).stop();
        }

        injector.getInstance(RiversManager.class).stop();

        // stop any changes happening as a result of cluster state changes
        injector.getInstance(IndicesClusterStateService.class).stop();
        // we close indices first, so operations won't be allowed on it
        injector.getInstance(IndexingMemoryController.class).stop();
        injector.getInstance(IndicesTTLService.class).stop();
        injector.getInstance(IndicesService.class).stop();
        // sleep a bit to let operations finish with indices service
//        try {
//            Thread.sleep(500);
//        } catch (InterruptedException e) {
//            // ignore
//        }
        injector.getInstance(RoutingService.class).stop();
        injector.getInstance(ClusterService.class).stop();
        injector.getInstance(DiscoveryService.class).stop();
        injector.getInstance(MonitorService.class).stop();
        injector.getInstance(GatewayService.class).stop();
        injector.getInstance(SearchService.class).stop();
        injector.getInstance(RestController.class).stop();
        injector.getInstance(TransportService.class).stop();

        // plugin services stop last
        for (Class<? extends LifecycleComponent> plugin : pluginsService.services()) {
            injector.getInstance(plugin).stop();
        }

        logger.info("stopped");

        return this;
    }

    /**
     * Fully releases the node: stops it first if needed, then closes every
     * service (timing each phase with a StopWatch), shuts down the thread
     * pool and finally tears down the injector. Idempotent.
     */
    public void close() {
        if (lifecycle.started()) {
            stop();
        }
        if (!lifecycle.moveToClosed()) {
            return;
        }

        ESLogger logger = Loggers.getLogger(Node.class, settings.get("name"));
        logger.info("closing ...");

        StopWatch stopWatch = new StopWatch("node_close");
        stopWatch.start("tribe");
        injector.getInstance(TribeService.class).close();
        stopWatch.stop().start("bulk.udp");
        injector.getInstance(BulkUdpService.class).close();
        stopWatch.stop().start("http");
        if (settings.getAsBoolean("http.enabled", true)) {
            injector.getInstance(HttpServer.class).close();
        }

        stopWatch.stop().start("rivers");
        injector.getInstance(RiversManager.class).close();

        stopWatch.stop().start("client");
        injector.getInstance(Client.class).close();
        stopWatch.stop().start("indices_cluster");
        injector.getInstance(IndicesClusterStateService.class).close();
        stopWatch.stop().start("indices");
        injector.getInstance(IndicesFilterCache.class).close();
        injector.getInstance(IndicesFieldDataCache.class).close();
        injector.getInstance(IndexingMemoryController.class).close();
        injector.getInstance(IndicesTTLService.class).close();
        injector.getInstance(IndicesService.class).close();
        stopWatch.stop().start("routing");
        injector.getInstance(RoutingService.class).close();
        stopWatch.stop().start("cluster");
        injector.getInstance(ClusterService.class).close();
        stopWatch.stop().start("discovery");
        injector.getInstance(DiscoveryService.class).close();
        stopWatch.stop().start("monitor");
        injector.getInstance(MonitorService.class).close();
        stopWatch.stop().start("gateway");
        injector.getInstance(GatewayService.class).close();
        stopWatch.stop().start("search");
        injector.getInstance(SearchService.class).close();
        stopWatch.stop().start("rest");
        injector.getInstance(RestController.class).close();
        stopWatch.stop().start("transport");
        injector.getInstance(TransportService.class).close();
        stopWatch.stop().start("percolator_service");
        injector.getInstance(PercolatorService.class).close();

        for (Class<? extends LifecycleComponent> plugin : pluginsService.services()) {
            stopWatch.stop().start("plugin(" + plugin.getName() + ")");
            injector.getInstance(plugin).close();
        }

        stopWatch.stop().start("script");
        injector.getInstance(ScriptService.class).close();

        // orderly thread pool shutdown with a bounded wait, then a forced one
        stopWatch.stop().start("thread_pool");
        injector.getInstance(ThreadPool.class).shutdown();
        try {
            injector.getInstance(ThreadPool.class).awaitTermination(10, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            // ignore
        }
        stopWatch.stop().start("thread_pool_force_shutdown");
        try {
            injector.getInstance(ThreadPool.class).shutdownNow();
        } catch (Exception e) {
            // ignore
        }
        stopWatch.stop();

        if (logger.isTraceEnabled()) {
            logger.trace("Close times for each service:\n{}", stopWatch.prettyPrint());
        }

        injector.getInstance(NodeEnvironment.class).close();
        injector.getInstance(CacheRecycler.class).close();
        injector.getInstance(PageCacheRecycler.class).close();
        Injectors.close(injector);

        CachedStreams.clear();

        logger.info("closed");
    }

    @Override
    public boolean isClosed() {
        return lifecycle.closed();
    }

    /** @return the node's Guice injector (exposed for tests and internal wiring). */
    public Injector injector() {
        return this.injector;
    }

    /**
     * Standalone entry point: starts a node and closes it on JVM shutdown.
     */
    public static void main(String[] args) throws Exception {
        final InternalNode node = new InternalNode();
        node.start();
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                node.close();
            }
        });
    }
}
| src_main_java_org_elasticsearch_node_internal_InternalNode.java |
/**
 * The response of a delete action: identifies the document that was (or was
 * not) deleted and the version resulting from the delete operation.
 */
public class DeleteResponse extends ActionResponse {

    private String index;
    private String id;
    private String type;
    private long version;
    // true when a document existed and was actually deleted
    private boolean found;

    /** No-arg constructor for stream deserialization via {@link #readFrom}. */
    public DeleteResponse() {

    }

    public DeleteResponse(String index, String type, String id, long version, boolean found) {
        this.index = index;
        this.id = id;
        this.type = type;
        this.version = version;
        this.found = found;
    }

    /**
     * The index the document was deleted from.
     */
    public String getIndex() {
        return this.index;
    }

    /**
     * The type of the document deleted.
     */
    public String getType() {
        return this.type;
    }

    /**
     * The id of the document deleted.
     */
    public String getId() {
        return this.id;
    }

    /**
     * The version of the delete operation.
     */
    public long getVersion() {
        return this.version;
    }

    /**
     * Returns <tt>true</tt> if a doc was found to delete.
     */
    public boolean isFound() {
        return found;
    }

    // NOTE: the field order below is the wire format and must stay identical
    // between readFrom and writeTo.
    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        index = in.readSharedString();
        type = in.readSharedString();
        id = in.readString();
        version = in.readLong();
        found = in.readBoolean();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeSharedString(index);
        out.writeSharedString(type);
        out.writeString(id);
        out.writeLong(version);
        out.writeBoolean(found);
    }
}
| src_main_java_org_elasticsearch_action_delete_DeleteResponse.java |
// Test helper: signals the surrounding latch once per added item so the test
// thread can wait for the add event to be delivered; removals are ignored.
ItemListener listener = new ItemListener() {
    public void itemAdded(ItemEvent itemEvent) {
        // one count-down per delivered add event
        latch.countDown();
    }

    public void itemRemoved(ItemEvent item) {
        // no-op: this listener only tracks additions
    }
};
| hazelcast-client_src_test_java_com_hazelcast_client_collections_ClientListTest.java |
// Admin-presentation metadata holder: declares the "Rules" tab used when
// editing this entity in the admin UI.
public static class Tab {

    // Message-bundle keys for tab display names.
    public static class Name {
        public static final String Rules = "StructuredContentImpl_Rules_Tab";
    }

    // Relative ordering of tabs; larger values render later.
    public static class Order {
        public static final int Rules = 1000;
    }
}
| admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_structure_domain_StructuredContentImpl.java |
/**
 * Node-level transaction manager. Creates transaction contexts, keeps backup
 * logs of in-flight transactions (so a surviving node can commit or roll back
 * when the owning member or client dies), and supports XA-style recovery of
 * prepared transactions, both cluster-wide and for clients.
 */
public class TransactionManagerServiceImpl implements TransactionManagerService, ManagedService,
        MembershipAwareService, ClientAwareService {

    public static final String SERVICE_NAME = "hz:core:txManagerService";

    // per-remote-member timeout (ms) when collecting recovery results
    public static final int RECOVER_TIMEOUT = 5000;

    private final NodeEngineImpl nodeEngine;

    private final ILogger logger;

    // txnId -> backup log of a transaction owned by another member/client
    private final ConcurrentMap<String, TxBackupLog> txBackupLogs = new ConcurrentHashMap<String, TxBackupLog>();

    // XA transactions currently managed on this node, keyed by their Xid
    private final ConcurrentMap<SerializableXID, Transaction>
            managedTransactions = new ConcurrentHashMap<SerializableXID, Transaction>();

    // recovered client XA transactions awaiting a commit/rollback decision
    private final ConcurrentMap<SerializableXID, RecoveredTransaction>
            clientRecoveredTransactions = new ConcurrentHashMap<SerializableXID, RecoveredTransaction>();

    public TransactionManagerServiceImpl(NodeEngineImpl nodeEngine) {
        this.nodeEngine = nodeEngine;
        logger = nodeEngine.getLogger(TransactionManagerService.class);
    }

    /**
     * Runs the task inside a new transaction: commits on success, rolls back
     * on any throwable, unwrapping/rethrowing it as a TransactionException
     * where appropriate.
     */
    @Override
    public <T> T executeTransaction(TransactionOptions options, TransactionalTask<T> task) throws TransactionException {
        if (task == null) {
            throw new NullPointerException("TransactionalTask is required!");
        }
        final TransactionContextImpl context = new TransactionContextImpl(this, nodeEngine, options, null);
        context.beginTransaction();
        try {
            final T value = task.execute(context);
            context.commitTransaction();
            return value;
        } catch (Throwable e) {
            context.rollbackTransaction();
            if (e instanceof TransactionException) {
                throw (TransactionException) e;
            }
            if (e.getCause() instanceof TransactionException) {
                throw (TransactionException) e.getCause();
            }
            if (e instanceof RuntimeException) {
                throw (RuntimeException) e;
            }
            throw new TransactionException(e);
        }
    }

    @Override
    public TransactionContext newTransactionContext(TransactionOptions options) {
        return new TransactionContextImpl(this, nodeEngine, options, null);
    }

    /** Creates a context for a transaction owned by the given client. */
    @Override
    public TransactionContext newClientTransactionContext(TransactionOptions options, String clientUuid) {
        return new TransactionContextImpl(this, nodeEngine, options, clientUuid);
    }

    @Override
    public void init(NodeEngine nodeEngine, Properties properties) {
    }

    @Override
    public void reset() {
        txBackupLogs.clear();
    }

    @Override
    public void shutdown(boolean terminate) {
        reset();
    }

    @Override
    public void memberAdded(MembershipServiceEvent event) {
    }

    public void addClientRecoveredTransaction(RecoveredTransaction rt) {
        clientRecoveredTransactions.put(rt.getXid(), rt);
    }

    /**
     * Completes a previously recovered client XA transaction: commit or
     * rollback according to the flag. Errors are logged, not propagated.
     */
    public void recoverClientTransaction(SerializableXID sXid, boolean commit) {
        final RecoveredTransaction rt = clientRecoveredTransactions.remove(sXid);
        if (rt == null) {
            return;
        }
        TransactionImpl tx = new TransactionImpl(this, nodeEngine, rt.getTxnId(), rt.getTxLogs(),
                rt.getTimeoutMillis(), rt.getStartTime(), rt.getCallerUuid());
        if (commit) {
            try {
                tx.commit();
            } catch (Throwable e) {
                logger.warning("Error during committing recovered client transaction!", e);
            }
        } else {
            try {
                tx.rollback();
            } catch (Throwable e) {
                logger.warning("Error during rolling-back recovered client transaction!", e);
            }
        }
    }

    /** When a member leaves, finish the transactions it owned from our backup logs. */
    @Override
    public void memberRemoved(MembershipServiceEvent event) {
        final MemberImpl member = event.getMember();
        String uuid = member.getUuid();
        finalizeTransactionsOf(uuid);
    }

    @Override
    public void memberAttributeChanged(MemberAttributeServiceEvent event) {
    }

    public void addManagedTransaction(Xid xid, Transaction transaction) {
        final SerializableXID sXid = new SerializableXID(xid.getFormatId(),
                xid.getGlobalTransactionId(), xid.getBranchQualifier());
        ((TransactionImpl) transaction).setXid(sXid);
        managedTransactions.put(sXid, transaction);
    }

    public Transaction getManagedTransaction(Xid xid) {
        final SerializableXID sXid = new SerializableXID(xid.getFormatId(),
                xid.getGlobalTransactionId(), xid.getBranchQualifier());
        return managedTransactions.get(sXid);
    }

    public void removeManagedTransaction(Xid xid) {
        final SerializableXID sXid = new SerializableXID(xid.getFormatId(),
                xid.getGlobalTransactionId(), xid.getBranchQualifier());
        managedTransactions.remove(sXid);
    }

    private void finalizeTransactionsOf(String uuid) {
        for (Map.Entry<String, TxBackupLog> entry : txBackupLogs.entrySet()) {
            finalize(uuid, entry.getKey(), entry.getValue());
        }
    }

    /**
     * Decides the fate of one backup log whose owner (member/client) left:
     * ACTIVE -> broadcast rollback to all members; COMMITTING with an Xid ->
     * mark for XA recovery; otherwise replay the log as a commit or rollback.
     * NOTE(review): despite the name this does NOT override Object.finalize()
     * (different signature).
     */
    private void finalize(String uuid, String txnId, TxBackupLog log) {
        OperationService operationService = nodeEngine.getOperationService();
        if (!uuid.equals(log.callerUuid)) {
            return;
        }

        //TODO shouldn't we remove TxBackupLog from map ?
        if (log.state == State.ACTIVE) {
            // transaction never got prepared: tell every member to roll it back
            Collection<MemberImpl> memberList = nodeEngine.getClusterService().getMemberList();
            Collection<Future> futures = new ArrayList<Future>(memberList.size());
            for (MemberImpl member : memberList) {
                Operation op = new BroadcastTxRollbackOperation(txnId);
                Future f = operationService.invokeOnTarget(SERVICE_NAME, op, member.getAddress());
                futures.add(f);
            }

            for (Future future : futures) {
                try {
                    future.get(TransactionOptions.getDefault().getTimeoutMillis(), TimeUnit.MILLISECONDS);
                } catch (Exception e) {
                    logger.warning("Error while rolling-back tx!");
                }
            }
        } else {
            if (log.state == State.COMMITTING && log.xid != null) {
                logger.warning("This log is XA Managed " + log);
                //Marking for recovery
                log.state = State.NO_TXN;
                return;
            }
            TransactionImpl tx = new TransactionImpl(this, nodeEngine, txnId, log.txLogs,
                    log.timeoutMillis, log.startTime, log.callerUuid);
            if (log.state == State.COMMITTING) {
                try {
                    tx.commit();
                } catch (Throwable e) {
                    logger.warning("Error during committing from tx backup!", e);
                }
            } else {
                try {
                    tx.rollback();
                } catch (Throwable e) {
                    logger.warning("Error during rolling-back from tx backup!", e);
                }
            }
        }
    }

    /** Same cleanup as a member leaving, but triggered by a client disconnect. */
    @Override
    public void clientDisconnected(String clientUuid) {
        finalizeTransactionsOf(clientUuid);
    }

    /**
     * Picks up to {@code durability} random members (excluding the local one)
     * to hold backup logs for a transaction.
     */
    Address[] pickBackupAddresses(int durability) {
        final ClusterService clusterService = nodeEngine.getClusterService();
        final List<MemberImpl> members = new ArrayList<MemberImpl>(clusterService.getMemberList());
        members.remove(nodeEngine.getLocalMember());
        final int c = Math.min(members.size(), durability);
        Collections.shuffle(members);
        Address[] addresses = new Address[c];
        for (int i = 0; i < c; i++) {
            addresses[i] = members.get(i).getAddress();
        }
        return addresses;
    }

    /** Records a prepared client XA transaction locally so it can be recovered later. */
    public void addTxBackupLogForClientRecovery(Transaction transaction) {
        TransactionImpl txnImpl = (TransactionImpl) transaction;
        final String callerUuid = txnImpl.getOwnerUuid();
        final SerializableXID xid = txnImpl.getXid();
        final List<TransactionLog> txLogs = txnImpl.getTxLogs();
        final long timeoutMillis = txnImpl.getTimeoutMillis();
        final long startTime = txnImpl.getStartTime();
        TxBackupLog log = new TxBackupLog(txLogs, callerUuid, State.COMMITTING, timeoutMillis, startTime, xid);
        txBackupLogs.put(txnImpl.getTxnId(), log);
    }

    // creates the initial (ACTIVE, empty) backup log; fails if one already exists
    void beginTxBackupLog(String callerUuid, String txnId, SerializableXID xid) {
        TxBackupLog log
                = new TxBackupLog(Collections.<TransactionLog>emptyList(), callerUuid, State.ACTIVE, -1, -1, xid);
        if (txBackupLogs.putIfAbsent(txnId, log) != null) {
            throw new TransactionException("TxLog already exists!");
        }
    }

    // promotes an ACTIVE backup log to COMMITTING with the prepared tx logs;
    // the atomic replace guards against concurrent prepares
    void prepareTxBackupLog(List<TransactionLog> txLogs, String callerUuid, String txnId,
                            long timeoutMillis, long startTime) {
        TxBackupLog beginLog = txBackupLogs.get(txnId);
        if (beginLog == null) {
            throw new TransactionException("Could not find begin tx log!");
        }
        if (beginLog.state != State.ACTIVE) {
            throw new TransactionException("TxLog already exists!");
        }
        TxBackupLog newTxBackupLog
                = new TxBackupLog(txLogs, callerUuid, State.COMMITTING, timeoutMillis, startTime, beginLog.xid);
        if (!txBackupLogs.replace(txnId, beginLog, newTxBackupLog)) {
            throw new TransactionException("TxLog already exists!");
        }
    }

    void rollbackTxBackupLog(String txnId) {
        final TxBackupLog log = txBackupLogs.get(txnId);
        if (log != null) {
            log.state = State.ROLLING_BACK;
        } else {
            logger.warning("No tx backup log is found, tx -> " + txnId);
        }
    }

    void purgeTxBackupLog(String txnId) {
        txBackupLogs.remove(txnId);
    }

    /**
     * XA recovery: asks every other member for its recoverable transactions,
     * merges in the local ones, registers them as managed transactions and
     * returns the set of Xids awaiting a commit/rollback decision.
     */
    public Xid[] recover() {
        List<Future<SerializableCollection>> futures = invokeRecoverOperations();

        Set<SerializableXID> xidSet = new HashSet<SerializableXID>();
        for (Future<SerializableCollection> future : futures) {
            try {
                final SerializableCollection collectionWrapper = future.get(RECOVER_TIMEOUT, TimeUnit.MILLISECONDS);

                for (Data data : collectionWrapper) {
                    final RecoveredTransaction rt = (RecoveredTransaction) nodeEngine.toObject(data);
                    final SerializableXID xid = rt.getXid();
                    TransactionImpl tx = new TransactionImpl(this, nodeEngine, rt.getTxnId(), rt.getTxLogs(),
                            rt.getTimeoutMillis(), rt.getStartTime(), rt.getCallerUuid());
                    tx.setXid(xid);
                    xidSet.add(xid);
                    managedTransactions.put(xid, tx);
                }
            } catch (MemberLeftException e) {
                logger.warning("Member left while recovering: " + e);
            } catch (Throwable e) {
                if (e instanceof ExecutionException) {
                    e = e.getCause() != null ? e.getCause() : e;
                }
                if (e instanceof TargetNotMemberException) {
                    nodeEngine.getLogger(Transaction.class).warning("Member left while recovering: " + e);
                } else {
                    throw ExceptionUtil.rethrow(e);
                }
            }
        }
        final Set<RecoveredTransaction> localSet = recoverLocal();
        for (RecoveredTransaction rt : localSet) {
            xidSet.add(rt.getXid());
        }
        return xidSet.toArray(new Xid[xidSet.size()]);
    }

    // sends a RecoverTxnOperation to every remote member, returning the futures
    private List<Future<SerializableCollection>> invokeRecoverOperations() {
        final OperationService operationService = nodeEngine.getOperationService();
        final ClusterService clusterService = nodeEngine.getClusterService();
        final Collection<MemberImpl> memberList = clusterService.getMemberList();

        List<Future<SerializableCollection>> futures
                = new ArrayList<Future<SerializableCollection>>(memberList.size() - 1);

        for (MemberImpl member : memberList) {
            if (member.localMember()) {
                continue;
            }
            final Future f = operationService.createInvocationBuilder(TransactionManagerServiceImpl.SERVICE_NAME,
                    new RecoverTxnOperation(), member.getAddress()).invoke();
            futures.add(f);
        }
        return futures;
    }

    /**
     * Extracts (and removes from the backup-log map) every local XA log that
     * was marked for recovery (state NO_TXN with an Xid).
     */
    public Set<RecoveredTransaction> recoverLocal() {
        Set<RecoveredTransaction> recovered = new HashSet<RecoveredTransaction>();
        if (!txBackupLogs.isEmpty()) {
            final Set<Map.Entry<String, TxBackupLog>> entries = txBackupLogs.entrySet();
            final Iterator<Map.Entry<String, TxBackupLog>> iter = entries.iterator();
            while (iter.hasNext()) {
                final Map.Entry<String, TxBackupLog> entry = iter.next();
                final TxBackupLog log = entry.getValue();
                final String txnId = entry.getKey();
                if (log.state == State.NO_TXN && log.xid != null) {
                    final RecoveredTransaction rt = new RecoveredTransaction();
                    rt.setTxLogs(log.txLogs);
                    rt.setXid(log.xid);
                    rt.setCallerUuid(log.callerUuid);
                    rt.setStartTime(log.startTime);
                    rt.setTimeoutMillis(log.timeoutMillis);
                    rt.setTxnId(txnId);
                    recovered.add(rt);
                    iter.remove();
                }
            }
        }
        return recovered;
    }

    /**
     * Immutable snapshot (except for {@code state}) of a transaction owned by
     * another member/client, kept so this node can finish the transaction if
     * the owner dies.
     */
    private static final class TxBackupLog {
        private final List<TransactionLog> txLogs;
        private final String callerUuid;
        private final long timeoutMillis;
        private final long startTime;
        private final SerializableXID xid;
        // mutated across threads as the transaction progresses
        private volatile State state;

        private TxBackupLog(List<TransactionLog> txLogs, String callerUuid, State state, long timeoutMillis,
                            long startTime, SerializableXID xid) {
            this.txLogs = txLogs;
            this.callerUuid = callerUuid;
            this.state = state;
            this.timeoutMillis = timeoutMillis;
            this.startTime = startTime;
            this.xid = xid;
        }
    }
}
| hazelcast_src_main_java_com_hazelcast_transaction_impl_TransactionManagerServiceImpl.java |
/**
 * Client-side base proxy for distributed collections: every operation is
 * translated into a request and invoked on the member owning the collection's
 * partition (all requests are routed via {@code partitionKey}).
 *
 * @param <E> element type of the collection
 */
public class AbstractClientCollectionProxy<E> extends ClientProxy implements ICollection<E> {

    // all requests for this collection are routed to the partition of this key
    protected final String partitionKey;

    public AbstractClientCollectionProxy(String instanceName, String serviceName, String name) {
        super(instanceName, serviceName, name);
        partitionKey = getPartitionKey();
    }

    public int size() {
        CollectionSizeRequest request = new CollectionSizeRequest(getName());
        final Integer result = invoke(request);
        return result;
    }

    public boolean isEmpty() {
        return size() == 0;
    }

    public boolean contains(Object o) {
        throwExceptionIfNull(o);
        final CollectionContainsRequest request = new CollectionContainsRequest(getName(), toData(o));
        final Boolean result = invoke(request);
        return result;
    }

    // NOTE: iterates over a point-in-time snapshot fetched from the cluster
    public Iterator<E> iterator() {
        return getAll().iterator();
    }

    public Object[] toArray() {
        return getAll().toArray();
    }

    public <T> T[] toArray(T[] a) {
        return getAll().toArray(a);
    }

    public boolean add(E e) {
        throwExceptionIfNull(e);
        final CollectionAddRequest request = new CollectionAddRequest(getName(), toData(e));
        final Boolean result = invoke(request);
        return result;
    }

    public boolean remove(Object o) {
        throwExceptionIfNull(o);
        final CollectionRemoveRequest request = new CollectionRemoveRequest(getName(), toData(o));
        final Boolean result = invoke(request);
        return result;
    }

    public boolean containsAll(Collection<?> c) {
        throwExceptionIfNull(c);
        // serialize elements once before shipping them with the request
        Set<Data> valueSet = new HashSet<Data>(c.size());
        for (Object o : c) {
            throwExceptionIfNull(o);
            valueSet.add(toData(o));
        }
        final CollectionContainsRequest request = new CollectionContainsRequest(getName(), valueSet);
        final Boolean result = invoke(request);
        return result;
    }

    public boolean addAll(Collection<? extends E> c) {
        throwExceptionIfNull(c);
        final List<Data> valueList = new ArrayList<Data>(c.size());
        for (E e : c) {
            throwExceptionIfNull(e);
            valueList.add(toData(e));
        }
        final CollectionAddAllRequest request = new CollectionAddAllRequest(getName(), valueList);
        final Boolean result = invoke(request);
        return result;
    }

    public boolean removeAll(Collection<?> c) {
        return compareAndRemove(false, c);
    }

    public boolean retainAll(Collection<?> c) {
        return compareAndRemove(true, c);
    }

    // shared implementation of removeAll/retainAll: 'retain' selects the mode
    private boolean compareAndRemove(boolean retain, Collection<?> c) {
        throwExceptionIfNull(c);
        final Set<Data> valueSet = new HashSet<Data>();
        for (Object o : c) {
            throwExceptionIfNull(o);
            valueSet.add(toData(o));
        }
        final CollectionCompareAndRemoveRequest request = new CollectionCompareAndRemoveRequest(getName(), valueSet, retain);
        final Boolean result = invoke(request);
        return result;
    }

    public void clear() {
        final CollectionClearRequest request = new CollectionClearRequest(getName());
        invoke(request);
    }

    /**
     * Registers an item listener on the cluster and adapts incoming portable
     * events into {@link ItemEvent}s for the caller.
     *
     * @param listener     callback for add/remove events
     * @param includeValue whether events should carry the deserialized item
     * @return the listener registration id, usable with {@link #removeItemListener}
     */
    public String addItemListener(final ItemListener<E> listener, final boolean includeValue) {
        final CollectionAddListenerRequest request = new CollectionAddListenerRequest(getName(), includeValue);
        request.setServiceName(getServiceName());
        EventHandler<PortableItemEvent> eventHandler = new EventHandler<PortableItemEvent>() {
            public void handle(PortableItemEvent portableItemEvent) {
                E item = includeValue ? (E) getContext().getSerializationService().toObject(portableItemEvent.getItem()) : null;
                Member member = getContext().getClusterService().getMember(portableItemEvent.getUuid());
                ItemEvent<E> itemEvent = new ItemEvent<E>(getName(), portableItemEvent.getEventType(), item, member);
                if (portableItemEvent.getEventType() == ItemEventType.ADDED) {
                    listener.itemAdded(itemEvent);
                } else {
                    listener.itemRemoved(itemEvent);
                }
            }

            @Override
            public void onListenerRegister() {

            }
        };
        return listen(request, getPartitionKey(), eventHandler);
    }

    public boolean removeItemListener(String registrationId) {
        final CollectionRemoveListenerRequest request = new CollectionRemoveListenerRequest(getName(),
                registrationId, getServiceName());
        return stopListening(request, registrationId);
    }

    protected void onDestroy() {
    }

    // stamps the service name on collection requests before routing them
    protected <T> T invoke(ClientRequest req) {
        if (req instanceof CollectionRequest) {
            CollectionRequest request = (CollectionRequest) req;
            request.setServiceName(getServiceName());
        }
        return super.invoke(req, getPartitionKey());
    }

    // fetches and deserializes the whole collection from the owning member
    private Collection<E> getAll() {
        final CollectionGetAllRequest request = new CollectionGetAllRequest(getName());
        final SerializableCollection result = invoke(request);
        final Collection<Data> collection = result.getCollection();
        final ArrayList<E> list = new ArrayList<E>(collection.size());
        for (Data value : collection) {
            list.add((E) toObject(value));
        }
        return list;
    }
}
| hazelcast-client_src_main_java_com_hazelcast_client_proxy_AbstractClientCollectionProxy.java |
842 | searchServiceTransportAction.sendFreeContext(node, target.v2(), request, new ActionListener<Boolean>() {
@Override
public void onResponse(Boolean success) {
onFreedContext();
}
@Override
public void onFailure(Throwable e) {
onFailedFreedContext(e, node);
}
}); | 1no label
| src_main_java_org_elasticsearch_action_search_TransportClearScrollAction.java |
42 | static final class ModuleProposal extends CompletionProposal {
private final int len;
private final String versioned;
private final ModuleDetails module;
private final boolean withBody;
private final ModuleVersionDetails version;
private final String name;
private Node node;
ModuleProposal(int offset, String prefix, int len,
String versioned, ModuleDetails module,
boolean withBody, ModuleVersionDetails version,
String name, Node node) {
super(offset, prefix, MODULE, versioned,
versioned.substring(len));
this.len = len;
this.versioned = versioned;
this.module = module;
this.withBody = withBody;
this.version = version;
this.name = name;
this.node = node;
}
@Override
public String getDisplayString() {
String str = super.getDisplayString();
/*if (withBody &&
EditorsUI.getPreferenceStore()
.getBoolean(LINKED_MODE)) {
str = str.replaceAll("\".*\"", "\"<...>\"");
}*/
return str;
}
@Override
public Point getSelection(IDocument document) {
final int off = offset+versioned.length()-prefix.length()-len;
if (withBody) {
final int verlen = version.getVersion().length();
return new Point(off-verlen-2, verlen);
}
else {
return new Point(off, 0);
}
}
@Override
public void apply(IDocument document) {
super.apply(document);
if (withBody && //module.getVersions().size()>1 && //TODO: put this back in when sure it works
EditorsUI.getPreferenceStore()
.getBoolean(LINKED_MODE)) {
final LinkedModeModel linkedModeModel = new LinkedModeModel();
final Point selection = getSelection(document);
List<ICompletionProposal> proposals = new ArrayList<ICompletionProposal>();
for (final ModuleVersionDetails d: module.getVersions()) {
proposals.add(new ICompletionProposal() {
@Override
public Point getSelection(IDocument document) {
return null;
}
@Override
public Image getImage() {
return CeylonResources.VERSION;
}
@Override
public String getDisplayString() {
return d.getVersion();
}
@Override
public IContextInformation getContextInformation() {
return null;
}
@Override
public String getAdditionalProposalInfo() {
return "Repository: " + d.getOrigin();
}
@Override
public void apply(IDocument document) {
try {
document.replace(selection.x, selection.y,
d.getVersion());
}
catch (BadLocationException e) {
e.printStackTrace();
}
linkedModeModel.exit(ILinkedModeListener.UPDATE_CARET);
}
});
}
ProposalPosition linkedPosition =
new ProposalPosition(document, selection.x, selection.y, 0,
proposals.toArray(NO_COMPLETIONS));
try {
LinkedMode.addLinkedPosition(linkedModeModel, linkedPosition);
LinkedMode.installLinkedMode((CeylonEditor) EditorUtil.getCurrentEditor(),
document, linkedModeModel, this, new LinkedMode.NullExitPolicy(),
1, selection.x+selection.y+2);
}
catch (BadLocationException ble) {
ble.printStackTrace();
}
}
}
@Override
public String getAdditionalProposalInfo() {
Scope scope = node.getScope();
Unit unit = node.getUnit();
return JDKUtils.isJDKModule(name) ?
getDocumentationForModule(name, JDKUtils.jdk.version,
"This module forms part of the Java SDK.",
scope, unit) :
getDocumentationFor(module, version.getVersion(),
scope, unit);
}
@Override
protected boolean qualifiedNameIsPath() {
return true;
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_ModuleCompletions.java |
1,112 | public class OSQLFunctionAverage extends OSQLFunctionMathAbstract {
public static final String NAME = "avg";
private Number sum;
private int total = 0;
public OSQLFunctionAverage() {
super(NAME, 1, -1);
}
public Object execute(OIdentifiable iCurrentRecord, Object iCurrentResult, final Object[] iParameters, OCommandContext iContext) {
if (iParameters.length == 1) {
if (iParameters[0] instanceof Number)
sum((Number) iParameters[0]);
else if (OMultiValue.isMultiValue(iParameters[0]))
for (Object n : OMultiValue.getMultiValueIterable(iParameters[0]))
sum((Number) n);
} else {
sum = null;
for (int i = 0; i < iParameters.length; ++i)
sum((Number) iParameters[i]);
}
return getResult();
}
protected void sum(Number value) {
if (value != null) {
total++;
if (sum == null)
// FIRST TIME
sum = value;
else
sum = OType.increment(sum, value);
}
}
public String getSyntax() {
return "Syntax error: avg(<field> [,<field>*])";
}
@Override
public Object getResult() {
if (returnDistributedResult()) {
final Map<String, Object> doc = new HashMap<String, Object>();
doc.put("sum", sum);
doc.put("total", total);
return doc;
} else {
if (sum instanceof Integer)
return sum.intValue() / total;
else if (sum instanceof Long)
return sum.longValue() / total;
else if (sum instanceof Float)
return sum.floatValue() / total;
else if (sum instanceof Double)
return sum.doubleValue() / total;
else if (sum instanceof BigDecimal)
return ((BigDecimal) sum).divide(new BigDecimal(total));
}
return null;
}
@SuppressWarnings("unchecked")
@Override
public Object mergeDistributedResult(List<Object> resultsToMerge) {
Number sum = null;
int total = 0;
for (Object iParameter : resultsToMerge) {
final Map<String, Object> item = (Map<String, Object>) iParameter;
if (sum == null)
sum = (Number) item.get("sum");
else
sum = OType.increment(sum, (Number) item.get("sum"));
total += (Integer) item.get("total");
}
if (sum instanceof Integer)
return sum.intValue() / total;
else if (sum instanceof Long)
return sum.longValue() / total;
else if (sum instanceof Float)
return sum.floatValue() / total;
else if (sum instanceof Double)
return sum.doubleValue() / total;
else if (sum instanceof BigDecimal)
return ((BigDecimal) sum).divide(new BigDecimal(total));
return null;
}
@Override
public boolean aggregateResults() {
return configuredParameters.length == 1;
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_sql_functions_math_OSQLFunctionAverage.java |
611 | public class UpdateSettingsResponse extends AcknowledgedResponse {
UpdateSettingsResponse() {
}
UpdateSettingsResponse(boolean acknowledged) {
super(acknowledged);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
readAcknowledged(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
writeAcknowledged(out);
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_indices_settings_put_UpdateSettingsResponse.java |
580 | executionService.scheduleWithFixedDelay(executorName, new Runnable() {
public void run() {
heartBeater();
}
}, heartbeatInterval, heartbeatInterval, TimeUnit.SECONDS); | 1no label
| hazelcast_src_main_java_com_hazelcast_cluster_ClusterServiceImpl.java |
3,387 | public class EventServiceImpl implements EventService {
private static final EventRegistration[] EMPTY_REGISTRATIONS = new EventRegistration[0];
private final ILogger logger;
private final NodeEngineImpl nodeEngine;
private final ConcurrentMap<String, EventServiceSegment> segments;
private final StripedExecutor eventExecutor;
private final int eventQueueTimeoutMs;
private final int eventThreadCount;
private final int eventQueueCapacity;
EventServiceImpl(NodeEngineImpl nodeEngine) {
this.nodeEngine = nodeEngine;
this.logger = nodeEngine.getLogger(EventService.class.getName());
final Node node = nodeEngine.getNode();
GroupProperties groupProperties = node.getGroupProperties();
this.eventThreadCount = groupProperties.EVENT_THREAD_COUNT.getInteger();
this.eventQueueCapacity = groupProperties.EVENT_QUEUE_CAPACITY.getInteger();
this.eventQueueTimeoutMs = groupProperties.EVENT_QUEUE_TIMEOUT_MILLIS.getInteger();
this.eventExecutor = new StripedExecutor(
node.getLogger(EventServiceImpl.class),
node.getThreadNamePrefix("event"),
node.threadGroup,
eventThreadCount,
eventQueueCapacity);
this.segments = new ConcurrentHashMap<String, EventServiceSegment>();
}
@Override
public int getEventThreadCount() {
return eventThreadCount;
}
@Override
public int getEventQueueCapacity() {
return eventQueueCapacity;
}
@Override
public int getEventQueueSize() {
return eventExecutor.getWorkQueueSize();
}
@Override
public EventRegistration registerLocalListener(String serviceName, String topic, Object listener) {
return registerListenerInternal(serviceName, topic, new EmptyFilter(), listener, true);
}
@Override
public EventRegistration registerLocalListener(String serviceName, String topic, EventFilter filter, Object listener) {
return registerListenerInternal(serviceName, topic, filter, listener, true);
}
@Override
public EventRegistration registerListener(String serviceName, String topic, Object listener) {
return registerListenerInternal(serviceName, topic, new EmptyFilter(), listener, false);
}
@Override
public EventRegistration registerListener(String serviceName, String topic, EventFilter filter, Object listener) {
return registerListenerInternal(serviceName, topic, filter, listener, false);
}
private EventRegistration registerListenerInternal(String serviceName, String topic, EventFilter filter,
Object listener, boolean localOnly) {
if (listener == null) {
throw new IllegalArgumentException("Listener required!");
}
if (filter == null) {
throw new IllegalArgumentException("EventFilter required!");
}
EventServiceSegment segment = getSegment(serviceName, true);
Registration reg = new Registration(UUID.randomUUID().toString(), serviceName, topic, filter,
nodeEngine.getThisAddress(), listener, localOnly);
if (segment.addRegistration(topic, reg)) {
if (!localOnly) {
invokeRegistrationOnOtherNodes(serviceName, reg);
}
return reg;
} else {
return null;
}
}
private boolean handleRegistration(Registration reg) {
if (nodeEngine.getThisAddress().equals(reg.getSubscriber())) {
return false;
}
EventServiceSegment segment = getSegment(reg.serviceName, true);
return segment.addRegistration(reg.topic, reg);
}
@Override
public boolean deregisterListener(String serviceName, String topic, Object id) {
final EventServiceSegment segment = getSegment(serviceName, false);
if (segment != null) {
final Registration reg = segment.removeRegistration(topic, String.valueOf(id));
if (reg != null && !reg.isLocalOnly()) {
invokeDeregistrationOnOtherNodes(serviceName, topic, String.valueOf(id));
}
return reg != null;
}
return false;
}
@Override
public void deregisterAllListeners(String serviceName, String topic) {
final EventServiceSegment segment = getSegment(serviceName, false);
if (segment != null) {
segment.removeRegistrations(topic);
}
}
private void deregisterSubscriber(String serviceName, String topic, String id) {
final EventServiceSegment segment = getSegment(serviceName, false);
if (segment != null) {
segment.removeRegistration(topic, id);
}
}
private void invokeRegistrationOnOtherNodes(String serviceName, Registration reg) {
Collection<MemberImpl> members = nodeEngine.getClusterService().getMemberList();
Collection<Future> calls = new ArrayList<Future>(members.size());
for (MemberImpl member : members) {
if (!member.localMember()) {
Future f = nodeEngine.getOperationService().invokeOnTarget(serviceName,
new RegistrationOperation(reg), member.getAddress());
calls.add(f);
}
}
for (Future f : calls) {
try {
f.get(5, TimeUnit.SECONDS);
} catch (InterruptedException ignored) {
} catch (TimeoutException ignored) {
} catch (MemberLeftException e) {
logger.finest("Member left while registering listener...", e);
} catch (ExecutionException e) {
throw new HazelcastException(e);
}
}
}
private void invokeDeregistrationOnOtherNodes(String serviceName, String topic, String id) {
Collection<MemberImpl> members = nodeEngine.getClusterService().getMemberList();
Collection<Future> calls = new ArrayList<Future>(members.size());
for (MemberImpl member : members) {
if (!member.localMember()) {
Future f = nodeEngine.getOperationService().invokeOnTarget(serviceName,
new DeregistrationOperation(topic, id), member.getAddress());
calls.add(f);
}
}
for (Future f : calls) {
try {
f.get(5, TimeUnit.SECONDS);
} catch (InterruptedException ignored) {
} catch (TimeoutException ignored) {
} catch (MemberLeftException e) {
logger.finest("Member left while de-registering listener...", e);
} catch (ExecutionException e) {
throw new HazelcastException(e);
}
}
}
@Override
public EventRegistration[] getRegistrationsAsArray(String serviceName, String topic) {
final EventServiceSegment segment = getSegment(serviceName, false);
if (segment != null) {
final Collection<Registration> registrations = segment.getRegistrations(topic, false);
return registrations != null && !registrations.isEmpty()
? registrations.toArray(new Registration[registrations.size()])
: EMPTY_REGISTRATIONS;
}
return EMPTY_REGISTRATIONS;
}
@Override
public Collection<EventRegistration> getRegistrations(String serviceName, String topic) {
final EventServiceSegment segment = getSegment(serviceName, false);
if (segment != null) {
final Collection<Registration> registrations = segment.getRegistrations(topic, false);
return registrations != null && !registrations.isEmpty()
? Collections.<EventRegistration>unmodifiableCollection(registrations)
: Collections.<EventRegistration>emptySet();
}
return Collections.emptySet();
}
@Override
public void publishEvent(String serviceName, EventRegistration registration, Object event, int orderKey) {
if (!(registration instanceof Registration)) {
throw new IllegalArgumentException();
}
final Registration reg = (Registration) registration;
if (isLocal(reg)) {
executeLocal(serviceName, event, reg, orderKey);
} else {
final Address subscriber = registration.getSubscriber();
sendEventPacket(subscriber, new EventPacket(registration.getId(), serviceName, event), orderKey);
}
}
@Override
public void publishEvent(String serviceName, Collection<EventRegistration> registrations, Object event, int orderKey) {
final Iterator<EventRegistration> iter = registrations.iterator();
Data eventData = null;
while (iter.hasNext()) {
EventRegistration registration = iter.next();
if (!(registration instanceof Registration)) {
throw new IllegalArgumentException();
}
final Registration reg = (Registration) registration;
if (isLocal(reg)) {
executeLocal(serviceName, event, reg, orderKey);
} else {
if (eventData == null) {
eventData = nodeEngine.toData(event);
}
final Address subscriber = registration.getSubscriber();
sendEventPacket(subscriber, new EventPacket(registration.getId(), serviceName, eventData), orderKey);
}
}
}
private void executeLocal(String serviceName, Object event, Registration reg, int orderKey) {
if (nodeEngine.isActive()) {
try {
if (reg.listener != null) {
eventExecutor.execute(new LocalEventDispatcher(serviceName, event, reg.listener, orderKey, eventQueueTimeoutMs));
} else {
logger.warning("Something seems wrong! Listener instance is null! -> " + reg);
}
} catch (RejectedExecutionException e) {
if (eventExecutor.isLive()) {
logger.warning("EventQueue overloaded! " + event + " failed to publish to " + reg.serviceName + ":" + reg.topic);
}
}
}
}
private void sendEventPacket(Address subscriber, EventPacket eventPacket, int orderKey) {
final String serviceName = eventPacket.serviceName;
final EventServiceSegment segment = getSegment(serviceName, true);
boolean sync = segment.incrementPublish() % 100000 == 0;
if (sync) {
Future f = nodeEngine.getOperationService().createInvocationBuilder(serviceName,
new SendEventOperation(eventPacket, orderKey), subscriber).setTryCount(50).invoke();
try {
f.get(3, TimeUnit.SECONDS);
} catch (Exception ignored) {
}
} else {
final Packet packet = new Packet(nodeEngine.toData(eventPacket), orderKey, nodeEngine.getSerializationContext());
packet.setHeader(Packet.HEADER_EVENT);
nodeEngine.send(packet, subscriber);
}
}
private EventServiceSegment getSegment(String service, boolean forceCreate) {
EventServiceSegment segment = segments.get(service);
if (segment == null && forceCreate) {
return ConcurrencyUtil.getOrPutIfAbsent(segments, service, new ConstructorFunction<String, EventServiceSegment>() {
public EventServiceSegment createNew(String key) {
return new EventServiceSegment(key);
}
});
}
return segment;
}
private boolean isLocal(Registration reg) {
return nodeEngine.getThisAddress().equals(reg.getSubscriber());
}
@PrivateApi
void executeEvent(Runnable eventRunnable) {
if (nodeEngine.isActive()) {
try {
eventExecutor.execute(eventRunnable);
} catch (RejectedExecutionException e) {
if (eventExecutor.isLive()) {
logger.warning("EventQueue overloaded! Failed to execute event process: " + eventRunnable);
}
}
}
}
@PrivateApi
void handleEvent(Packet packet) {
try {
eventExecutor.execute(new RemoteEventPacketProcessor(packet));
} catch (RejectedExecutionException e) {
if (eventExecutor.isLive()) {
final Connection conn = packet.getConn();
String endpoint = conn.getEndPoint() != null ? conn.getEndPoint().toString() : conn.toString();
logger.warning("EventQueue overloaded! Failed to process event packet sent from: " + endpoint);
}
}
}
public PostJoinRegistrationOperation getPostJoinOperation() {
final Collection<Registration> registrations = new LinkedList<Registration>();
for (EventServiceSegment segment : segments.values()) {
for (Registration reg : segment.registrationIdMap.values()) {
if (!reg.isLocalOnly()) {
registrations.add(reg);
}
}
}
return registrations.isEmpty() ? null : new PostJoinRegistrationOperation(registrations);
}
void shutdown() {
logger.finest("Stopping event executor...");
eventExecutor.shutdown();
for (EventServiceSegment segment : segments.values()) {
segment.clear();
}
segments.clear();
}
void onMemberLeft(MemberImpl member) {
final Address address = member.getAddress();
for (EventServiceSegment segment : segments.values()) {
segment.onMemberLeft(address);
}
}
private static class EventServiceSegment {
final String serviceName;
final ConcurrentMap<String, Collection<Registration>> registrations
= new ConcurrentHashMap<String, Collection<Registration>>();
final ConcurrentMap<String, Registration> registrationIdMap = new ConcurrentHashMap<String, Registration>();
final AtomicInteger totalPublishes = new AtomicInteger();
EventServiceSegment(String serviceName) {
this.serviceName = serviceName;
}
private Collection<Registration> getRegistrations(String topic, boolean forceCreate) {
Collection<Registration> listenerList = registrations.get(topic);
if (listenerList == null && forceCreate) {
return ConcurrencyUtil.getOrPutIfAbsent(registrations, topic, new ConstructorFunction<String, Collection<Registration>>() {
public Collection<Registration> createNew(String key) {
return Collections.newSetFromMap(new ConcurrentHashMap<Registration, Boolean>());
}
});
}
return listenerList;
}
private boolean addRegistration(String topic, Registration registration) {
final Collection<Registration> registrations = getRegistrations(topic, true);
if (registrations.add(registration)) {
registrationIdMap.put(registration.id, registration);
return true;
}
return false;
}
private Registration removeRegistration(String topic, String id) {
final Registration registration = registrationIdMap.remove(id);
if (registration != null) {
final Collection<Registration> all = registrations.get(topic);
if (all != null) {
all.remove(registration);
}
}
return registration;
}
void removeRegistrations(String topic) {
final Collection<Registration> all = registrations.remove(topic);
if (all != null) {
for (Registration reg : all) {
registrationIdMap.remove(reg.getId());
}
}
}
void clear() {
registrations.clear();
registrationIdMap.clear();
}
void onMemberLeft(Address address) {
for (Collection<Registration> all : registrations.values()) {
Iterator<Registration> iter = all.iterator();
while (iter.hasNext()) {
Registration reg = iter.next();
if (address.equals(reg.getSubscriber())) {
iter.remove();
registrationIdMap.remove(reg.id);
}
}
}
}
int incrementPublish() {
return totalPublishes.incrementAndGet();
}
}
private class EventPacketProcessor implements StripedRunnable {
private EventPacket eventPacket;
int orderKey;
private EventPacketProcessor() {
}
public EventPacketProcessor(EventPacket packet, int orderKey) {
this.eventPacket = packet;
this.orderKey = orderKey;
}
@Override
public void run() {
process(eventPacket);
}
void process(EventPacket eventPacket) {
Object eventObject = eventPacket.event;
if (eventObject instanceof Data) {
eventObject = nodeEngine.toObject(eventObject);
}
final String serviceName = eventPacket.serviceName;
EventPublishingService<Object, Object> service = nodeEngine.getService(serviceName);
if (service == null) {
if (nodeEngine.isActive()) {
logger.warning("There is no service named: " + serviceName);
}
return;
}
EventServiceSegment segment = getSegment(serviceName, false);
if (segment == null) {
if (nodeEngine.isActive()) {
logger.warning("No service registration found for " + serviceName);
}
return;
}
Registration registration = segment.registrationIdMap.get(eventPacket.id);
if (registration == null) {
if (nodeEngine.isActive()) {
if (logger.isFinestEnabled()) {
logger.finest("No registration found for " + serviceName + " / " + eventPacket.id);
}
}
return;
}
if (!isLocal(registration)) {
logger.severe("Invalid target for " + registration);
return;
}
if (registration.listener == null) {
logger.warning("Something seems wrong! Subscriber is local but listener instance is null! -> " + registration);
return;
}
service.dispatchEvent(eventObject, registration.listener);
}
@Override
public int getKey() {
return orderKey;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder("EventPacketProcessor{");
sb.append("eventPacket=").append(eventPacket);
sb.append('}');
return sb.toString();
}
}
private class RemoteEventPacketProcessor extends EventPacketProcessor implements StripedRunnable {
private Packet packet;
public RemoteEventPacketProcessor(Packet packet) {
this.packet = packet;
this.orderKey = packet.getPartitionId();
}
@Override
public void run() {
Data data = packet.getData();
EventPacket eventPacket = (EventPacket) nodeEngine.toObject(data);
process(eventPacket);
}
}
private class LocalEventDispatcher implements StripedRunnable, TimeoutRunnable {
final String serviceName;
final Object event;
final Object listener;
final int orderKey;
final long timeoutMs;
private LocalEventDispatcher(String serviceName, Object event, Object listener, int orderKey, long timeoutMs) {
this.serviceName = serviceName;
this.event = event;
this.listener = listener;
this.orderKey = orderKey;
this.timeoutMs = timeoutMs;
}
@Override
public long getTimeout() {
return timeoutMs;
}
@Override
public TimeUnit getTimeUnit() {
return TimeUnit.MILLISECONDS;
}
@Override
public final void run() {
final EventPublishingService<Object, Object> service = nodeEngine.getService(serviceName);
if (service != null) {
service.dispatchEvent(event, listener);
} else {
if (nodeEngine.isActive()) {
throw new IllegalArgumentException("Service[" + serviceName + "] could not be found!");
}
}
}
@Override
public int getKey() {
return orderKey;
}
}
public static class Registration implements EventRegistration {
private String id;
private String serviceName;
private String topic;
private EventFilter filter;
private Address subscriber;
private transient boolean localOnly;
private transient Object listener;
public Registration() {
}
public Registration(String id, String serviceName, String topic,
EventFilter filter, Address subscriber, Object listener, boolean localOnly) {
this.filter = filter;
this.id = id;
this.listener = listener;
this.serviceName = serviceName;
this.topic = topic;
this.subscriber = subscriber;
this.localOnly = localOnly;
}
@Override
public EventFilter getFilter() {
return filter;
}
@Override
public String getId() {
return id;
}
@Override
public Address getSubscriber() {
return subscriber;
}
@Override
public boolean isLocalOnly() {
return localOnly;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Registration that = (Registration) o;
if (id != null ? !id.equals(that.id) : that.id != null) return false;
if (serviceName != null ? !serviceName.equals(that.serviceName) : that.serviceName != null) return false;
if (topic != null ? !topic.equals(that.topic) : that.topic != null) return false;
if (filter != null ? !filter.equals(that.filter) : that.filter != null) return false;
if (subscriber != null ? !subscriber.equals(that.subscriber) : that.subscriber != null) return false;
return true;
}
@Override
public int hashCode() {
int result = id != null ? id.hashCode() : 0;
result = 31 * result + (serviceName != null ? serviceName.hashCode() : 0);
result = 31 * result + (topic != null ? topic.hashCode() : 0);
result = 31 * result + (filter != null ? filter.hashCode() : 0);
result = 31 * result + (subscriber != null ? subscriber.hashCode() : 0);
return result;
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
out.writeUTF(id);
out.writeUTF(serviceName);
out.writeUTF(topic);
subscriber.writeData(out);
out.writeObject(filter);
}
@Override
public void readData(ObjectDataInput in) throws IOException {
id = in.readUTF();
serviceName = in.readUTF();
topic = in.readUTF();
subscriber = new Address();
subscriber.readData(in);
filter = in.readObject();
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append("Registration");
sb.append("{filter=").append(filter);
sb.append(", id='").append(id).append('\'');
sb.append(", serviceName='").append(serviceName).append('\'');
sb.append(", subscriber=").append(subscriber);
sb.append(", listener=").append(listener);
sb.append('}');
return sb.toString();
}
}
public final static class EventPacket implements IdentifiedDataSerializable {
private String id;
private String serviceName;
private Object event;
public EventPacket() {
}
EventPacket(String id, String serviceName, Object event) {
this.event = event;
this.id = id;
this.serviceName = serviceName;
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
out.writeUTF(id);
out.writeUTF(serviceName);
out.writeObject(event);
}
@Override
public void readData(ObjectDataInput in) throws IOException {
id = in.readUTF();
serviceName = in.readUTF();
event = in.readObject();
}
@Override
public int getFactoryId() {
return SpiDataSerializerHook.F_ID;
}
@Override
public int getId() {
return SpiDataSerializerHook.EVENT_PACKET;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder("EventPacket{");
sb.append("id='").append(id).append('\'');
sb.append(", serviceName='").append(serviceName).append('\'');
sb.append(", event=").append(event);
sb.append('}');
return sb.toString();
}
}
public static final class EmptyFilter implements EventFilter, DataSerializable {
public boolean eval(Object arg) {
return true;
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
}
@Override
public void readData(ObjectDataInput in) throws IOException {
}
@Override
public boolean equals(Object obj) {
return obj instanceof EmptyFilter;
}
@Override
public int hashCode() {
return 0;
}
}
public static class SendEventOperation extends AbstractOperation {
private EventPacket eventPacket;
private int orderKey;
public SendEventOperation() {
}
public SendEventOperation(EventPacket eventPacket, int orderKey) {
this.eventPacket = eventPacket;
this.orderKey = orderKey;
}
@Override
public void run() throws Exception {
EventServiceImpl eventService = (EventServiceImpl) getNodeEngine().getEventService();
eventService.executeEvent(eventService.new EventPacketProcessor(eventPacket, orderKey));
}
@Override
public boolean returnsResponse() {
return true;
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
super.writeInternal(out);
eventPacket.writeData(out);
out.writeInt(orderKey);
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
super.readInternal(in);
eventPacket = new EventPacket();
eventPacket.readData(in);
orderKey = in.readInt();
}
}
public static class RegistrationOperation extends AbstractOperation {
private Registration registration;
private boolean response = false;
public RegistrationOperation() {
}
private RegistrationOperation(Registration registration) {
this.registration = registration;
}
@Override
public void run() throws Exception {
EventServiceImpl eventService = (EventServiceImpl) getNodeEngine().getEventService();
response = eventService.handleRegistration(registration);
}
@Override
public Object getResponse() {
return response;
}
@Override
public boolean returnsResponse() {
return true;
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
registration.writeData(out);
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
registration = new Registration();
registration.readData(in);
}
}
public static class DeregistrationOperation extends AbstractOperation {
private String topic;
private String id;
DeregistrationOperation() {
}
private DeregistrationOperation(String topic, String id) {
this.topic = topic;
this.id = id;
}
@Override
public void run() throws Exception {
EventServiceImpl eventService = (EventServiceImpl) getNodeEngine().getEventService();
eventService.deregisterSubscriber(getServiceName(), topic, id);
}
@Override
public Object getResponse() {
return true;
}
@Override
public boolean returnsResponse() {
return true;
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
out.writeUTF(topic);
out.writeUTF(id);
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
topic = in.readUTF();
id = in.readUTF();
}
}
public static class PostJoinRegistrationOperation extends AbstractOperation {
private Collection<Registration> registrations;
public PostJoinRegistrationOperation() {
}
public PostJoinRegistrationOperation(Collection<Registration> registrations) {
this.registrations = registrations;
}
@Override
public void run() throws Exception {
if (registrations != null && registrations.size() > 0) {
NodeEngineImpl nodeEngine = (NodeEngineImpl) getNodeEngine();
EventServiceImpl eventService = nodeEngine.eventService;
for (Registration reg : registrations) {
eventService.handleRegistration(reg);
}
}
}
@Override
public boolean returnsResponse() {
return false;
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
super.writeInternal(out);
int len = registrations != null ? registrations.size() : 0;
out.writeInt(len);
if (len > 0) {
for (Registration reg : registrations) {
reg.writeData(out);
}
}
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
super.readInternal(in);
int len = in.readInt();
if (len > 0) {
registrations = new ArrayList<Registration>(len);
for (int i = 0; i < len; i++) {
Registration reg = new Registration();
registrations.add(reg);
reg.readData(in);
}
}
}
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_spi_impl_EventServiceImpl.java |
// JPA entity mapping a rule (match-rule expression + required quantity) that qualifies
// order items for a CMS page.
111 | @Entity
@Table(name = "BLC_PAGE_ITEM_CRITERIA")
@Inheritance(strategy=InheritanceType.JOINED)
@AdminPresentationClass(friendlyName = "PageItemCriteriaImpl_basePageItemCriteria")
public class PageItemCriteriaImpl implements PageItemCriteria {
public static final long serialVersionUID = 1L;
// Table-generated primary key; hidden from all admin views.
@Id
@GeneratedValue(generator= "PageItemCriteriaId")
@GenericGenerator(
name="PageItemCriteriaId",
strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name="segment_value", value="PageItemCriteriaImpl"),
@Parameter(name="entity_name", value="org.broadleafcommerce.cms.page.domain.PageItemCriteriaImpl")
}
)
@Column(name = "PAGE_ITEM_CRITERIA_ID")
@AdminPresentation(friendlyName = "PageItemCriteriaImpl_Item_Criteria_Id", group = "PageItemCriteriaImpl_Description", visibility =VisibilityEnum.HIDDEN_ALL)
protected Long id;
// Number of matching order items required for this criteria to be satisfied.
@Column(name = "QUANTITY", nullable=false)
@AdminPresentation(friendlyName = "PageItemCriteriaImpl_Quantity", group = "PageItemCriteriaImpl_Description", visibility =VisibilityEnum.HIDDEN_ALL)
protected Integer quantity;
// Match-rule expression stored as a CLOB; evaluated elsewhere against order items.
@Lob
@Type(type = "org.hibernate.type.StringClobType")
@Column(name = "ORDER_ITEM_MATCH_RULE", length = Integer.MAX_VALUE - 1)
@AdminPresentation(friendlyName = "PageItemCriteriaImpl_Order_Item_Match_Rule", group = "PageItemCriteriaImpl_Description", visibility = VisibilityEnum.HIDDEN_ALL)
protected String orderItemMatchRule;
// Owning page, joined through the BLC_QUAL_CRIT_PAGE_XREF cross-reference table.
@ManyToOne(targetEntity = PageImpl.class)
@JoinTable(name = "BLC_QUAL_CRIT_PAGE_XREF", joinColumns = @JoinColumn(name = "PAGE_ITEM_CRITERIA_ID"), inverseJoinColumns = @JoinColumn(name = "PAGE_ID"))
protected Page page;
@Override
public Long getId() {
return id;
}
@Override
public void setId(Long id) {
this.id = id;
}
@Override
public Integer getQuantity() {
return quantity;
}
@Override
public void setQuantity(Integer receiveQuantity) {
this.quantity = receiveQuantity;
}
@Override
public String getMatchRule() {
return orderItemMatchRule;
}
@Override
public void setMatchRule(String matchRule) {
this.orderItemMatchRule = matchRule;
}
@Override
public Page getPage() {
return page;
}
@Override
public void setPage(Page page) {
this.page = page;
}
// hashCode/equals deliberately ignore the page association: two criteria are equal when
// their ids match, or - for unsaved instances - when rule and quantity both match.
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((id == null) ? 0 : id.hashCode());
result = prime * result + ((orderItemMatchRule == null) ? 0 : orderItemMatchRule.hashCode());
result = prime * result + ((quantity == null) ? 0 : quantity.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
PageItemCriteriaImpl other = (PageItemCriteriaImpl) obj;
// When both sides are persisted, identity is decided by the database id alone.
if (id != null && other.id != null) {
return id.equals(other.id);
}
if (orderItemMatchRule == null) {
if (other.orderItemMatchRule != null)
return false;
} else if (!orderItemMatchRule.equals(other.orderItemMatchRule))
return false;
if (quantity == null) {
if (other.quantity != null)
return false;
} else if (!quantity.equals(other.quantity))
return false;
return true;
}
// Copies only the rule fields; id and page are left unset so the clone behaves as a new,
// unattached entity. NOTE(review): presumably the caller re-associates the clone with a
// page - confirm at the call sites.
@Override
public PageItemCriteria cloneEntity() {
PageItemCriteriaImpl newField = new PageItemCriteriaImpl();
newField.quantity = quantity;
newField.orderItemMatchRule = orderItemMatchRule;
return newField;
}
}
| admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_page_domain_PageItemCriteriaImpl.java |
308 | public class MergeClassPathXMLApplicationContext extends AbstractMergeXMLApplicationContext {
public MergeClassPathXMLApplicationContext(ApplicationContext parent) {
super(parent);
}
/**
* Create a new MergeClassPathXMLApplicationContext, loading the definitions from the given definitions. Note,
* all sourceLocation files will be merged using standard Spring configuration override rules. However, the patch
* files are fully merged into the result of the sourceLocations simple merge. Patch merges are first executed according
* to beans with the same id. Subsequent merges within a bean are executed against tagnames - ignoring any
* further id attributes.
*
* @param sourceLocations array of relative (or absolute) paths within the class path for the source application context files
* @param patchLocations array of relative (or absolute) paths within the class path for the patch application context files
* @throws BeansException
*/
public MergeClassPathXMLApplicationContext(String[] sourceLocations, String[] patchLocations) throws BeansException {
this(sourceLocations, patchLocations, null);
}
/**
* Create a new MergeClassPathXMLApplicationContext, loading the definitions from the given definitions. Note,
* all sourceLocation files will be merged using standard Spring configuration override rules. However, the patch
* files are fully merged into the result of the sourceLocations simple merge. Patch merges are first executed according
* to beans with the same id. Subsequent merges within a bean are executed against tagnames - ignoring any
* further id attributes.
*
* @param sourceLocations array of relative (or absolute) paths within the class path for the source application context files
* @param patchLocations array of relative (or absolute) paths within the class path for the patch application context files
* @param parent the parent context
* @throws BeansException
*/
public MergeClassPathXMLApplicationContext(String[] sourceLocations, String[] patchLocations, ApplicationContext parent) throws BeansException {
this(parent);
ResourceInputStream[] sources = new ResourceInputStream[sourceLocations.length];
for (int j=0;j<sourceLocations.length;j++){
sources[j] = new ResourceInputStream(getClassLoader(parent).getResourceAsStream(sourceLocations[j]), sourceLocations[j]);
}
ResourceInputStream[] patches = new ResourceInputStream[patchLocations.length];
for (int j=0;j<patches.length;j++){
patches[j] = new ResourceInputStream(getClassLoader(parent).getResourceAsStream(patchLocations[j]), patchLocations[j]);
}
ImportProcessor importProcessor = new ImportProcessor(this);
try {
sources = importProcessor.extract(sources);
patches = importProcessor.extract(patches);
} catch (MergeException e) {
throw new FatalBeanException("Unable to merge source and patch locations", e);
}
this.configResources = new MergeApplicationContextXmlConfigResource().getConfigResources(sources, patches);
refresh();
}
/**
* This could be advantageous for subclasses to override in order to utilize the parent application context. By default,
* this utilizes the class loader for the current class.
*/
protected ClassLoader getClassLoader(ApplicationContext parent) {
return MergeClassPathXMLApplicationContext.class.getClassLoader();
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_extensibility_context_MergeClassPathXMLApplicationContext.java |
// Thrown when an operation exceeds its allotted time. Extends ONeedRetryException, so
// callers that catch the parent type may treat a timeout as a retriable condition.
52 | public class OTimeoutException extends ONeedRetryException {
private static final long serialVersionUID = 1L;
// Standard exception constructor quartet; all delegate straight to the superclass.
public OTimeoutException() {
super();
}
public OTimeoutException(String message, Throwable cause) {
super(message, cause);
}
public OTimeoutException(String message) {
super(message);
}
public OTimeoutException(Throwable cause) {
super(cause);
}
}
| commons_src_main_java_com_orientechnologies_common_concur_OTimeoutException.java |
809 | public class PlotViewFactory {
private final static Logger logger = LoggerFactory.getLogger(PlotViewFactory.class);
/**
* Create the plot from persisted settings if available. Otherwise, create a default plot.
*/
static PlotView createPlot(PlotSettings settings, long currentTime, PlotViewManifestation parentManifestation,
int numberOfSubPlots, PlotView oldPlot, AbbreviatingPlotLabelingAlgorithm plotLabelingAlgorithm) {
PlotView thePlot;
// Insure we always have at least one plot.
numberOfSubPlots = Math.max(1,numberOfSubPlots);
if (!settings.isNull()) {
// The plot has persisted settings so apply them.
if (!settings.pinTimeAxis) {
adjustPlotStartAndEndTimeToMatchCurrentTime(settings, currentTime);
}
thePlot = createPlotFromSettings(settings, numberOfSubPlots, plotLabelingAlgorithm);
} else {
// Setup a default plot to view while the user is configuring it.
thePlot = new PlotView.Builder(PlotterPlot.class).
numberOfSubPlots(numberOfSubPlots).
timeVariableAxisMaxValue(currentTime).
timeVariableAxisMinValue(currentTime - PlotConstants.DEFAUlT_PLOT_SPAN).
plotLabelingAlgorithm(plotLabelingAlgorithm).build();
}
thePlot.setManifestation(parentManifestation);
thePlot.setPlotLabelingAlgorithm(plotLabelingAlgorithm);
assert thePlot!=null : "Plot labeling algorithm should NOT be NULL at this point.";
logger.debug("plotLabelingAlgorithm.getPanelContextTitleList().size()="
+ plotLabelingAlgorithm.getPanelContextTitleList().size()
+ ", plotLabelingAlgorithm.getCanvasContextTitleList().size()=" + plotLabelingAlgorithm.getCanvasContextTitleList().size());
// Copy across feed mapping from old plot, unless structure is different
if (oldPlot!=null && oldPlot.subPlots.size() == numberOfSubPlots) {
for (String dataSetName: oldPlot.dataSetNameToSubGroupMap.keySet()) {
String nameLower = dataSetName.toLowerCase();
for(AbstractPlottingPackage plot : oldPlot.dataSetNameToSubGroupMap.get(dataSetName)) {
int indexInOldPlot = oldPlot.subPlots.indexOf(plot);
thePlot.addDataSet(indexInOldPlot, dataSetName, oldPlot.dataSetNameToDisplayMap.get(nameLower));
}
}
}
return thePlot;
}
/**
* Create the plot using the persisted settings.
*/
static PlotView createPlotFromSettings(PlotSettings settings, int numberOfSubPlots, AbbreviatingPlotLabelingAlgorithm plotLabelingAlgorithm) {
PlotView newPlot = new PlotView.Builder(PlotterPlot.class)
.axisOrientation(settings.timeAxisSetting)
.xAxisMaximumLocation(settings.xAxisMaximumLocation)
.yAxisMaximumLocation(settings.yAxisMaximumLocation)
.nonTimeVaribleAxisMaxValue(settings.maxNonTime)
.nonTimeVaribleAxisMinValue(settings.minNonTime)
.timeAxisBoundsSubsequentSetting(settings.timeAxisSubsequent)
.nonTimeAxisMinSubsequentSetting(settings.nonTimeAxisSubsequentMinSetting)
.nonTimeAxisMaxSubsequentSetting(settings.nonTimeAxisSubsequentMaxSetting)
.timeVariableAxisMaxValue(settings.maxTime)
.timeVariableAxisMinValue(settings.minTime)
.scrollRescaleMarginTimeAxis(settings.timePadding)
.scrollRescaleMarginNonTimeMaxAxis(settings.nonTimeMaxPadding)
.scrollRescaleMarginNonTimeMinAxis(settings.nonTimeMinPadding)
.numberOfSubPlots(numberOfSubPlots)
.useOrdinalPositionForSubplots(settings.ordinalPositionForStackedPlots)
.pinTimeAxis(settings.pinTimeAxis)
.plotLineDraw(settings.plotLineDraw)
.plotLineConnectionType(settings.plotLineConnectionType)
.plotLabelingAlgorithm(plotLabelingAlgorithm)
.build();
newPlot.setPlotLabelingAlgorithm(plotLabelingAlgorithm);
newPlot.setPlotLineDraw(settings.plotLineDraw);
newPlot.setPlotLineConnectionType(settings.plotLineConnectionType);
return newPlot;
}
private static void adjustPlotStartAndEndTimeToMatchCurrentTime(PlotSettings settings, long currentTime) {
if (settings.timeAxisSubsequent == TimeAxisSubsequentBoundsSetting.SCRUNCH) {
if (currentTime > settings.maxTime) {
// Fast forward to now on the upper bound.
settings.maxTime = currentTime;
}
}
}
} | 1no label
| fastPlotViews_src_main_java_gov_nasa_arc_mct_fastplot_view_PlotViewFactory.java |
// Transport action that fans a restart request out to cluster nodes. Note that doExecute
// unconditionally fails the request: the action is hard-disabled at the entry point, in
// addition to the "disabled" component setting checked in nodeOperation.
339 | public class TransportNodesRestartAction extends TransportNodesOperationAction<NodesRestartRequest, NodesRestartResponse, TransportNodesRestartAction.NodeRestartRequest, NodesRestartResponse.NodeRestartResponse> {
private final Node node;
private final boolean disabled;
// Guards against overlapping restarts on this node; reset when an in-process restart attempt finishes.
private AtomicBoolean restartRequested = new AtomicBoolean();
@Inject
public TransportNodesRestartAction(Settings settings, ClusterName clusterName, ThreadPool threadPool,
ClusterService clusterService, TransportService transportService,
Node node) {
super(settings, clusterName, threadPool, clusterService, transportService);
this.node = node;
disabled = componentSettings.getAsBoolean("disabled", false);
}
@Override
protected void doExecute(NodesRestartRequest nodesRestartRequest, ActionListener<NodesRestartResponse> listener) {
// Restart support is switched off entirely; every request fails fast here.
listener.onFailure(new ElasticsearchIllegalStateException("restart is disabled (for now) ...."));
}
@Override
protected String executor() {
return ThreadPool.Names.GENERIC;
}
@Override
protected String transportAction() {
return NodesRestartAction.NAME;
}
// Collects the per-node responses; non-response entries (failures) are silently dropped,
// consistent with accumulateExceptions() returning false below.
@Override
protected NodesRestartResponse newResponse(NodesRestartRequest nodesShutdownRequest, AtomicReferenceArray responses) {
final List<NodesRestartResponse.NodeRestartResponse> nodeRestartResponses = newArrayList();
for (int i = 0; i < responses.length(); i++) {
Object resp = responses.get(i);
if (resp instanceof NodesRestartResponse.NodeRestartResponse) {
nodeRestartResponses.add((NodesRestartResponse.NodeRestartResponse) resp);
}
}
return new NodesRestartResponse(clusterName, nodeRestartResponses.toArray(new NodesRestartResponse.NodeRestartResponse[nodeRestartResponses.size()]));
}
@Override
protected NodesRestartRequest newRequest() {
return new NodesRestartRequest();
}
@Override
protected NodeRestartRequest newNodeRequest() {
return new NodeRestartRequest();
}
@Override
protected NodeRestartRequest newNodeRequest(String nodeId, NodesRestartRequest request) {
return new NodeRestartRequest(nodeId, request);
}
@Override
protected NodesRestartResponse.NodeRestartResponse newNodeResponse() {
return new NodesRestartResponse.NodeRestartResponse();
}
// Runs on each target node: schedules an asynchronous stop/start after the requested delay
// and acknowledges immediately.
@Override
protected NodesRestartResponse.NodeRestartResponse nodeOperation(NodeRestartRequest request) throws ElasticsearchException {
if (disabled) {
throw new ElasticsearchIllegalStateException("Restart is disabled");
}
// A restart is already pending on this node; acknowledge without scheduling a second one.
if (!restartRequested.compareAndSet(false, true)) {
return new NodesRestartResponse.NodeRestartResponse(clusterService.localNode());
}
logger.info("Restarting in [{}]", request.delay);
threadPool.schedule(request.delay, ThreadPool.Names.GENERIC, new Runnable() {
@Override
public void run() {
boolean restartWithWrapper = false;
// When running as a service, delegate the restart to the Tanuki wrapper; loaded
// reflectively so there is no hard compile-time dependency on the wrapper jar.
if (System.getProperty("elasticsearch-service") != null) {
try {
Class wrapperManager = settings.getClassLoader().loadClass("org.tanukisoftware.wrapper.WrapperManager");
logger.info("Initiating requested restart (using service)");
wrapperManager.getMethod("restartAndReturn").invoke(null);
restartWithWrapper = true;
} catch (Throwable e) {
logger.error("failed to initial restart on service wrapper", e);
}
}
if (!restartWithWrapper) {
logger.info("Initiating requested restart");
try {
node.stop();
node.start();
} catch (Exception e) {
logger.warn("Failed to restart", e);
} finally {
// Allow a subsequent restart request once this attempt completes.
restartRequested.set(false);
}
}
}
});
return new NodesRestartResponse.NodeRestartResponse(clusterService.localNode());
}
@Override
protected boolean accumulateExceptions() {
return false;
}
// Per-node request carrying only the restart delay.
protected static class NodeRestartRequest extends NodeOperationRequest {
TimeValue delay;
private NodeRestartRequest() {
}
private NodeRestartRequest(String nodeId, NodesRestartRequest request) {
super(request, nodeId);
this.delay = request.delay;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
delay = readTimeValue(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
delay.writeTo(out);
}
}
}
| src_main_java_org_elasticsearch_action_admin_cluster_node_restart_TransportNodesRestartAction.java |
347 | @RunWith(HazelcastSerialClassRunner.class)
@Category(QuickTest.class)
public class ClientSortLimitTest extends HazelcastTestSupport {
static HazelcastInstance client;
static IMap map;
static int pageSize = 5;
static int size = 50;
@BeforeClass
public static void createInstances(){
Hazelcast.newHazelcastInstance();
Hazelcast.newHazelcastInstance();
client = HazelcastClient.newHazelcastClient();
}
@AfterClass
public static void shutdownInstances(){
HazelcastClient.shutdownAll();
Hazelcast.shutdownAll();
}
@Before
public void init(){
map = client.getMap(randomString());
for (int i = 0; i < size; i++) {
map.put(i, i);
}
}
@After
public void reset(){
map.destroy();
}
@Test
public void testWithoutAnchor() {
final PagingPredicate predicate = new PagingPredicate(pageSize);
predicate.nextPage();
predicate.nextPage();
Collection<Integer> values = map.values(predicate);
assertIterableEquals(values, 10, 11, 12, 13, 14);
predicate.previousPage();
values = map.values(predicate);
assertIterableEquals(values, 5, 6, 7, 8, 9);
predicate.previousPage();
values = map.values(predicate);
assertIterableEquals(values, 0, 1, 2, 3, 4);
}
@Test
public void testGoTo_previousPage_BeforeTheStart() {
final PagingPredicate predicate = new PagingPredicate(pageSize);
predicate.previousPage();
Collection<Integer> values = map.values(predicate);
values = map.values(predicate);
assertIterableEquals(values, 0, 1, 2, 3, 4);
}
@Test
public void testGoTo_NextPage_AfterTheEnd() {
final PagingPredicate predicate = new PagingPredicate(pageSize);
for ( int i=0; i < size/pageSize; i++ ) {
predicate.nextPage();
}
Collection<Integer> values = map.values(predicate);
values = map.values(predicate);
assertEquals( size/pageSize - 1, predicate.getPage());
assertIterableEquals(values, 45, 46, 47, 48, 49);
}
@Test
public void testPagingWithoutFilteringAndComparator() {
Set<Integer> set = new HashSet<Integer>();
final PagingPredicate predicate = new PagingPredicate(pageSize);
Collection<Integer> values = map.values(predicate);
while (values.size() > 0) {
assertEquals(pageSize, values.size());
set.addAll(values);
predicate.nextPage();
values = map.values(predicate);
}
assertEquals(size, set.size());
}
@Test
public void testPagingWithFilteringAndComparator() {
final Predicate lessEqual = Predicates.lessEqual("this", 8);
final TestComparator comparator = new TestComparator(false, IterationType.VALUE);
final PagingPredicate predicate = new PagingPredicate(lessEqual, comparator, pageSize);
Collection<Integer> values = map.values(predicate);
assertIterableEquals(values, 8, 7, 6, 5, 4);
predicate.nextPage();
assertEquals(4, predicate.getAnchor().getValue());
values = map.values(predicate);
assertIterableEquals(values, 3, 2, 1, 0);
predicate.nextPage();
assertEquals(0, predicate.getAnchor().getValue());
values = map.values(predicate);
assertEquals(0, values.size());
}
@Test
public void testKeyPaging() {
map.clear();
for (int i = 0; i < size; i++) { // keys [50-1] values [0-49]
map.put(size - i, i);
}
final Predicate lessEqual = Predicates.lessEqual("this", 8); // less than 8
final TestComparator comparator = new TestComparator(true, IterationType.KEY); //ascending keys
final PagingPredicate predicate = new PagingPredicate(lessEqual, comparator, pageSize);
Set<Integer> keySet = map.keySet(predicate);
assertIterableEquals(keySet, 42, 43, 44, 45, 46);
predicate.nextPage();
assertEquals(46, predicate.getAnchor().getKey());
keySet = map.keySet(predicate);
assertIterableEquals(keySet, 47, 48, 49, 50);
predicate.nextPage();
assertEquals(50, predicate.getAnchor().getKey());
keySet = map.keySet(predicate);
assertEquals(0, keySet.size());
}
@Test
public void testEqualValuesPaging() {
for (int i = size; i < 2 * size; i++) { //keys[50-99] values[0-49]
map.put(i, i - size);
}
final Predicate lessEqual = Predicates.lessEqual("this", 8); // entries which has value less than 8
final TestComparator comparator = new TestComparator(true, IterationType.VALUE); //ascending values
final PagingPredicate predicate = new PagingPredicate(lessEqual, comparator, pageSize); //pageSize = 5
Collection<Integer> values = map.values(predicate);
assertIterableEquals(values, 0, 0, 1, 1, 2);
predicate.nextPage();
values = map.values(predicate);
assertIterableEquals(values, 2, 3, 3, 4, 4);
predicate.nextPage();
values = map.values(predicate);
assertIterableEquals(values, 5, 5, 6, 6, 7);
predicate.nextPage();
values = map.values(predicate);
assertIterableEquals(values, 7, 8, 8);
}
@Test
public void testNextPageAfterResultSetEmpty(){
final Predicate lessEqual = Predicates.lessEqual("this", 3); // entries which has value less than 3
final TestComparator comparator = new TestComparator(true, IterationType.VALUE); //ascending values
final PagingPredicate predicate = new PagingPredicate(lessEqual, comparator, pageSize); //pageSize = 5
Collection<Integer> values = map.values(predicate);
assertIterableEquals(values, 0, 1, 2, 3);
predicate.nextPage();
values = map.values(predicate);
assertEquals(0, values.size());
predicate.nextPage();
values = map.values(predicate);
assertEquals(0, values.size());
}
static class TestComparator implements Comparator<Map.Entry>, Serializable {
int ascending = 1;
IterationType iterationType = IterationType.ENTRY;
TestComparator() {
}
TestComparator(boolean ascending, IterationType iterationType) {
this.ascending = ascending ? 1 : -1;
this.iterationType = iterationType;
}
public int compare(Map.Entry e1, Map.Entry e2) {
Map.Entry<Integer, Integer> o1 = e1;
Map.Entry<Integer, Integer> o2 = e2;
switch (iterationType) {
case KEY:
return (o1.getKey() - o2.getKey()) * ascending;
case VALUE:
return (o1.getValue() - o2.getValue()) * ascending;
default:
int result = (o1.getValue() - o2.getValue()) * ascending;
if (result != 0) {
return result;
}
return (o1.getKey() - o2.getKey()) * ascending;
}
}
}
} | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_map_ClientSortLimitTest.java |
32 | public abstract class MemcacheCommandProcessor<T> extends AbstractTextCommandProcessor<T> {
public static final String MAP_NAME_PRECEDER = "hz_memcache_";
public static final String DEFAULT_MAP_NAME = "hz_memcache_default";
protected MemcacheCommandProcessor(TextCommandService textCommandService) {
super(textCommandService);
}
public static byte[] longToByteArray(long v) {
int len = (int) (v / 256) + 1;
final byte[] bytes = new byte[len];
for (int i = len - 1; i >= 0; i--) {
final long t = v % 256;
bytes[i] = t < 128 ? (byte) t : (byte) (t - 256);
v = (v - t) / 256;
}
return bytes;
}
public static int byteArrayToLong(byte[] v) {
if (v.length > 8) {
return -1;
}
int r = 0;
for (int i = 0; i < v.length; i++) {
int t = (int) v[i];
t = t >= 0 ? t : t + 256;
r = r * 256 + t;
}
return r;
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_ascii_memcache_MemcacheCommandProcessor.java |
// Extends the fast-vector-highlighter FieldQuery to flatten Elasticsearch-specific query
// and filter wrappers down to the term-level queries the highlighter understands.
250 | public class CustomFieldQuery extends FieldQuery {
// Reflective handle on MultiTermQueryWrapperFilter's private "query" field so the wrapped
// query can be flattened for highlighting; stays null if the field cannot be found.
private static Field multiTermQueryWrapperFilterQueryField;
static {
try {
multiTermQueryWrapperFilterQueryField = MultiTermQueryWrapperFilter.class.getDeclaredField("query");
multiTermQueryWrapperFilterQueryField.setAccessible(true);
} catch (NoSuchFieldException e) {
// ignore
}
}
// Per-request switch: when set to TRUE, terms found inside filters are highlighted too
// (see flatten(Filter, ...) below).
public static final ThreadLocal<Boolean> highlightFilters = new ThreadLocal<Boolean>();
public CustomFieldQuery(Query query, IndexReader reader, FastVectorHighlighter highlighter) throws IOException {
this(query, reader, highlighter.isPhraseHighlight(), highlighter.isFieldMatch());
}
public CustomFieldQuery(Query query, IndexReader reader, boolean phraseHighlight, boolean fieldMatch) throws IOException {
super(query, reader, phraseHighlight, fieldMatch);
// The super constructor has already driven flatten(); clear the thread-local so the
// flag does not leak into later requests on the same thread.
highlightFilters.remove();
}
// Recursively unwraps composite query types, delegating primitive cases to super.flatten.
@Override
void flatten(Query sourceQuery, IndexReader reader, Collection<Query> flatQueries) throws IOException {
if (sourceQuery instanceof SpanTermQuery) {
super.flatten(new TermQuery(((SpanTermQuery) sourceQuery).getTerm()), reader, flatQueries);
} else if (sourceQuery instanceof ConstantScoreQuery) {
ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) sourceQuery;
if (constantScoreQuery.getFilter() != null) {
flatten(constantScoreQuery.getFilter(), reader, flatQueries);
} else {
flatten(constantScoreQuery.getQuery(), reader, flatQueries);
}
} else if (sourceQuery instanceof FunctionScoreQuery) {
flatten(((FunctionScoreQuery) sourceQuery).getSubQuery(), reader, flatQueries);
} else if (sourceQuery instanceof FilteredQuery) {
flatten(((FilteredQuery) sourceQuery).getQuery(), reader, flatQueries);
flatten(((FilteredQuery) sourceQuery).getFilter(), reader, flatQueries);
} else if (sourceQuery instanceof XFilteredQuery) {
flatten(((XFilteredQuery) sourceQuery).getQuery(), reader, flatQueries);
flatten(((XFilteredQuery) sourceQuery).getFilter(), reader, flatQueries);
} else if (sourceQuery instanceof MultiPhrasePrefixQuery) {
// Rewrite expands the prefix into concrete terms before flattening.
flatten(sourceQuery.rewrite(reader), reader, flatQueries);
} else if (sourceQuery instanceof FiltersFunctionScoreQuery) {
flatten(((FiltersFunctionScoreQuery) sourceQuery).getSubQuery(), reader, flatQueries);
} else if (sourceQuery instanceof MultiPhraseQuery) {
MultiPhraseQuery q = ((MultiPhraseQuery) sourceQuery);
convertMultiPhraseQuery(0, new int[q.getTermArrays().size()] , q, q.getTermArrays(), q.getPositions(), reader, flatQueries);
} else {
super.flatten(sourceQuery, reader, flatQueries);
}
}
// Expands a MultiPhraseQuery into one PhraseQuery per path through the per-position term
// arrays (depth-first recursion over termsIdx). With more than 16 total terms the
// combinatorial expansion is skipped and each term is flattened individually instead.
private void convertMultiPhraseQuery(int currentPos, int[] termsIdx, MultiPhraseQuery orig, List<Term[]> terms, int[] pos, IndexReader reader, Collection<Query> flatQueries) throws IOException {
if (currentPos == 0) {
// if we have more than 16 terms
int numTerms = 0;
for (Term[] currentPosTerm : terms) {
numTerms += currentPosTerm.length;
}
if (numTerms > 16) {
for (Term[] currentPosTerm : terms) {
for (Term term : currentPosTerm) {
super.flatten(new TermQuery(term), reader, flatQueries);
}
}
return;
}
}
/*
* we walk all possible ways and for each path down the MPQ we create a PhraseQuery this is what FieldQuery supports.
* It seems expensive but most queries will pretty small.
*/
if (currentPos == terms.size()) {
PhraseQuery query = new PhraseQuery();
query.setBoost(orig.getBoost());
query.setSlop(orig.getSlop());
for (int i = 0; i < termsIdx.length; i++) {
query.add(terms.get(i)[termsIdx[i]], pos[i]);
}
this.flatten(query, reader, flatQueries);
} else {
Term[] t = terms.get(currentPos);
for (int i = 0; i < t.length; i++) {
termsIdx[currentPos] = i;
convertMultiPhraseQuery(currentPos+1, termsIdx, orig, terms, pos, reader, flatQueries);
}
}
}
// Flattens terms out of filters, but only when the highlightFilters thread-local is TRUE;
// otherwise filter contents are excluded from highlighting.
void flatten(Filter sourceFilter, IndexReader reader, Collection<Query> flatQueries) throws IOException {
Boolean highlight = highlightFilters.get();
if (highlight == null || highlight.equals(Boolean.FALSE)) {
return;
}
if (sourceFilter instanceof TermFilter) {
flatten(new TermQuery(((TermFilter) sourceFilter).getTerm()), reader, flatQueries);
} else if (sourceFilter instanceof MultiTermQueryWrapperFilter) {
if (multiTermQueryWrapperFilterQueryField != null) {
try {
flatten((Query) multiTermQueryWrapperFilterQueryField.get(sourceFilter), reader, flatQueries);
} catch (IllegalAccessException e) {
// ignore
}
}
} else if (sourceFilter instanceof XBooleanFilter) {
XBooleanFilter booleanFilter = (XBooleanFilter) sourceFilter;
for (FilterClause clause : booleanFilter.clauses()) {
// Only positive clauses contribute highlightable terms; MUST_NOT is skipped.
if (clause.getOccur() == BooleanClause.Occur.MUST || clause.getOccur() == BooleanClause.Occur.SHOULD) {
flatten(clause.getFilter(), reader, flatQueries);
}
}
}
}
}
| src_main_java_org_apache_lucene_search_vectorhighlight_CustomFieldQuery.java |
// Submit a no-op cluster-state task at the requested priority; by the time it is processed,
// every task queued ahead of it has drained, which is what wait_for_events means here.
// The latch then releases the waiting health request.
315 | clusterService.submitStateUpdateTask("cluster_health (wait_for_events [" + request.waitForEvents() + "])", request.waitForEvents(), new ProcessedClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) {
// Intentionally returns the state unchanged - the task exists only as a queue marker.
return currentState;
}
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
latch.countDown();
}
@Override
public void onFailure(String source, Throwable t) {
// NOTE(review): the latch is not counted down on failure - presumably the caller
// relies on its own timeout; confirm at the await site.
logger.error("unexpected failure during [{}]", t, source);
}
});
| src_main_java_org_elasticsearch_action_admin_cluster_health_TransportClusterHealthAction.java |
1,057 | public class ListenerConfig {
protected String className = null;
protected EventListener implementation = null;
private ListenerConfigReadOnly readOnly;
/**
* Creates a ListenerConfig without className/implementation.
*/
public ListenerConfig() {
}
/**
* Creates a ListenerConfig with the given className.
*
* @param className the name of the EventListener class.
* @throws IllegalArgumentException if className is null or an empty String.
*/
public ListenerConfig(String className) {
setClassName(className);
}
public ListenerConfig(ListenerConfig config) {
implementation = config.getImplementation();
className = config.getClassName();
}
/**
* Creates a ListenerConfig with the given implementation.
*
* @param implementation the implementation to use as EventListener.
* @throws IllegalArgumentException if the implementation is null.
*/
public ListenerConfig(EventListener implementation) {
this.implementation = isNotNull(implementation,"implementation");
}
public ListenerConfig getAsReadOnly() {
if (readOnly == null) {
readOnly = new ListenerConfigReadOnly(this);
}
return readOnly;
}
/**
* Returns the name of the class of the EventListener. If no class is specified, null is returned.
*
* @return the class name of the EventListener.
* @see #setClassName(String)
*/
public String getClassName() {
return className;
}
/**
* Sets the class name of the EventListener.
*
* If a implementation was set, it will be removed.
*
* @param className the name of the class of the EventListener.
* @return the updated ListenerConfig.
* @throws IllegalArgumentException if className is null or an empty String.
* @see #setImplementation(java.util.EventListener)
* @see #getClassName()
*/
public ListenerConfig setClassName(String className) {
this.className = hasText(className, "className");
this.implementation = null;
return this;
}
/**
* Returns the EventListener implementation. If none has been specified, null is returned.
*
* @return the EventListener implementation.
* @see #setImplementation(java.util.EventListener)
*/
public EventListener getImplementation() {
return implementation;
}
/**
* Sets the EventListener implementation.
*
* If a className was set, it will be removed.
*
* @param implementation the EventListener implementation.
* @return the updated ListenerConfig.
* @throws IllegalArgumentException the implementation is null.
* @see #setClassName(String)
* @see #getImplementation()
*/
public ListenerConfig setImplementation(EventListener implementation) {
this.implementation = isNotNull(implementation,"implementation");
this.className = null;
return this;
}
public boolean isIncludeValue() {
return true;
}
public boolean isLocal() {
return false;
}
@Override
public String toString() {
return "ListenerConfig [className=" + className + ", implementation=" + implementation + ", includeValue="
+ isIncludeValue() + ", local=" + isLocal() + "]";
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_config_ListenerConfig.java |
1,447 | public class TitanHadoopGraph {
private final TitanHadoopSetup setup;
private final TypeInspector typeManager;
private final SystemTypeInspector systemTypes;
private final VertexReader vertexReader;
private final boolean verifyVertexExistence = false;
private static final Logger log =
LoggerFactory.getLogger(TitanHadoopGraph.class);
public TitanHadoopGraph(final TitanHadoopSetup setup) {
this.setup = setup;
this.typeManager = setup.getTypeInspector();
this.systemTypes = setup.getSystemTypeInspector();
this.vertexReader = setup.getVertexReader();
}
/**
 * Decodes a single storage row (vertex key plus relation entries) into a
 * {@link FaunusVertex}, converting each raw entry into a Faunus property or
 * edge. System and hidden relation types are skipped.
 *
 * Fix: corrected the debug log-message typo "ege label" -> "edge label".
 *
 * @param configuration Hadoop/Faunus configuration handed to created elements
 * @param key raw row key; the vertex id is extracted via the vertex reader
 * @param entries the relation entries stored under the key
 * @return the decoded vertex, or {@code null} when it should be dropped —
 *         no existence marker found (when verification is enabled), or no
 *         non-system relations remain after filtering
 * @throws RuntimeException wrapping any failure while parsing an entry
 */
protected FaunusVertex readHadoopVertex(final Configuration configuration, final StaticBuffer key, Iterable<Entry> entries) {
    final long vertexId = this.vertexReader.getVertexId(key);
    Preconditions.checkArgument(vertexId > 0);
    FaunusVertex vertex = new FaunusVertex(configuration, vertexId);
    // With existence verification disabled, every vertex starts out as "found".
    boolean foundVertexState = !verifyVertexExistence;
    for (final Entry data : entries) {
        try {
            RelationReader relationReader = setup.getRelationReader(vertex.getLongId());
            final RelationCache relation = relationReader.parseRelation(data, false, typeManager);
            if (this.systemTypes.isVertexExistsSystemType(relation.typeId)) {
                foundVertexState = true;
            } else if (this.systemTypes.isVertexLabelSystemType(relation.typeId)) {
                // Vertex label: resolve the label id to a name, then re-resolve it in the
                // vertex's own (Faunus-side) type manager before attaching it.
                long vertexLabelId = relation.getOtherVertexId();
                VertexLabel vl = typeManager.getExistingVertexLabel(vertexLabelId);
                vertex.setVertexLabel(vertex.getTypeManager().getVertexLabel(vl.getName()));
            }
            if (systemTypes.isSystemType(relation.typeId)) continue; //Ignore system types
            final RelationType type = typeManager.getExistingRelationType(relation.typeId);
            if (((InternalRelationType)type).isHiddenType()) continue; //Ignore hidden types
            StandardFaunusRelation frel;
            if (type.isPropertyKey()) {
                Object value = relation.getValue();
                Preconditions.checkNotNull(value);
                final StandardFaunusProperty fprop = new StandardFaunusProperty(relation.relationId, vertex, type.getName(), value);
                vertex.addProperty(fprop);
                frel = fprop;
            } else {
                assert type.isEdgeLabel();
                StandardFaunusEdge fedge;
                // The relation direction determines at which end of the edge this vertex sits
                // (note the swapped vertex-id arguments in the two branches).
                if (relation.direction.equals(Direction.IN))
                    fedge = new StandardFaunusEdge(configuration, relation.relationId, relation.getOtherVertexId(), vertexId, type.getName());
                else if (relation.direction.equals(Direction.OUT))
                    fedge = new StandardFaunusEdge(configuration, relation.relationId, vertexId, relation.getOtherVertexId(), type.getName());
                else
                    throw ExceptionFactory.bothIsNotSupported();
                vertex.addEdge(fedge);
                frel = fedge;
            }
            if (relation.hasProperties()) {
                // Load the relation's own meta-properties: property keys get their value set
                // directly; edge labels point at another vertex by id.
                for (final LongObjectCursor<Object> next : relation) {
                    assert next.value != null;
                    RelationType rt = typeManager.getExistingRelationType(next.key);
                    if (rt.isPropertyKey()) {
                        PropertyKey pkey = (PropertyKey)vertex.getTypeManager().getPropertyKey(rt.getName());
                        log.debug("Retrieved key {} for name \"{}\"", pkey, rt.getName());
                        frel.setProperty(pkey, next.value);
                    } else {
                        assert next.value instanceof Long;
                        EdgeLabel el = (EdgeLabel)vertex.getTypeManager().getEdgeLabel(rt.getName());
                        log.debug("Retrieved edge label {} for name \"{}\"", el, rt.getName());
                        frel.setProperty(el, new FaunusVertex(configuration,(Long)next.value));
                    }
                }
                // Mark the just-loaded meta-relations as Loaded so they are not re-persisted.
                for (TitanRelation rel : frel.query().queryAll().relations())
                    ((FaunusRelation)rel).setLifeCycle(ElementLifeCycle.Loaded);
            }
            frel.setLifeCycle(ElementLifeCycle.Loaded);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
    vertex.setLifeCycle(ElementLifeCycle.Loaded);
    /*Since we are filtering out system relation types, we might end up with vertices that have no incident relations.
    This is especially true for schema vertices. Those are filtered out. */
    if (!foundVertexState || !vertex.query().relations().iterator().hasNext()) return null;
    return vertex;
}
/**
 * Releases resources by closing the underlying setup.
 */
public void close() {
    setup.close();
}
} | 1no label
| titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_formats_util_TitanHadoopGraph.java |
14 | .withPredicateProvider(new PredicateProvider() {
/**
 * Builds a predicate for a Sku-scoped property that also matches the value
 * inherited from the product's defaultSku: an additional Sku with a null value
 * for the target property falls back to product.defaultSku.&lt;prop&gt;. Implemented
 * as an OR of two id-subqueries, mirroring the comparison semantics of the
 * delegate restriction's predicate provider. Non-Sku properties are delegated
 * unchanged.
 */
@Override
public Predicate buildPredicate(CriteriaBuilder builder, FieldPathBuilder fieldPathBuilder,
        From root, String ceilingEntity, String fullPropertyName,
        Path explicitPath, List directValues) {
    FieldPath fieldPath = fieldPathBuilder.getFieldPath(root, fullPropertyName);
    // Only apply the defaultSku fallback when the property targets the Sku: either it
    // carries the configured Sku prefix, or there is no prefix and no association path.
    if ((StringUtils.isNotEmpty(skuPropertyPrefix) && fullPropertyName.startsWith(skuPropertyPrefix))
            || (StringUtils.isEmpty(skuPropertyPrefix) && CollectionUtils.isEmpty(fieldPath.getAssociationPath()))) {
        Path skuIdPath = fieldPathBuilder.getPath(root, getSkuPropertyPrefix() + "id", builder);
        // Build the first subquery based on searching from product.defaultSku.name
        Subquery<Long> additionalSkusSubQuery = fieldPathBuilder.getCriteria().subquery(Long.class);
        Root<SkuImpl> additionalSkusRoot = additionalSkusSubQuery.from(SkuImpl.class);
        additionalSkusSubQuery.select(additionalSkusRoot.get("id").as(Long.class));
        Path additionalSkusTargetPropertyPath = fieldPathBuilder.getPath(additionalSkusRoot, fullPropertyName.replace(getSkuPropertyPrefix(), ""), builder);
        // The path to the defaultSku property from a sku, available via the 'product' attribute
        Path defaultSkuPropertyPath = fieldPathBuilder.getPath(additionalSkusRoot,
                DEFAULT_SKU_PATH_PREFIX + fullPropertyName.replace(getSkuPropertyPrefix(), ""), builder);
        Path additionalSkusProductPath = fieldPathBuilder.getPath(additionalSkusRoot, "product", builder);
        // Build the second subquery based on the value being set on the Sku itself, like just 'name'
        Subquery<Long> hardcodedPropertySubquery = fieldPathBuilder.getCriteria().subquery(Long.class);
        Root<SkuImpl> hardcodedPropertyRoot = hardcodedPropertySubquery.from(SkuImpl.class);
        hardcodedPropertySubquery.select(hardcodedPropertyRoot.get("id").as(Long.class));
        Path hardcodedPropertyTargetPath = fieldPathBuilder.getPath(hardcodedPropertyRoot, fullPropertyName.replace(getSkuPropertyPrefix(), ""), builder);
        // Build a pair of equivalent expressions — one against the Sku's own property, one
        // against the defaultSku fallback — matching the delegate provider's semantics.
        Predicate propertyExpression;
        Predicate defaultSkuExpression;
        if (delegateRestriction.getPredicateProvider() instanceof LikePredicateProvider) {
            // Case-insensitive LIKE; the single direct value is the String pattern.
            propertyExpression = builder.like(builder.lower(hardcodedPropertyTargetPath),
                    ((String) directValues.get(0)).toLowerCase());
            defaultSkuExpression = builder.like(builder.lower(defaultSkuPropertyPath),
                    ((String) directValues.get(0)).toLowerCase());
        } else if (delegateRestriction.getPredicateProvider() instanceof IsNullPredicateProvider) {
            propertyExpression = builder.isNull(hardcodedPropertyTargetPath);
            defaultSkuExpression = builder.isNull(defaultSkuPropertyPath);
        } else if (delegateRestriction.getPredicateProvider() instanceof BetweenDatePredicateProvider) {
            if (directValues.size() == 2) {
                if (directValues.get(0) == null) {
                    // Open lower bound: strictly before the upper bound.
                    propertyExpression = builder.lessThan(hardcodedPropertyTargetPath, (Comparable) directValues.get(1));
                    defaultSkuExpression = builder.lessThan(defaultSkuPropertyPath, (Comparable) directValues.get(1));
                } else if (directValues.get(1) == null) {
                    // Open upper bound: at or after the lower bound.
                    propertyExpression = builder.greaterThanOrEqualTo(hardcodedPropertyTargetPath,
                            (Comparable) directValues.get(0));
                    defaultSkuExpression = builder.greaterThanOrEqualTo(defaultSkuPropertyPath,
                            (Comparable) directValues.get(0));
                } else {
                    propertyExpression = builder.between(hardcodedPropertyTargetPath, (Comparable) directValues.get(0),
                            (Comparable) directValues.get(1));
                    defaultSkuExpression = builder.between(defaultSkuPropertyPath, (Comparable) directValues.get(0),
                            (Comparable) directValues.get(1));
                }
            } else {
                // Single value supplied: degrade to an equality comparison.
                propertyExpression = builder.equal(hardcodedPropertyTargetPath, directValues.get(0));
                defaultSkuExpression = builder.equal(defaultSkuPropertyPath, directValues.get(0));
            }
        } else if (delegateRestriction.getPredicateProvider() instanceof BetweenPredicateProvider) {
            if (directValues.size() > 1) {
                propertyExpression = builder.between(hardcodedPropertyTargetPath, (Comparable) directValues.get(0),
                        (Comparable) directValues.get(1));
                defaultSkuExpression = builder.between(defaultSkuPropertyPath, (Comparable) directValues.get(0),
                        (Comparable) directValues.get(1));
            } else {
                // Single value supplied: degrade to an equality comparison.
                propertyExpression = builder.equal(hardcodedPropertyTargetPath, directValues.get(0));
                defaultSkuExpression = builder.equal(defaultSkuPropertyPath, directValues.get(0));
            }
        } else if (delegateRestriction.getPredicateProvider() instanceof CollectionSizeEqualPredicateProvider) {
            propertyExpression = builder.equal(builder.size(hardcodedPropertyTargetPath), directValues.get(0));
            defaultSkuExpression = builder.equal(builder.size(defaultSkuPropertyPath), directValues.get(0));
        } else if (delegateRestriction.getPredicateProvider() instanceof EqPredicateProvider) {
            propertyExpression = hardcodedPropertyTargetPath.in(directValues);
            defaultSkuExpression = defaultSkuPropertyPath.in(directValues);
        } else {
            throw new IllegalArgumentException("Unknown PredicateProvider instance: " +
                    delegateRestriction.getPredicateProvider().getClass().getName());
        }
        // First do a subquery to find all of the additional Skus that match the given expression
        // This subquery will return all of the sku IDs that match the additional Sku expression. The WHERE clause of
        // this is basically something like:
        // WHERE sku.name == null AND sku.product != null AND sku.product.defaultSku.name LIKE %<val>%
        List<Predicate> subRestrictions = new ArrayList<Predicate>();
        subRestrictions.add(builder.and(
                builder.isNull(additionalSkusTargetPropertyPath),
                builder.isNotNull(additionalSkusProductPath),
                defaultSkuExpression
        ));
        additionalSkusSubQuery.where(subRestrictions.toArray(new Predicate[subRestrictions.size()]));
        // Now do another sub query to get all the Skus that actually have the name explicitly set. This will return
        // all of the default Skus or additional Skus where the name has been explicitly set
        // This query is something like:
        // WHERE sku.name != null AND sku.name LIKE %<val>%
        List<Predicate> hardcodedPropertyRestrictions = new ArrayList<Predicate>();
        hardcodedPropertyRestrictions.add(builder.and(
                builder.isNotNull(hardcodedPropertyTargetPath),
                propertyExpression
        ));
        hardcodedPropertySubquery.where(hardcodedPropertyRestrictions.toArray(new Predicate[hardcodedPropertyRestrictions.size()]));
        // Now that we've built the subqueries, do an OR to select all Skus whose IDs are a part of either the
        // additional-Skus subquery or the explicitly-set-property subquery
        return builder.or(
                skuIdPath.in(additionalSkusSubQuery),
                skuIdPath.in(hardcodedPropertySubquery)
        );
    }
    // Not a Sku-scoped property: defer entirely to the delegate restriction's provider.
    return delegateRestriction.getPredicateProvider().buildPredicate(builder, fieldPathBuilder, root,
            ceilingEntity, fullPropertyName, explicitPath, directValues);
}
}); | 1no label
| admin_broadleaf-admin-module_src_main_java_org_broadleafcommerce_admin_server_service_handler_SkuRestrictionFactoryImpl.java |
3,433 | private static class NoResponseHandler implements ResponseHandler {
/**
 * Intentionally discards the response; this handler is a no-op sink.
 */
@Override
public void sendResponse(final Object obj) {
}
/**
 * @return always {@code false} for this no-op handler
 */
@Override
public boolean isLocal() {
    return false;
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_spi_impl_ResponseHandlerFactory.java |
499 | public class TransportCloseIndexAction extends TransportMasterNodeOperationAction<CloseIndexRequest, CloseIndexResponse> {
// Cluster-state service that performs the actual index-close update.
private final MetaDataIndexStateService indexStateService;
// Used in doExecute to reject requests deemed destructive before they run.
private final DestructiveOperations destructiveOperations;
/**
 * Dependency-injected constructor; wires the master-node action base class and
 * builds the destructive-operations guard from node settings.
 */
@Inject
public TransportCloseIndexAction(Settings settings, TransportService transportService, ClusterService clusterService,
        ThreadPool threadPool, MetaDataIndexStateService indexStateService, NodeSettingsService nodeSettingsService) {
    super(settings, transportService, clusterService, threadPool);
    this.indexStateService = indexStateService;
    this.destructiveOperations = new DestructiveOperations(logger, settings, nodeSettingsService);
}
/**
 * @return the SAME executor — the operation goes async immediately, so no
 *         dedicated thread pool is needed
 */
@Override
protected String executor() {
    // no need to use a thread pool, we go async right away
    return ThreadPool.Names.SAME;
}
/**
 * @return the transport action name this handler is registered under
 */
@Override
protected String transportAction() {
    return CloseIndexAction.NAME;
}
/**
 * @return a fresh, empty request instance for deserialization
 */
@Override
protected CloseIndexRequest newRequest() {
    return new CloseIndexRequest();
}
/**
 * @return a fresh, empty response instance for deserialization
 */
@Override
protected CloseIndexResponse newResponse() {
    return new CloseIndexResponse();
}
/**
 * Rejects destructive requests (e.g. wildcard/_all patterns, depending on the
 * destructive_requires_name setting — confirm policy in DestructiveOperations)
 * before delegating to the normal master-node execution path.
 */
@Override
protected void doExecute(CloseIndexRequest request, ActionListener<CloseIndexResponse> listener) {
    // Presumably throws when the index expression is considered destructive — verify in DestructiveOperations.
    destructiveOperations.failDestructive(request.indices());
    super.doExecute(request, listener);
}
/**
 * Closing an index is a metadata-level change: fail fast if any target index
 * carries a METADATA-level cluster block.
 */
@Override
protected ClusterBlockException checkBlock(CloseIndexRequest request, ClusterState state) {
    return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA, request.indices());
}
/**
 * Resolves the request's index expressions to concrete index names against the
 * current cluster state, then asks the index-state service to close them,
 * translating the cluster-state ack into a {@link CloseIndexResponse}.
 */
@Override
protected void masterOperation(final CloseIndexRequest request, final ClusterState state, final ActionListener<CloseIndexResponse> listener) throws ElasticsearchException {
    // Expand wildcards/aliases to concrete index names and write them back onto the request.
    request.indices(state.metaData().concreteIndices(request.indices(), request.indicesOptions()));
    CloseIndexClusterStateUpdateRequest updateRequest = new CloseIndexClusterStateUpdateRequest()
            .ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout())
            .indices(request.indices());
    indexStateService.closeIndex(updateRequest, new ClusterStateUpdateListener() {
        @Override
        public void onResponse(ClusterStateUpdateResponse response) {
            // Report whether all relevant nodes acknowledged the close within the ack timeout.
            listener.onResponse(new CloseIndexResponse(response.isAcknowledged()));
        }
        @Override
        public void onFailure(Throwable t) {
            logger.debug("failed to close indices [{}]", t, request.indices());
            listener.onFailure(t);
        }
    });
}
} | 1no label
| src_main_java_org_elasticsearch_action_admin_indices_close_TransportCloseIndexAction.java |