Unnamed: 0 (int64, 0–6.45k) | func (string, lengths 37–161k) | target (class label, 2 classes) | project (string, lengths 33–167)
---|---|---|---|
1,488 | @SuppressWarnings("unchecked")
public class OObjectDatabaseTxPooled extends OObjectDatabaseTx implements ODatabasePooled {
private OObjectDatabasePool ownerPool;
public OObjectDatabaseTxPooled(final OObjectDatabasePool iOwnerPool, final String iURL, final String iUserName,
final String iUserPassword) {
super(iURL);
ownerPool = iOwnerPool;
super.open(iUserName, iUserPassword);
}
public void reuse(final Object iOwner, final Object[] iAdditionalArgs) {
ownerPool = (OObjectDatabasePool) iOwner;
if (isClosed())
open((String) iAdditionalArgs[0], (String) iAdditionalArgs[1]);
init();
// getMetadata().reload();
ODatabaseRecordThreadLocal.INSTANCE.set(getUnderlying());
try {
ODatabase current = underlying;
while (!(current instanceof ODatabaseRaw) && ((ODatabaseComplex<?>) current).getUnderlying() != null)
current = ((ODatabaseComplex<?>) current).getUnderlying();
((ODatabaseRaw) current).callOnOpenListeners();
} catch (Exception e) {
OLogManager.instance().error(this, "Error on reusing database '%s' in pool", e, getName());
}
}
@Override
public OObjectDatabaseTxPooled open(String iUserName, String iUserPassword) {
throw new UnsupportedOperationException(
"Database instance was retrieved from a pool. You cannot open the database in this way. Use directly a OObjectDatabaseTx instance if you want to manually open the connection");
}
@Override
public OObjectDatabaseTxPooled create() {
throw new UnsupportedOperationException(
"Database instance was retrieved from a pool. You cannot open the database in this way. Use directly a OObjectDatabaseTx instance if you want to manually open the connection");
}
@Override
public boolean isClosed() {
return ownerPool == null || super.isClosed();
}
/**
* Avoids really closing the database; instead releases this instance back to the owner pool.
*/
@Override
public void close() {
if (isClosed())
return;
objects2Records.clear();
records2Objects.clear();
rid2Records.clear();
checkOpeness();
try {
rollback();
} catch (Exception e) {
OLogManager.instance().error(this, "Error on releasing database '%s' in pool", e, getName());
}
try {
ODatabase current = underlying;
while (!(current instanceof ODatabaseRaw) && ((ODatabaseComplex<?>) current).getUnderlying() != null)
current = ((ODatabaseComplex<?>) current).getUnderlying();
((ODatabaseRaw) current).callOnCloseListeners();
} catch (Exception e) {
OLogManager.instance().error(this, "Error on releasing database '%s' in pool", e, getName());
}
getLevel1Cache().clear();
if (ownerPool != null) {
final OObjectDatabasePool localCopy = ownerPool;
ownerPool = null;
localCopy.release(this);
}
}
public void forceClose() {
super.close();
}
@Override
protected void checkOpeness() {
if (ownerPool == null)
throw new ODatabaseException(
"Database instance has been released to the pool. Get another database instance from the pool with the right username and password");
super.checkOpeness();
}
public boolean isUnderlyingOpen() {
return !super.isClosed();
}
} | 1 (no label)
| object_src_main_java_com_orientechnologies_orient_object_db_OObjectDatabaseTxPooled.java |
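A minimal usage sketch for the pooled class above, assuming the usual OrientDB 1.x object-database pool API (OObjectDatabasePool.global().acquire(...)); the URL and credentials are placeholders:

// Acquire a pooled instance; the pool hands back an OObjectDatabaseTxPooled.
OObjectDatabaseTx db = OObjectDatabasePool.global().acquire("remote:localhost/demo", "admin", "admin");
try {
  // ... work with the object database ...
} finally {
  // close() on a pooled instance does not really close the connection: it rolls back,
  // clears the caches and releases the instance back to the owner pool (see close() above).
  db.close();
}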
395 | public final class BankersRounding {
public static final int DEFAULT_SCALE = 2;
public static final BigDecimal ZERO = setScale(0);
public static int getScaleForCurrency(Currency currency) {
if (currency != null) {
return currency.getDefaultFractionDigits();
} else {
return DEFAULT_SCALE;
}
}
public static BigDecimal setScale(int scale, BigDecimal amount) {
return amount.setScale(scale, BigDecimal.ROUND_HALF_EVEN);
}
public static BigDecimal setScale(int scale, double amount) {
return setScale(scale, new BigDecimal(amount));
}
public static double multiply(int scale, double multiplicand, double multiplier) {
return setScale(scale, multiplicand).multiply(setScale(scale, multiplier)).doubleValue();
}
public static BigDecimal divide(int scale, BigDecimal dividend, BigDecimal divisor) {
return dividend.divide(divisor, scale, BigDecimal.ROUND_HALF_EVEN);
}
public static double divide(int scale, double dividend, double divisor) {
return divide(setScale(scale, dividend), setScale(scale, divisor)).doubleValue();
}
public static BigDecimal setScale(BigDecimal amount) {
return setScale(DEFAULT_SCALE, amount);
}
public static BigDecimal setScale(BigDecimal amount, int scale) {
return setScale(scale, amount);
}
public static BigDecimal setScale(double amount) {
return setScale(DEFAULT_SCALE, new BigDecimal(amount));
}
public static BigDecimal divide(BigDecimal dividend, BigDecimal divisor) {
return divide(DEFAULT_SCALE, dividend, divisor);
}
public static BigDecimal zeroAmount() {
return ZERO;
}
} | 0 (true)
| common_src_main_java_org_broadleafcommerce_common_money_BankersRounding.java |
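A short sketch of the half-even ("banker's") rounding behaviour the class above relies on; the tie values are chosen so the dropped digit is exactly 5:

BigDecimal a = BankersRounding.setScale(2, new BigDecimal("2.125")); // -> 2.12 (ties round to the even neighbour)
BigDecimal b = BankersRounding.setScale(2, new BigDecimal("2.135")); // -> 2.14
BigDecimal q = BankersRounding.divide(new BigDecimal("1"), new BigDecimal("3")); // -> 0.33 at the default scale of 2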
385 | public class DocumentationHover
implements ITextHover, ITextHoverExtension, ITextHoverExtension2 {
private CeylonEditor editor;
public DocumentationHover(CeylonEditor editor) {
this.editor = editor;
}
public IRegion getHoverRegion(ITextViewer textViewer, int offset) {
IDocument document = textViewer.getDocument();
int start= -2;
int end= -1;
try {
int pos= offset;
char c;
while (pos >= 0) {
c= document.getChar(pos);
if (!Character.isJavaIdentifierPart(c)) {
break;
}
--pos;
}
start= pos;
pos= offset;
int length= document.getLength();
while (pos < length) {
c= document.getChar(pos);
if (!Character.isJavaIdentifierPart(c)) {
break;
}
++pos;
}
end= pos;
} catch (BadLocationException x) {
}
if (start >= -1 && end > -1) {
if (start == offset && end == offset)
return new Region(offset, 0);
else if (start == offset)
return new Region(start, end - start);
else
return new Region(start + 1, end - start - 1);
}
return null;
}
final class CeylonLocationListener implements LocationListener {
private final BrowserInformationControl control;
CeylonLocationListener(BrowserInformationControl control) {
this.control = control;
}
@Override
public void changing(LocationEvent event) {
String location = event.location;
//necessary for windows environment (fix for blank page)
//somehow related to this: https://bugs.eclipse.org/bugs/show_bug.cgi?id=129236
if (!"about:blank".equals(location)) {
event.doit= false;
}
handleLink(location);
/*else if (location.startsWith("javadoc:")) {
final DocBrowserInformationControlInput input = (DocBrowserInformationControlInput) control.getInput();
int beginIndex = input.getHtml().indexOf("javadoc:")+8;
final String handle = input.getHtml().substring(beginIndex, input.getHtml().indexOf("\"",beginIndex));
new Job("Fetching Javadoc") {
@Override
protected IStatus run(IProgressMonitor monitor) {
final IJavaElement elem = JavaCore.create(handle);
try {
final String javadoc = JavadocContentAccess2.getHTMLContent((IMember) elem, true);
if (javadoc!=null) {
PlatformUI.getWorkbench().getProgressService()
.runInUI(editor.getSite().getWorkbenchWindow(), new IRunnableWithProgress() {
@Override
public void run(IProgressMonitor monitor)
throws InvocationTargetException, InterruptedException {
StringBuilder sb = new StringBuilder();
HTMLPrinter.insertPageProlog(sb, 0, getStyleSheet());
appendJavadoc(elem, javadoc, sb);
HTMLPrinter.addPageEpilog(sb);
control.setInput(new DocBrowserInformationControlInput(input, null, sb.toString(), 0));
}
}, null);
}
}
catch (Exception e) {
e.printStackTrace();
}
return Status.OK_STATUS;
}
}.schedule();
}*/
}
private void handleLink(String location) {
if (location.startsWith("dec:")) {
Referenceable target = getLinkedModel(editor, location);
if (target!=null) {
close(control); //FIXME: should have protocol to hide, rather than dispose
gotoDeclaration(target, editor);
}
}
else if (location.startsWith("doc:")) {
Referenceable target = getLinkedModel(editor, location);
if (target!=null) {
control.setInput(getHoverInfo(target, control.getInput(), editor, null));
}
}
else if (location.startsWith("ref:")) {
Referenceable target = getLinkedModel(editor, location);
close(control);
new FindReferencesAction(editor, (Declaration) target).run();
}
else if (location.startsWith("sub:")) {
Referenceable target = getLinkedModel(editor, location);
close(control);
new FindSubtypesAction(editor, (Declaration) target).run();
}
else if (location.startsWith("act:")) {
Referenceable target = getLinkedModel(editor, location);
close(control);
new FindRefinementsAction(editor, (Declaration) target).run();
}
else if (location.startsWith("ass:")) {
Referenceable target = getLinkedModel(editor, location);
close(control);
new FindAssignmentsAction(editor, (Declaration) target).run();
}
else if (location.startsWith("stp:")) {
close(control);
CompilationUnit rn = editor.getParseController().getRootNode();
Node node = Nodes.findNode(rn, Integer.parseInt(location.substring(4)));
for (SpecifyTypeProposal stp: SpecifyTypeProposal.createProposals(rn, node, editor)) {
stp.apply(editor.getParseController().getDocument());
break;
}
}
else if (location.startsWith("exv:")) {
close(control);
new ExtractValueProposal(editor).apply(editor.getParseController().getDocument());
}
else if (location.startsWith("exf:")) {
close(control);
new ExtractFunctionProposal(editor).apply(editor.getParseController().getDocument());
}
}
@Override
public void changed(LocationEvent event) {}
}
/**
* Action to go back to the previous input in the hover control.
*/
static final class BackAction extends Action {
private final BrowserInformationControl fInfoControl;
public BackAction(BrowserInformationControl infoControl) {
fInfoControl= infoControl;
setText("Back");
ISharedImages images= getWorkbench().getSharedImages();
setImageDescriptor(images.getImageDescriptor(IMG_TOOL_BACK));
setDisabledImageDescriptor(images.getImageDescriptor(IMG_TOOL_BACK_DISABLED));
update();
}
@Override
public void run() {
BrowserInput previous= (BrowserInput) fInfoControl.getInput().getPrevious();
if (previous != null) {
fInfoControl.setInput(previous);
}
}
public void update() {
BrowserInput current= fInfoControl.getInput();
if (current != null && current.getPrevious() != null) {
BrowserInput previous= current.getPrevious();
setToolTipText("Back to " + previous.getInputName());
setEnabled(true);
} else {
setToolTipText("Back");
setEnabled(false);
}
}
}
/**
* Action to go forward to the next input in the hover control.
*/
static final class ForwardAction extends Action {
private final BrowserInformationControl fInfoControl;
public ForwardAction(BrowserInformationControl infoControl) {
fInfoControl= infoControl;
setText("Forward");
ISharedImages images= getWorkbench().getSharedImages();
setImageDescriptor(images.getImageDescriptor(IMG_TOOL_FORWARD));
setDisabledImageDescriptor(images.getImageDescriptor(IMG_TOOL_FORWARD_DISABLED));
update();
}
@Override
public void run() {
BrowserInput next= (BrowserInput) fInfoControl.getInput().getNext();
if (next != null) {
fInfoControl.setInput(next);
}
}
public void update() {
BrowserInput current= fInfoControl.getInput();
if (current != null && current.getNext() != null) {
setToolTipText("Forward to " + current.getNext().getInputName());
setEnabled(true);
} else {
setToolTipText("Forward");
setEnabled(false);
}
}
}
/**
* Action that shows the current hover contents in the Javadoc view.
*/
/*private static final class ShowInDocViewAction extends Action {
private final BrowserInformationControl fInfoControl;
public ShowInJavadocViewAction(BrowserInformationControl infoControl) {
fInfoControl= infoControl;
setText("Show in Ceylondoc View");
setImageDescriptor(JavaPluginImages.DESC_OBJS_JAVADOCTAG); //TODO: better image
}
@Override
public void run() {
DocBrowserInformationControlInput infoInput= (DocBrowserInformationControlInput) fInfoControl.getInput(); //TODO: check cast
fInfoControl.notifyDelayedInputChange(null);
fInfoControl.dispose(); //FIXME: should have protocol to hide, rather than dispose
try {
JavadocView view= (JavadocView) JavaPlugin.getActivePage().showView(JavaUI.ID_JAVADOC_VIEW);
view.setInput(infoInput);
} catch (PartInitException e) {
JavaPlugin.log(e);
}
}
}*/
/**
* Action that opens the current hover input element.
*/
final class OpenDeclarationAction extends Action {
private final BrowserInformationControl fInfoControl;
public OpenDeclarationAction(BrowserInformationControl infoControl) {
fInfoControl = infoControl;
setText("Open Declaration");
setLocalImageDescriptors(this, "goto_input.gif");
}
@Override
public void run() {
close(fInfoControl); //FIXME: should have protocol to hide, rather than dispose
CeylonBrowserInput input = (CeylonBrowserInput) fInfoControl.getInput();
gotoDeclaration(getLinkedModel(editor, input.getAddress()), editor);
}
}
private static void close(BrowserInformationControl control) {
control.notifyDelayedInputChange(null);
control.dispose();
}
/**
* The hover control creator.
*/
private IInformationControlCreator fHoverControlCreator;
/**
* The presentation control creator.
*/
private IInformationControlCreator fPresenterControlCreator;
private IInformationControlCreator getInformationPresenterControlCreator() {
if (fPresenterControlCreator == null)
fPresenterControlCreator= new PresenterControlCreator(this);
return fPresenterControlCreator;
}
@Override
public IInformationControlCreator getHoverControlCreator() {
return getHoverControlCreator("F2 for focus");
}
public IInformationControlCreator getHoverControlCreator(
String statusLineMessage) {
if (fHoverControlCreator == null) {
fHoverControlCreator= new HoverControlCreator(this,
getInformationPresenterControlCreator(),
statusLineMessage);
}
return fHoverControlCreator;
}
void addLinkListener(final BrowserInformationControl control) {
control.addLocationListener(new CeylonLocationListener(control));
}
public static Referenceable getLinkedModel(CeylonEditor editor, String location) {
if (location==null) {
return null;
}
else if (location.equals("doc:ceylon.language:ceylon.language:Nothing")) {
return editor.getParseController().getRootNode().getUnit().getNothingDeclaration();
}
TypeChecker tc = editor.getParseController().getTypeChecker();
String[] bits = location.split(":");
JDTModelLoader modelLoader = getModelLoader(tc);
String moduleName = bits[1];
Module module = modelLoader.getLoadedModule(moduleName);
if (module==null || bits.length==2) {
return module;
}
Referenceable target = module.getPackage(bits[2]);
for (int i=3; i<bits.length; i++) {
Scope scope;
if (target instanceof Scope) {
scope = (Scope) target;
}
else if (target instanceof TypedDeclaration) {
scope = ((TypedDeclaration) target).getType().getDeclaration();
}
else {
return null;
}
if (scope instanceof Value) {
TypeDeclaration val = ((Value) scope).getTypeDeclaration();
if (val.isAnonymous()) {
scope = val;
}
}
target = scope.getDirectMember(bits[i], null, false);
}
return target;
}
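// The link addresses handled above are colon-separated, e.g.
// "doc:ceylon.language:ceylon.language:Nothing" (the special case above): bits[0] is the
// protocol ("dec", "doc", "ref", ...), bits[1] the module name, bits[2] the package, and
// any remaining segments are resolved as direct members, drilling into the type of an
// anonymous object value along the way.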
public String getHoverInfo(ITextViewer textViewer, IRegion hoverRegion) {
CeylonBrowserInput info = (CeylonBrowserInput)
getHoverInfo2(textViewer, hoverRegion);
return info!=null ? info.getHtml() : null;
}
@Override
public CeylonBrowserInput getHoverInfo2(ITextViewer textViewer,
IRegion hoverRegion) {
return internalGetHoverInfo(editor, hoverRegion);
}
static CeylonBrowserInput internalGetHoverInfo(CeylonEditor editor,
IRegion hoverRegion) {
if (editor==null || editor.getSelectionProvider()==null) {
return null;
}
CeylonBrowserInput result =
getExpressionHover(editor, hoverRegion);
if (result==null) {
result = getDeclarationHover(editor, hoverRegion);
}
return result;
}
static CeylonBrowserInput getExpressionHover(CeylonEditor editor,
IRegion hoverRegion) {
CeylonParseController parseController =
editor.getParseController();
if (parseController==null) {
return null;
}
Tree.CompilationUnit rn =
parseController.getRootNode();
if (rn!=null) {
int hoffset = hoverRegion.getOffset();
ITextSelection selection =
editor.getSelectionFromThread();
if (selection!=null &&
selection.getOffset()<=hoffset &&
selection.getOffset()+selection.getLength()>=hoffset) {
Node node = findNode(rn,
selection.getOffset(),
selection.getOffset()+selection.getLength()-1);
if (node instanceof Tree.Type) {
return getTypeHoverInfo(node,
selection.getText(),
editor.getCeylonSourceViewer().getDocument(),
editor.getParseController().getProject());
}
if (node instanceof Tree.Expression) {
node = ((Tree.Expression) node).getTerm();
}
if (node instanceof Tree.Term) {
return getTermTypeHoverInfo(node,
selection.getText(),
editor.getCeylonSourceViewer().getDocument(),
editor.getParseController().getProject());
}
else {
return null;
}
}
else {
return null;
}
}
else {
return null;
}
}
static CeylonBrowserInput getDeclarationHover(CeylonEditor editor,
IRegion hoverRegion) {
CeylonParseController parseController =
editor.getParseController();
if (parseController==null) {
return null;
}
Tree.CompilationUnit rootNode =
parseController.getRootNode();
if (rootNode!=null) {
Node node = findNode(rootNode,
hoverRegion.getOffset());
if (node instanceof Tree.ImportPath) {
Referenceable r =
((Tree.ImportPath) node).getModel();
if (r!=null) {
return getHoverInfo(r, null, editor, node);
}
else {
return null;
}
}
else if (node instanceof Tree.LocalModifier) {
return getInferredTypeHoverInfo(node,
parseController.getProject());
}
else if (node instanceof Tree.Literal) {
return getTermTypeHoverInfo(node, null,
editor.getCeylonSourceViewer().getDocument(),
parseController.getProject());
}
else {
return getHoverInfo(getReferencedDeclaration(node),
null, editor, node);
}
}
else {
return null;
}
}
private static CeylonBrowserInput getInferredTypeHoverInfo(Node node,
IProject project) {
ProducedType t = ((Tree.LocalModifier) node).getTypeModel();
if (t==null) return null;
StringBuilder buffer = new StringBuilder();
HTMLPrinter.insertPageProlog(buffer, 0, HTML.getStyleSheet());
HTML.addImageAndLabel(buffer, null,
HTML.fileUrl("types.gif").toExternalForm(),
16, 16,
"<tt>" + producedTypeLink(t, node.getUnit()) + "</tt>",
20, 4);
buffer.append("<br/>");
if (!t.containsUnknowns()) {
buffer.append("One quick assist available:<br/>");
HTML.addImageAndLabel(buffer, null,
HTML.fileUrl("correction_change.gif").toExternalForm(),
16, 16,
"<a href=\"stp:" + node.getStartIndex() + "\">Specify explicit type</a>",
20, 4);
}
//buffer.append(getDocumentationFor(editor.getParseController(), t.getDeclaration()));
HTMLPrinter.addPageEpilog(buffer);
return new CeylonBrowserInput(null, null, buffer.toString());
}
private static CeylonBrowserInput getTypeHoverInfo(Node node, String selectedText,
IDocument doc, IProject project) {
ProducedType t = ((Tree.Type) node).getTypeModel();
if (t==null) return null;
// String expr = "";
// try {
// expr = doc.get(node.getStartIndex(), node.getStopIndex()-node.getStartIndex()+1);
// }
// catch (BadLocationException e) {
// e.printStackTrace();
// }
String abbreviated = PRINTER.getProducedTypeName(t, node.getUnit());
String unabbreviated = VERBOSE_PRINTER.getProducedTypeName(t, node.getUnit());
StringBuilder buffer = new StringBuilder();
HTMLPrinter.insertPageProlog(buffer, 0, HTML.getStyleSheet());
HTML.addImageAndLabel(buffer, null,
HTML.fileUrl("types.gif").toExternalForm(),
16, 16,
"<tt>" + producedTypeLink(t, node.getUnit()) + "</tt> ",
20, 4);
if (!abbreviated.equals(unabbreviated)) {
buffer.append("<br/>")
.append("Abbreviation of: ")
.append(unabbreviated);
}
HTMLPrinter.addPageEpilog(buffer);
return new CeylonBrowserInput(null, null, buffer.toString());
}
private static CeylonBrowserInput getTermTypeHoverInfo(Node node, String selectedText,
IDocument doc, IProject project) {
ProducedType t = ((Tree.Term) node).getTypeModel();
if (t==null) return null;
// String expr = "";
// try {
// expr = doc.get(node.getStartIndex(), node.getStopIndex()-node.getStartIndex()+1);
// }
// catch (BadLocationException e) {
// e.printStackTrace();
// }
StringBuilder buffer = new StringBuilder();
HTMLPrinter.insertPageProlog(buffer, 0, HTML.getStyleSheet());
String desc = node instanceof Tree.Literal ? "literal" : "expression";
HTML.addImageAndLabel(buffer, null,
HTML.fileUrl("types.gif").toExternalForm(),
16, 16,
"<tt>" + producedTypeLink(t, node.getUnit()) + "</tt> " + desc,
20, 4);
if (node instanceof Tree.StringLiteral) {
buffer.append( "<br/>")
.append("<code style='color:")
.append(toHex(getCurrentThemeColor(STRINGS)))
.append("'><pre>")
.append('\"')
.append(convertToHTMLContent(node.getText()))
.append('\"')
.append("</pre></code>");
// If a single char selection, then append info on that character too
if (selectedText != null
&& codePointCount(selectedText, 0, selectedText.length()) == 1) {
appendCharacterHoverInfo(buffer, selectedText);
}
}
else if (node instanceof Tree.CharLiteral) {
String character = node.getText();
if (character.length()>2) {
appendCharacterHoverInfo(buffer,
character.substring(1, character.length()-1));
}
}
else if (node instanceof Tree.NaturalLiteral) {
buffer.append("<br/>")
.append("<code style='color:")
.append(toHex(getCurrentThemeColor(NUMBERS)))
.append("'>");
String text = node.getText().replace("_", "");
switch (text.charAt(0)) {
case '#':
buffer.append(parseInt(text.substring(1),16));
break;
case '$':
buffer.append(parseInt(text.substring(1),2));
break;
default:
buffer.append(parseInt(text));
}
buffer.append("</code>");
}
else if (node instanceof Tree.FloatLiteral) {
buffer.append("<br/>")
.append("<code style='color:")
.append(toHex(getCurrentThemeColor(NUMBERS)))
.append("'>")
.append(parseFloat(node.getText().replace("_", "")))
.append("</code>");
}
if (selectedText!=null) {
buffer.append("<br/>")
.append("Two quick assists available:<br/>");
HTML.addImageAndLabel(buffer, null,
HTML.fileUrl("change.png").toExternalForm(),
16, 16,
"<a href=\"exv:\">Extract value</a>",
20, 4);
HTML.addImageAndLabel(buffer, null,
HTML.fileUrl("change.png").toExternalForm(),
16, 16,
"<a href=\"exf:\">Extract function</a>",
20, 4);
buffer.append("<br/>");
}
HTMLPrinter.addPageEpilog(buffer);
return new CeylonBrowserInput(null, null, buffer.toString());
}
private static void appendCharacterHoverInfo(StringBuilder buffer, String character) {
buffer.append( "<br/>")
.append("<code style='color:")
.append(toHex(getCurrentThemeColor(CHARS)))
.append("'>")
.append('\'')
.append(convertToHTMLContent(character))
.append('\'')
.append("</code>");
int codepoint = Character.codePointAt(character, 0);
String name = Character.getName(codepoint);
buffer.append("<br/>Unicode Name: <code>").append(name).append("</code>");
String hex = Integer.toHexString(codepoint).toUpperCase();
while (hex.length() < 4) {
hex = "0" + hex;
}
buffer.append("<br/>Codepoint: <code>").append("U+").append(hex).append("</code>");
buffer.append("<br/>General Category: <code>").append(getCodepointGeneralCategoryName(codepoint)).append("</code>");
Character.UnicodeScript script = Character.UnicodeScript.of(codepoint);
buffer.append("<br/>Script: <code>").append(script.name()).append("</code>");
Character.UnicodeBlock block = Character.UnicodeBlock.of(codepoint);
buffer.append("<br/>Block: <code>").append(block).append("</code><br/>");
}
private static String getCodepointGeneralCategoryName(int codepoint) {
String gc;
switch (Character.getType(codepoint)) {
case Character.COMBINING_SPACING_MARK:
gc = "Mark, combining spacing"; break;
case Character.CONNECTOR_PUNCTUATION:
gc = "Punctuation, connector"; break;
case Character.CONTROL:
gc = "Other, control"; break;
case Character.CURRENCY_SYMBOL:
gc = "Symbol, currency"; break;
case Character.DASH_PUNCTUATION:
gc = "Punctuation, dash"; break;
case Character.DECIMAL_DIGIT_NUMBER:
gc = "Number, decimal digit"; break;
case Character.ENCLOSING_MARK:
gc = "Mark, enclosing"; break;
case Character.END_PUNCTUATION:
gc = "Punctuation, close"; break;
case Character.FINAL_QUOTE_PUNCTUATION:
gc = "Punctuation, final quote"; break;
case Character.FORMAT:
gc = "Other, format"; break;
case Character.INITIAL_QUOTE_PUNCTUATION:
gc = "Punctuation, initial quote"; break;
case Character.LETTER_NUMBER:
gc = "Number, letter"; break;
case Character.LINE_SEPARATOR:
gc = "Separator, line"; break;
case Character.LOWERCASE_LETTER:
gc = "Letter, lowercase"; break;
case Character.MATH_SYMBOL:
gc = "Symbol, math"; break;
case Character.MODIFIER_LETTER:
gc = "Letter, modifier"; break;
case Character.MODIFIER_SYMBOL:
gc = "Symbol, modifier"; break;
case Character.NON_SPACING_MARK:
gc = "Mark, nonspacing"; break;
case Character.OTHER_LETTER:
gc = "Letter, other"; break;
case Character.OTHER_NUMBER:
gc = "Number, other"; break;
case Character.OTHER_PUNCTUATION:
gc = "Punctuation, other"; break;
case Character.OTHER_SYMBOL:
gc = "Symbol, other"; break;
case Character.PARAGRAPH_SEPARATOR:
gc = "Separator, paragraph"; break;
case Character.PRIVATE_USE:
gc = "Other, private use"; break;
case Character.SPACE_SEPARATOR:
gc = "Separator, space"; break;
case Character.START_PUNCTUATION:
gc = "Punctuation, open"; break;
case Character.SURROGATE:
gc = "Other, surrogate"; break;
case Character.TITLECASE_LETTER:
gc = "Letter, titlecase"; break;
case Character.UNASSIGNED:
gc = "Other, unassigned"; break;
case Character.UPPERCASE_LETTER:
gc = "Letter, uppercase"; break;
default:
gc = "<Unknown>";
}
return gc;
}
private static String getIcon(Object obj) {
if (obj instanceof Module) {
return "jar_l_obj.gif";
}
else if (obj instanceof Package) {
return "package_obj.gif";
}
else if (obj instanceof Declaration) {
Declaration dec = (Declaration) obj;
if (dec instanceof Class) {
String icon = dec.isShared() ?
"class_obj.gif" :
"innerclass_private_obj.gif";
return decorateTypeIcon(dec, icon);
}
else if (dec instanceof Interface) {
String icon = dec.isShared() ?
"int_obj.gif" :
"innerinterface_private_obj.gif";
return decorateTypeIcon(dec, icon);
}
else if (dec instanceof TypeAlias||
dec instanceof NothingType) {
return "type_alias.gif";
}
else if (dec.isParameter()) {
if (dec instanceof Method) {
return "methpro_obj.gif";
}
else {
return "field_protected_obj.gif";
}
}
else if (dec instanceof Method) {
String icon = dec.isShared() ?
"public_co.gif" :
"private_co.gif";
return decorateFunctionIcon(dec, icon);
}
else if (dec instanceof MethodOrValue) {
return dec.isShared() ?
"field_public_obj.gif" :
"field_private_obj.gif";
}
else if (dec instanceof TypeParameter) {
return "typevariable_obj.gif";
}
}
return null;
}
private static String decorateFunctionIcon(Declaration dec, String icon) {
if (dec.isAnnotation()) {
return icon.replace("co", "ann");
}
else {
return icon;
}
}
private static String decorateTypeIcon(Declaration dec, String icon) {
if (((TypeDeclaration) dec).getCaseTypes()!=null) {
return icon.replace("obj", "enum");
}
else if (dec.isAnnotation()) {
return icon.replace("obj", "ann");
}
else if (((TypeDeclaration) dec).isAlias()) {
return icon.replace("obj", "alias");
}
else {
return icon;
}
}
/**
* Computes the hover info.
* @param previousInput the previous input, or <code>null</code>
* @param node
* @param elements the resolved elements
* @param editorInputElement the editor input, or <code>null</code>
*
* @return the HTML hover info for the given element(s) or <code>null</code>
* if no information is available
* @since 3.4
*/
static CeylonBrowserInput getHoverInfo(Referenceable model,
BrowserInput previousInput, CeylonEditor editor, Node node) {
if (model instanceof Declaration) {
Declaration dec = (Declaration) model;
return new CeylonBrowserInput(previousInput, dec,
getDocumentationFor(editor.getParseController(), dec, node, null));
}
else if (model instanceof Package) {
Package dec = (Package) model;
return new CeylonBrowserInput(previousInput, dec,
getDocumentationFor(editor.getParseController(), dec));
}
else if (model instanceof Module) {
Module dec = (Module) model;
return new CeylonBrowserInput(previousInput, dec,
getDocumentationFor(editor.getParseController(), dec));
}
else {
return null;
}
}
private static void appendJavadoc(IJavaElement elem, StringBuilder sb) {
if (elem instanceof IMember) {
try {
//TODO: Javadoc @ icon?
IMember mem = (IMember) elem;
String jd = JavadocContentAccess2.getHTMLContent(mem, true);
if (jd!=null) {
sb.append("<br/>").append(jd);
String base = getBaseURL(mem, mem.isBinary());
int endHeadIdx= sb.indexOf("</head>");
sb.insert(endHeadIdx, "\n<base href='" + base + "'>\n");
}
}
catch (JavaModelException e) {
e.printStackTrace();
}
}
}
private static String getBaseURL(IJavaElement element, boolean isBinary)
throws JavaModelException {
if (isBinary) {
// Source attachment usually does not include Javadoc resources
// => Always use the Javadoc location as base:
URL baseURL = JavaUI.getJavadocLocation(element, false);
if (baseURL != null) {
if (baseURL.getProtocol().equals("jar")) {
// It's a JarURLConnection, which is not known to the browser widget.
// Let's start the help web server:
URL baseURL2 = PlatformUI.getWorkbench().getHelpSystem()
.resolve(baseURL.toExternalForm(), true);
if (baseURL2 != null) { // can be null if org.eclipse.help.ui is not available
baseURL = baseURL2;
}
}
return baseURL.toExternalForm();
}
}
else {
IResource resource = element.getResource();
if (resource != null) {
/*
* Too bad: Browser widget knows nothing about EFS and custom URL handlers,
* so IResource#getLocationURI() does not work in all cases.
* We only support the local file system for now.
* A solution could be https://bugs.eclipse.org/bugs/show_bug.cgi?id=149022 .
*/
IPath location = resource.getLocation();
if (location != null) {
return location.toFile().toURI().toString();
}
}
}
return null;
}
public static String getDocumentationFor(CeylonParseController cpc,
Package pack) {
StringBuilder buffer= new StringBuilder();
addMainPackageDescription(pack, buffer);
addPackageDocumentation(cpc, pack, buffer);
addAdditionalPackageInfo(buffer, pack);
addPackageMembers(buffer, pack);
addPackageModuleInfo(pack, buffer);
insertPageProlog(buffer, 0, HTML.getStyleSheet());
addPageEpilog(buffer);
return buffer.toString();
}
private static void addPackageMembers(StringBuilder buffer,
Package pack) {
boolean first = true;
for (Declaration dec: pack.getMembers()) {
if (dec instanceof Class && ((Class)dec).isOverloaded()) {
continue;
}
if (dec.isShared() && !dec.isAnonymous()) {
if (first) {
buffer.append("<p>Contains: ");
first = false;
}
else {
buffer.append(", ");
}
/*addImageAndLabel(buffer, null, fileUrl(getIcon(dec)).toExternalForm(),
16, 16, "<tt><a " + link(dec) + ">" +
dec.getName() + "</a></tt>", 20, 2);*/
appendLink(buffer, dec);
}
}
if (!first) {
buffer.append(".</p>");
}
}
private static void appendLink(StringBuilder buffer, Referenceable dec) {
buffer.append("<tt><a ").append(HTML.link(dec)).append(">");
if (dec instanceof Declaration) {
buffer.append(((Declaration) dec).getName());
}
else if (dec instanceof Package) {
buffer.append(getLabel((Package)dec));
}
else if (dec instanceof Module) {
buffer.append(getLabel((Module)dec));
}
buffer.append("</a></tt>");
}
private static String link(Referenceable dec) {
StringBuilder builder = new StringBuilder();
appendLink(builder, dec);
return builder.toString();
}
private static void addAdditionalPackageInfo(StringBuilder buffer,
Package pack) {
Module mod = pack.getModule();
if (mod.isJava()) {
buffer.append("<p>This package is implemented in Java.</p>");
}
if (JDKUtils.isJDKModule(mod.getNameAsString())) {
buffer.append("<p>This package forms part of the Java SDK.</p>");
}
}
private static void addMainPackageDescription(Package pack,
StringBuilder buffer) {
if (pack.isShared()) {
String ann = toHex(getCurrentThemeColor(ANNOTATIONS));
HTML.addImageAndLabel(buffer, null,
HTML.fileUrl("annotation_obj.gif").toExternalForm(),
16, 16,
"<tt style='font-size:90%;color:" + ann + "'>shared</tt>"
, 20, 4);
}
HTML.addImageAndLabel(buffer, pack,
HTML.fileUrl(getIcon(pack)).toExternalForm(),
16, 16,
"<tt style='font-size:102%'>" +
HTML.highlightLine(description(pack)) +
"</tt>",
20, 4);
}
private static void addPackageModuleInfo(Package pack,
StringBuilder buffer) {
Module mod = pack.getModule();
HTML.addImageAndLabel(buffer, mod,
HTML.fileUrl(getIcon(mod)).toExternalForm(),
16, 16,
"<span style='font-size:96%'>in module " +
link(mod) + "</span>",
20, 2);
}
private static String description(Package pack) {
return "package " + getLabel(pack);
}
public static String getDocumentationFor(ModuleDetails mod, String version,
Scope scope, Unit unit) {
return getDocumentationForModule(mod.getName(), version, mod.getDoc(),
scope, unit);
}
public static String getDocumentationForModule(String name,
String version, String doc, Scope scope, Unit unit) {
StringBuilder buffer = new StringBuilder();
HTML.addImageAndLabel(buffer, null,
HTML.fileUrl("jar_l_obj.gif").toExternalForm(),
16, 16,
"<tt style='font-size:102%'>" +
HTML.highlightLine(description(name, version)) +
"</tt></b>",
20, 4);
if (doc!=null) {
buffer.append(markdown(doc, scope, unit));
}
insertPageProlog(buffer, 0, HTML.getStyleSheet());
addPageEpilog(buffer);
return buffer.toString();
}
private static String description(String name, String version) {
return "module " + name + " \"" + version + "\"";
}
private static String getDocumentationFor(CeylonParseController cpc,
Module mod) {
StringBuilder buffer = new StringBuilder();
addMainModuleDescription(mod, buffer);
addAdditionalModuleInfo(buffer, mod);
addModuleDocumentation(cpc, mod, buffer);
addModuleMembers(buffer, mod);
insertPageProlog(buffer, 0, HTML.getStyleSheet());
addPageEpilog(buffer);
return buffer.toString();
}
private static void addAdditionalModuleInfo(StringBuilder buffer,
Module mod) {
if (mod.isJava()) {
buffer.append("<p>This module is implemented in Java.</p>");
}
if (mod.isDefault()) {
buffer.append("<p>The default module for packages which do not belong to explicit module.</p>");
}
if (JDKUtils.isJDKModule(mod.getNameAsString())) {
buffer.append("<p>This module forms part of the Java SDK.</p>");
}
}
private static void addMainModuleDescription(Module mod,
StringBuilder buffer) {
HTML.addImageAndLabel(buffer, mod,
HTML.fileUrl(getIcon(mod)).toExternalForm(),
16, 16,
"<tt style='font-size:102%'>" +
HTML.highlightLine(description(mod)) +
"</tt>",
20, 4);
}
private static void addModuleDocumentation(CeylonParseController cpc,
Module mod, StringBuilder buffer) {
Unit unit = mod.getUnit();
PhasedUnit pu = null;
if (unit instanceof CeylonUnit) {
pu = ((CeylonUnit)unit).getPhasedUnit();
}
if (pu!=null) {
List<Tree.ModuleDescriptor> moduleDescriptors =
pu.getCompilationUnit().getModuleDescriptors();
if (!moduleDescriptors.isEmpty()) {
Tree.ModuleDescriptor refnode = moduleDescriptors.get(0);
if (refnode!=null) {
Scope linkScope = mod.getPackage(mod.getNameAsString());
appendDocAnnotationContent(refnode.getAnnotationList(), buffer, linkScope);
appendThrowAnnotationContent(refnode.getAnnotationList(), buffer, linkScope);
appendSeeAnnotationContent(refnode.getAnnotationList(), buffer);
}
}
}
}
private static void addPackageDocumentation(CeylonParseController cpc,
Package pack, StringBuilder buffer) {
Unit unit = pack.getUnit();
PhasedUnit pu = null;
if (unit instanceof CeylonUnit) {
pu = ((CeylonUnit)unit).getPhasedUnit();
}
if (pu!=null) {
List<Tree.PackageDescriptor> packageDescriptors =
pu.getCompilationUnit().getPackageDescriptors();
if (!packageDescriptors.isEmpty()) {
Tree.PackageDescriptor refnode = packageDescriptors.get(0);
if (refnode!=null) {
Scope linkScope = pack;
appendDocAnnotationContent(refnode.getAnnotationList(), buffer, linkScope);
appendThrowAnnotationContent(refnode.getAnnotationList(), buffer, linkScope);
appendSeeAnnotationContent(refnode.getAnnotationList(), buffer);
}
}
}
}
private static void addModuleMembers(StringBuilder buffer,
Module mod) {
boolean first = true;
for (Package pack: mod.getPackages()) {
if (pack.isShared()) {
if (first) {
buffer.append("<p>Contains: ");
first = false;
}
else {
buffer.append(", ");
}
/*addImageAndLabel(buffer, null, fileUrl(getIcon(dec)).toExternalForm(),
16, 16, "<tt><a " + link(dec) + ">" +
dec.getName() + "</a></tt>", 20, 2);*/
appendLink(buffer, pack);
}
}
if (!first) {
buffer.append(".</p>");
}
}
private static String description(Module mod) {
return "module " + getLabel(mod) + " \"" + mod.getVersion() + "\"";
}
public static String getDocumentationFor(CeylonParseController cpc,
Declaration dec) {
return getDocumentationFor(cpc, dec, null, null);
}
public static String getDocumentationFor(CeylonParseController cpc,
Declaration dec, ProducedReference pr) {
return getDocumentationFor(cpc, dec, null, pr);
}
private static String getDocumentationFor(CeylonParseController cpc,
Declaration dec, Node node, ProducedReference pr) {
if (dec==null) return null;
if (dec instanceof Value) {
TypeDeclaration val = ((Value) dec).getTypeDeclaration();
if (val!=null && val.isAnonymous()) {
dec = val;
}
}
Unit unit = cpc.getRootNode().getUnit();
StringBuilder buffer = new StringBuilder();
insertPageProlog(buffer, 0, HTML.getStyleSheet());
addMainDescription(buffer, dec, node, pr, cpc, unit);
boolean obj = addInheritanceInfo(dec, node, pr, buffer, unit);
addContainerInfo(dec, node, buffer); //TODO: use the pr to get the qualifying type??
boolean hasDoc = addDoc(cpc, dec, node, buffer);
addRefinementInfo(cpc, dec, node, buffer, hasDoc, unit); //TODO: use the pr to get the qualifying type??
addReturnType(dec, buffer, node, pr, obj, unit);
addParameters(cpc, dec, node, pr, buffer, unit);
addClassMembersInfo(dec, buffer);
addUnitInfo(dec, buffer);
addPackageInfo(dec, buffer);
if (dec instanceof NothingType) {
addNothingTypeInfo(buffer);
}
else {
appendExtraActions(dec, buffer);
}
addPageEpilog(buffer);
return buffer.toString();
}
private static void addMainDescription(StringBuilder buffer,
Declaration dec, Node node, ProducedReference pr,
CeylonParseController cpc, Unit unit) {
StringBuilder buf = new StringBuilder();
if (dec.isShared()) buf.append("shared ");
if (dec.isActual()) buf.append("actual ");
if (dec.isDefault()) buf.append("default ");
if (dec.isFormal()) buf.append("formal ");
if (dec instanceof Value && ((Value) dec).isLate())
buf.append("late ");
if (isVariable(dec)) buf.append("variable ");
if (dec.isNative()) buf.append("native ");
if (dec instanceof TypeDeclaration) {
TypeDeclaration td = (TypeDeclaration) dec;
if (td.isSealed()) buf.append("sealed ");
if (td.isFinal()) buf.append("final ");
if (td instanceof Class && ((Class)td).isAbstract())
buf.append("abstract ");
}
if (dec.isAnnotation()) buf.append("annotation ");
if (buf.length()!=0) {
String ann = toHex(getCurrentThemeColor(ANNOTATIONS));
HTML.addImageAndLabel(buffer, null,
HTML.fileUrl("annotation_obj.gif").toExternalForm(),
16, 16,
"<tt style='font-size:91%;color:" + ann + "'>" + buf + "</tt>",
20, 4);
}
HTML.addImageAndLabel(buffer, dec,
HTML.fileUrl(getIcon(dec)).toExternalForm(),
16, 16,
"<tt style='font-size:105%'>" +
(dec.isDeprecated() ? "<s>":"") +
description(dec, node, pr, cpc, unit) +
(dec.isDeprecated() ? "</s>":"") +
"</tt>",
20, 4);
}
private static void addClassMembersInfo(Declaration dec,
StringBuilder buffer) {
if (dec instanceof ClassOrInterface) {
if (!dec.getMembers().isEmpty()) {
boolean first = true;
for (Declaration mem: dec.getMembers()) {
if (mem instanceof Method &&
((Method) mem).isOverloaded()) {
continue;
}
if (mem.isShared()) {
if (first) {
buffer.append("<p>Members: ");
first = false;
}
else {
buffer.append(", ");
}
appendLink(buffer, mem);
}
}
if (!first) {
buffer.append(".</p>");
//extraBreak = true;
}
}
}
}
private static void addNothingTypeInfo(StringBuilder buffer) {
buffer.append("Special bottom type defined by the language. "
+ "<code>Nothing</code> is assignable to all types, but has no value. "
+ "A function or value of type <code>Nothing</code> either throws "
+ "an exception, or never returns.");
}
private static boolean addInheritanceInfo(Declaration dec,
Node node, ProducedReference pr, StringBuilder buffer,
Unit unit) {
buffer.append("<p><div style='padding-left:20px'>");
boolean obj=false;
if (dec instanceof TypedDeclaration) {
TypeDeclaration td =
((TypedDeclaration) dec).getTypeDeclaration();
if (td!=null && td.isAnonymous()) {
obj=true;
documentInheritance(td, node, pr, buffer, unit);
}
}
else if (dec instanceof TypeDeclaration) {
documentInheritance((TypeDeclaration) dec, node, pr, buffer, unit);
}
buffer.append("</div></p>");
documentTypeParameters(dec, node, pr, buffer, unit);
buffer.append("</p>");
return obj;
}
private static void addRefinementInfo(CeylonParseController cpc,
Declaration dec, Node node, StringBuilder buffer,
boolean hasDoc, Unit unit) {
Declaration rd = dec.getRefinedDeclaration();
if (dec!=rd && rd!=null) {
buffer.append("<p>");
TypeDeclaration superclass = (TypeDeclaration) rd.getContainer();
ClassOrInterface outer = (ClassOrInterface) dec.getContainer();
ProducedType sup = getQualifyingType(node, outer).getSupertype(superclass);
HTML.addImageAndLabel(buffer, rd,
HTML.fileUrl(rd.isFormal() ? "implm_co.gif" : "over_co.gif").toExternalForm(),
16, 16,
"refines " + link(rd) +
" declared by <tt>" +
producedTypeLink(sup, unit) + "</tt>",
20, 2);
buffer.append("</p>");
if (!hasDoc) {
Tree.Declaration refnode2 =
(Tree.Declaration) getReferencedNode(rd, cpc);
if (refnode2!=null) {
appendDocAnnotationContent(refnode2.getAnnotationList(),
buffer, resolveScope(rd));
}
}
}
}
private static void appendParameters(Declaration d, ProducedReference pr,
Unit unit, StringBuilder result/*, CeylonParseController cpc*/) {
if (d instanceof Functional) {
List<ParameterList> plists = ((Functional) d).getParameterLists();
if (plists!=null) {
for (ParameterList params: plists) {
if (params.getParameters().isEmpty()) {
result.append("()");
}
else {
result.append("(");
for (Parameter p: params.getParameters()) {
appendParameter(result, pr, p, unit);
// if (cpc!=null) {
// result.append(getDefaultValueDescription(p, cpc));
// }
result.append(", ");
}
result.setLength(result.length()-2);
result.append(")");
}
}
}
}
}
private static void appendParameter(StringBuilder result,
ProducedReference pr, Parameter p, Unit unit) {
if (p.getModel() == null) {
result.append(p.getName());
}
else {
ProducedTypedReference ppr = pr==null ?
null : pr.getTypedParameter(p);
if (p.isDeclaredVoid()) {
result.append(HTML.keyword("void"));
}
else {
if (ppr!=null) {
ProducedType pt = ppr.getType();
if (p.isSequenced() && pt!=null) {
pt = p.getDeclaration().getUnit()
.getSequentialElementType(pt);
}
result.append(producedTypeLink(pt, unit));
if (p.isSequenced()) {
result.append(p.isAtLeastOne()?'+':'*');
}
}
else if (p.getModel() instanceof Method) {
result.append(HTML.keyword("function"));
}
else {
result.append(HTML.keyword("value"));
}
}
result.append(" ");
appendLink(result, p.getModel());
appendParameters(p.getModel(), ppr, unit, result);
}
}
private static void addParameters(CeylonParseController cpc,
Declaration dec, Node node, ProducedReference pr,
StringBuilder buffer, Unit unit) {
if (dec instanceof Functional) {
if (pr==null) {
pr = getProducedReference(dec, node);
}
if (pr==null) return;
for (ParameterList pl: ((Functional) dec).getParameterLists()) {
if (!pl.getParameters().isEmpty()) {
buffer.append("<p>");
for (Parameter p: pl.getParameters()) {
MethodOrValue model = p.getModel();
if (model!=null) {
StringBuilder param = new StringBuilder();
param.append("<span style='font-size:96%'>accepts <tt>");
appendParameter(param, pr, p, unit);
param.append(HTML.highlightLine(getInitialValueDescription(model, cpc)))
.append("</tt>");
Tree.Declaration refNode =
(Tree.Declaration) getReferencedNode(model, cpc);
if (refNode!=null) {
appendDocAnnotationContent(refNode.getAnnotationList(),
param, resolveScope(dec));
}
param.append("</span>");
HTML.addImageAndLabel(buffer, model,
HTML.fileUrl("methpro_obj.gif").toExternalForm(),
16, 16, param.toString(), 20, 2);
}
}
buffer.append("</p>");
}
}
}
}
private static void addReturnType(Declaration dec, StringBuilder buffer,
Node node, ProducedReference pr, boolean obj, Unit unit) {
if (dec instanceof TypedDeclaration && !obj) {
if (pr==null) {
pr = getProducedReference(dec, node);
}
if (pr==null) return;
ProducedType ret = pr.getType();
if (ret!=null) {
buffer.append("<p>");
StringBuilder buf = new StringBuilder("returns <tt>");
buf.append(producedTypeLink(ret, unit)).append("|");
buf.setLength(buf.length()-1);
buf.append("</tt>");
HTML.addImageAndLabel(buffer, ret.getDeclaration(),
HTML.fileUrl("stepreturn_co.gif").toExternalForm(),
16, 16, buf.toString(), 20, 2);
buffer.append("</p>");
}
}
}
private static ProducedTypeNamePrinter printer(boolean abbreviate) {
return new ProducedTypeNamePrinter(abbreviate, true, false, true) {
@Override
protected String getSimpleDeclarationName(Declaration declaration, Unit unit) {
return "<a " + HTML.link(declaration) + ">" +
super.getSimpleDeclarationName(declaration, unit) +
"</a>";
}
@Override
protected String amp() {
return "&";
}
@Override
protected String lt() {
return "<";
}
@Override
protected String gt() {
return ">";
}
};
}
private static ProducedTypeNamePrinter PRINTER = printer(true);
private static ProducedTypeNamePrinter VERBOSE_PRINTER = printer(false);
private static String producedTypeLink(ProducedType pt, Unit unit) {
return PRINTER.getProducedTypeName(pt, unit);
}
private static ProducedReference getProducedReference(Declaration dec,
Node node) {
if (node instanceof Tree.MemberOrTypeExpression) {
return ((Tree.MemberOrTypeExpression) node).getTarget();
}
else if (node instanceof Tree.Type) {
return ((Tree.Type) node).getTypeModel();
}
ClassOrInterface outer = dec.isClassOrInterfaceMember() ?
(ClassOrInterface) dec.getContainer() : null;
return dec.getProducedReference(getQualifyingType(node, outer),
Collections.<ProducedType>emptyList());
}
private static boolean addDoc(CeylonParseController cpc,
Declaration dec, Node node, StringBuilder buffer) {
boolean hasDoc = false;
Node rn = getReferencedNode(dec, cpc);
if (rn instanceof Tree.Declaration) {
Tree.Declaration refnode = (Tree.Declaration) rn;
appendDeprecatedAnnotationContent(refnode.getAnnotationList(),
buffer, resolveScope(dec));
int len = buffer.length();
appendDocAnnotationContent(refnode.getAnnotationList(),
buffer, resolveScope(dec));
hasDoc = buffer.length()!=len;
appendThrowAnnotationContent(refnode.getAnnotationList(),
buffer, resolveScope(dec));
appendSeeAnnotationContent(refnode.getAnnotationList(),
buffer);
}
else {
appendJavadoc(dec, cpc.getProject(), buffer, node);
}
return hasDoc;
}
private static void addContainerInfo(Declaration dec, Node node,
StringBuilder buffer) {
buffer.append("<p>");
if (dec.isParameter()) {
Declaration pd =
((MethodOrValue) dec).getInitializerParameter()
.getDeclaration();
if (pd.getName().startsWith("anonymous#")) {
buffer.append("Parameter of anonymous function.");
}
else {
buffer.append("Parameter of ");
appendLink(buffer, pd);
buffer.append(".");
}
// HTML.addImageAndLabel(buffer, pd,
// HTML.fileUrl(getIcon(pd)).toExternalForm(),
// 16, 16,
// "<span style='font-size:96%'>parameter of <tt><a " + HTML.link(pd) + ">" +
// pd.getName() +"</a></tt><span>", 20, 2);
}
else if (dec instanceof TypeParameter) {
Declaration pd = ((TypeParameter) dec).getDeclaration();
buffer.append("Type parameter of ");
appendLink(buffer, pd);
buffer.append(".");
// HTML.addImageAndLabel(buffer, pd,
// HTML.fileUrl(getIcon(pd)).toExternalForm(),
// 16, 16,
// "<span style='font-size:96%'>type parameter of <tt><a " + HTML.link(pd) + ">" +
// pd.getName() +"</a></tt></span>",
// 20, 2);
}
else {
if (dec.isClassOrInterfaceMember()) {
ClassOrInterface outer = (ClassOrInterface) dec.getContainer();
ProducedType qt = getQualifyingType(node, outer);
if (qt!=null) {
Unit unit = node==null ? null : node.getUnit();
buffer.append("Member of <tt>" +
producedTypeLink(qt, unit) + "</tt>.");
// HTML.addImageAndLabel(buffer, outer,
// HTML.fileUrl(getIcon(outer)).toExternalForm(),
// 16, 16,
// "<span style='font-size:96%'>member of <tt>" +
// producedTypeLink(qt, unit) + "</tt></span>",
// 20, 2);
}
}
}
buffer.append("</p>");
}
private static void addPackageInfo(Declaration dec,
StringBuilder buffer) {
buffer.append("<p>");
Package pack = dec.getUnit().getPackage();
if ((dec.isShared() || dec.isToplevel()) &&
!(dec instanceof NothingType)) {
String label;
if (pack.getNameAsString().isEmpty()) {
label = "<span style='font-size:96%'>in default package</span>";
}
else {
label = "<span style='font-size:96%'>in package " +
link(pack) + "</span>";
}
HTML.addImageAndLabel(buffer, pack,
HTML.fileUrl(getIcon(pack)).toExternalForm(),
16, 16, label, 20, 2);
Module mod = pack.getModule();
HTML.addImageAndLabel(buffer, mod,
HTML.fileUrl(getIcon(mod)).toExternalForm(),
16, 16,
"<span style='font-size:96%'>in module " +
link(mod) + "</span>",
20, 2);
}
buffer.append("</p>");
}
private static ProducedType getQualifyingType(Node node,
ClassOrInterface outer) {
if (outer == null) {
return null;
}
if (node instanceof Tree.MemberOrTypeExpression) {
ProducedReference pr = ((Tree.MemberOrTypeExpression) node).getTarget();
if (pr!=null) {
return pr.getQualifyingType();
}
}
if (node instanceof Tree.QualifiedType) {
return ((Tree.QualifiedType) node).getOuterType().getTypeModel();
}
return outer.getType();
}
private static void addUnitInfo(Declaration dec,
StringBuilder buffer) {
buffer.append("<p>");
String unitName = null;
if (dec.getUnit() instanceof CeylonUnit) {
// Handle the case of CeylonBinaryUnit: getFileName() would return the class file name,
// but getCeylonFileName() returns the Ceylon source file name, if any.
unitName = ((CeylonUnit)dec.getUnit()).getCeylonFileName();
}
if (unitName == null) {
unitName = dec.getUnit().getFilename();
}
HTML.addImageAndLabel(buffer, null,
HTML.fileUrl("unit.gif").toExternalForm(),
16, 16,
"<span style='font-size:96%'>declared in <tt><a href='dec:" +
HTML.declink(dec) + "'>"+ unitName + "</a></tt></span>",
20, 2);
//}
buffer.append("</p>");
}
private static void appendExtraActions(Declaration dec,
StringBuilder buffer) {
buffer.append("<p>");
HTML.addImageAndLabel(buffer, null,
HTML.fileUrl("search_ref_obj.png").toExternalForm(),
16, 16,
"<span style='font-size:96%'><a href='ref:" + HTML.declink(dec) +
"'>find references</a> to <tt>" +
dec.getName() + "</tt></span>",
20, 2);
if (dec instanceof ClassOrInterface) {
HTML.addImageAndLabel(buffer, null,
HTML.fileUrl("search_decl_obj.png").toExternalForm(),
16, 16,
"<span style='font-size:96%'><a href='sub:" + HTML.declink(dec) +
"'>find subtypes</a> of <tt>" +
dec.getName() + "</tt></span>",
20, 2);
}
if (dec instanceof MethodOrValue ||
dec instanceof TypeParameter) {
HTML.addImageAndLabel(buffer, null,
HTML.fileUrl("search_ref_obj.png").toExternalForm(),
16, 16,
"<span style='font-size:96%'><a href='ass:" + HTML.declink(dec) +
"'>find assignments</a> to <tt>" +
dec.getName() + "</tt></span>",
20, 2);
}
if (dec.isFormal() || dec.isDefault()) {
HTML.addImageAndLabel(buffer, null,
HTML.fileUrl("search_decl_obj.png").toExternalForm(),
16, 16,
"<span style='font-size:96%'><a href='act:" + HTML.declink(dec) +
"'>find refinements</a> of <tt>" +
dec.getName() + "</tt></span>",
20, 2);
}
buffer.append("</p>");
}
private static void documentInheritance(TypeDeclaration dec,
Node node, ProducedReference pr, StringBuilder buffer,
Unit unit) {
if (pr==null) {
pr = getProducedReference(dec, node);
}
ProducedType type;
if (pr instanceof ProducedType) {
type = (ProducedType) pr;
}
else {
type = dec.getType();
}
List<ProducedType> cts = type.getCaseTypes();
if (cts!=null) {
StringBuilder cases = new StringBuilder();
for (ProducedType ct: cts) {
if (cases.length()>0) {
cases.append(" | ");
}
cases.append(producedTypeLink(ct, unit));
}
if (dec.getSelfType()!=null) {
cases.append(" (self type)");
}
HTML.addImageAndLabel(buffer, null,
HTML.fileUrl("sub.gif").toExternalForm(),
16, 16,
" <tt style='font-size:96%'>of " + cases +"</tt>",
20, 2);
}
if (dec instanceof Class) {
ProducedType sup = type.getExtendedType();
if (sup!=null) {
HTML.addImageAndLabel(buffer, sup.getDeclaration(),
HTML.fileUrl("superclass.gif").toExternalForm(),
16, 16,
"<tt style='font-size:96%'>extends " +
producedTypeLink(sup, unit) +"</tt>",
20, 2);
}
}
List<ProducedType> sts = type.getSatisfiedTypes();
if (!sts.isEmpty()) {
StringBuilder satisfies = new StringBuilder();
for (ProducedType st: sts) {
if (satisfies.length()>0) {
satisfies.append(" & ");
}
satisfies.append(producedTypeLink(st, unit));
}
HTML.addImageAndLabel(buffer, null,
HTML.fileUrl("super.gif").toExternalForm(),
16, 16,
"<tt style='font-size:96%'>satisfies " + satisfies +"</tt>",
20, 2);
}
}
private static void documentTypeParameters(Declaration dec,
Node node, ProducedReference pr, StringBuilder buffer,
Unit unit) {
if (pr==null) {
pr = getProducedReference(dec, node);
}
List<TypeParameter> typeParameters;
if (dec instanceof Functional) {
typeParameters = ((Functional) dec).getTypeParameters();
}
else if (dec instanceof Interface) {
typeParameters = ((Interface) dec).getTypeParameters();
}
else {
typeParameters = Collections.emptyList();
}
for (TypeParameter tp: typeParameters) {
StringBuilder bounds = new StringBuilder();
for (ProducedType st: tp.getSatisfiedTypes()) {
if (bounds.length() == 0) {
bounds.append(" satisfies ");
}
else {
bounds.append(" & ");
}
bounds.append(producedTypeLink(st, dec.getUnit()));
}
String arg;
ProducedType typeArg = pr==null ? null : pr.getTypeArguments().get(tp);
if (typeArg!=null && !tp.getType().isExactly(typeArg)) {
arg = " = " + producedTypeLink(typeArg, unit);
}
else {
arg = "";
}
HTML.addImageAndLabel(buffer, tp,
HTML.fileUrl(getIcon(tp)).toExternalForm(),
16, 16,
"<tt style='font-size:96%'>given <a " + HTML.link(tp) + ">" +
tp.getName() + "</a>" + bounds + arg + "</tt>",
20, 4);
}
}
private static String description(Declaration dec, Node node,
ProducedReference pr, CeylonParseController cpc, Unit unit) {
if (pr==null) {
pr = getProducedReference(dec, node);
}
String description = getDocDescriptionFor(dec, pr, unit);
if (dec instanceof TypeDeclaration) {
TypeDeclaration td = (TypeDeclaration) dec;
if (td.isAlias() && td.getExtendedType()!=null) {
description += " => " +
td.getExtendedType().getProducedTypeName();
}
}
if (dec instanceof Value && !isVariable(dec) ||
dec instanceof Method) {
description += getInitialValueDescription(dec, cpc);
}
return HTML.highlightLine(description);
}
private static void appendJavadoc(Declaration model, IProject project,
StringBuilder buffer, Node node) {
try {
appendJavadoc(getJavaElement(model), buffer);
}
catch (JavaModelException jme) {
jme.printStackTrace();
}
}
private static void appendDocAnnotationContent(Tree.AnnotationList annotationList,
StringBuilder documentation, Scope linkScope) {
if (annotationList!=null) {
AnonymousAnnotation aa = annotationList.getAnonymousAnnotation();
if (aa!=null) {
documentation.append(markdown(aa.getStringLiteral().getText(),
linkScope, annotationList.getUnit()));
// HTML.addImageAndLabel(documentation, null,
// HTML.fileUrl("toc_obj.gif").toExternalForm(),
// 16, 16,
// markdown(aa.getStringLiteral().getText(),
// linkScope, annotationList.getUnit()),
// 20, 0);
}
for (Tree.Annotation annotation : annotationList.getAnnotations()) {
Tree.Primary annotPrim = annotation.getPrimary();
if (annotPrim instanceof Tree.BaseMemberExpression) {
String name = ((Tree.BaseMemberExpression) annotPrim).getIdentifier().getText();
if ("doc".equals(name)) {
Tree.PositionalArgumentList argList = annotation.getPositionalArgumentList();
if (argList!=null) {
List<Tree.PositionalArgument> args = argList.getPositionalArguments();
if (!args.isEmpty()) {
Tree.PositionalArgument a = args.get(0);
if (a instanceof Tree.ListedArgument) {
String text = ((Tree.ListedArgument) a).getExpression()
.getTerm().getText();
if (text!=null) {
documentation.append(markdown(text, linkScope,
annotationList.getUnit()));
}
}
}
}
}
}
}
}
}
private static void appendDeprecatedAnnotationContent(Tree.AnnotationList annotationList,
StringBuilder documentation, Scope linkScope) {
if (annotationList!=null) {
for (Tree.Annotation annotation : annotationList.getAnnotations()) {
Tree.Primary annotPrim = annotation.getPrimary();
if (annotPrim instanceof Tree.BaseMemberExpression) {
String name = ((Tree.BaseMemberExpression) annotPrim).getIdentifier().getText();
if ("deprecated".equals(name)) {
Tree.PositionalArgumentList argList = annotation.getPositionalArgumentList();
if (argList!=null) {
List<Tree.PositionalArgument> args = argList.getPositionalArguments();
if (!args.isEmpty()) {
Tree.PositionalArgument a = args.get(0);
if (a instanceof Tree.ListedArgument) {
String text = ((Tree.ListedArgument) a).getExpression()
.getTerm().getText();
if (text!=null) {
documentation.append(markdown("_(This is a deprecated program element.)_\n\n" + text,
linkScope, annotationList.getUnit()));
}
}
}
}
}
}
}
}
}
private static void appendSeeAnnotationContent(Tree.AnnotationList annotationList,
StringBuilder documentation) {
if (annotationList!=null) {
for (Tree.Annotation annotation : annotationList.getAnnotations()) {
Tree.Primary annotPrim = annotation.getPrimary();
if (annotPrim instanceof Tree.BaseMemberExpression) {
String name = ((Tree.BaseMemberExpression) annotPrim).getIdentifier().getText();
if ("see".equals(name)) {
Tree.PositionalArgumentList argList = annotation.getPositionalArgumentList();
if (argList!=null) {
StringBuilder sb = new StringBuilder();
List<Tree.PositionalArgument> args = argList.getPositionalArguments();
for (Tree.PositionalArgument arg: args) {
if (arg instanceof Tree.ListedArgument) {
Tree.Term term = ((Tree.ListedArgument) arg).getExpression().getTerm();
if (term instanceof Tree.MetaLiteral) {
Declaration dec = ((Tree.MetaLiteral) term).getDeclaration();
if (dec!=null) {
String dn = dec.getName();
if (dec.isClassOrInterfaceMember()) {
dn = ((ClassOrInterface) dec.getContainer()).getName() + "." + dn;
}
if (sb.length()!=0) sb.append(", ");
sb.append("<tt><a "+HTML.link(dec)+">"+dn+"</a></tt>");
}
}
}
}
if (sb.length()!=0) {
HTML.addImageAndLabel(documentation, null,
HTML.fileUrl("link_obj.gif"/*getIcon(dec)*/).toExternalForm(),
16, 16,
"see " + sb + ".",
20, 2);
}
}
}
}
}
}
}
private static void appendThrowAnnotationContent(Tree.AnnotationList annotationList,
StringBuilder documentation, Scope linkScope) {
if (annotationList!=null) {
for (Tree.Annotation annotation : annotationList.getAnnotations()) {
Tree.Primary annotPrim = annotation.getPrimary();
if (annotPrim instanceof Tree.BaseMemberExpression) {
String name = ((Tree.BaseMemberExpression) annotPrim).getIdentifier().getText();
if ("throws".equals(name)) {
Tree.PositionalArgumentList argList = annotation.getPositionalArgumentList();
if (argList!=null) {
List<Tree.PositionalArgument> args = argList.getPositionalArguments();
if (args.isEmpty()) continue;
Tree.PositionalArgument typeArg = args.get(0);
Tree.PositionalArgument textArg = args.size()>1 ? args.get(1) : null;
if (typeArg instanceof Tree.ListedArgument &&
(textArg==null || textArg instanceof Tree.ListedArgument)) {
Tree.Term typeArgTerm = ((Tree.ListedArgument) typeArg).getExpression().getTerm();
Tree.Term textArgTerm = textArg==null ? null : ((Tree.ListedArgument) textArg).getExpression().getTerm();
String text = textArgTerm instanceof Tree.StringLiteral ?
textArgTerm.getText() : "";
if (typeArgTerm instanceof Tree.MetaLiteral) {
Declaration dec = ((Tree.MetaLiteral) typeArgTerm).getDeclaration();
if (dec!=null) {
String dn = dec.getName();
if (typeArgTerm instanceof Tree.QualifiedMemberOrTypeExpression) {
Tree.Primary p = ((Tree.QualifiedMemberOrTypeExpression) typeArgTerm).getPrimary();
if (p instanceof Tree.MemberOrTypeExpression) {
dn = ((Tree.MemberOrTypeExpression) p).getDeclaration().getName()
+ "." + dn;
}
}
HTML.addImageAndLabel(documentation, dec,
HTML.fileUrl("ihigh_obj.gif"/*getIcon(dec)*/).toExternalForm(),
16, 16,
"throws <tt><a "+HTML.link(dec)+">"+dn+"</a></tt>" +
markdown(text, linkScope, annotationList.getUnit()),
20, 2);
}
}
}
}
}
}
}
}
}
private static String markdown(String text, final Scope linkScope, final Unit unit) {
if (text == null || text.isEmpty()) {
return text;
}
Builder builder = Configuration.builder().forceExtentedProfile();
builder.setCodeBlockEmitter(new CeylonBlockEmitter());
if (linkScope!=null && unit!=null) {
builder.setSpecialLinkEmitter(new CeylonSpanEmitter(linkScope, unit));
}
else {
builder.setSpecialLinkEmitter(new UnlinkedSpanEmitter());
}
return Processor.process(text, builder.build());
}
private static Scope resolveScope(Declaration decl) {
if (decl == null) {
return null;
}
else if (decl instanceof Scope) {
return (Scope) decl;
}
else {
return decl.getContainer();
}
}
static Module resolveModule(Scope scope) {
if (scope == null) {
return null;
}
else if (scope instanceof Package) {
return ((Package) scope).getModule();
}
else {
return resolveModule(scope.getContainer());
}
}
/**
* Creates the "enriched" control.
*/
private final class PresenterControlCreator extends AbstractReusableInformationControlCreator {
private final DocumentationHover docHover;
PresenterControlCreator(DocumentationHover docHover) {
this.docHover = docHover;
}
@Override
public IInformationControl doCreateInformationControl(Shell parent) {
if (isAvailable(parent)) {
ToolBarManager tbm = new ToolBarManager(SWT.FLAT);
BrowserInformationControl control = new BrowserInformationControl(parent,
APPEARANCE_JAVADOC_FONT, tbm);
final BackAction backAction = new BackAction(control);
backAction.setEnabled(false);
tbm.add(backAction);
final ForwardAction forwardAction = new ForwardAction(control);
tbm.add(forwardAction);
forwardAction.setEnabled(false);
//final ShowInJavadocViewAction showInJavadocViewAction= new ShowInJavadocViewAction(iControl);
//tbm.add(showInJavadocViewAction);
final OpenDeclarationAction openDeclarationAction = new OpenDeclarationAction(control);
tbm.add(openDeclarationAction);
// final SimpleSelectionProvider selectionProvider = new SimpleSelectionProvider();
//TODO: an action to open the generated ceylondoc
// from the doc archive, in a browser window
/*if (fSite != null) {
OpenAttachedJavadocAction openAttachedJavadocAction= new OpenAttachedJavadocAction(fSite);
openAttachedJavadocAction.setSpecialSelectionProvider(selectionProvider);
openAttachedJavadocAction.setImageDescriptor(DESC_ELCL_OPEN_BROWSER);
openAttachedJavadocAction.setDisabledImageDescriptor(DESC_DLCL_OPEN_BROWSER);
selectionProvider.addSelectionChangedListener(openAttachedJavadocAction);
selectionProvider.setSelection(new StructuredSelection());
tbm.add(openAttachedJavadocAction);
}*/
IInputChangedListener inputChangeListener = new IInputChangedListener() {
public void inputChanged(Object newInput) {
backAction.update();
forwardAction.update();
// if (newInput == null) {
// selectionProvider.setSelection(new StructuredSelection());
// }
// else
boolean isDeclaration = false;
if (newInput instanceof CeylonBrowserInput) {
// Object inputElement = ((CeylonBrowserInput) newInput).getInputElement();
// selectionProvider.setSelection(new StructuredSelection(inputElement));
//showInJavadocViewAction.setEnabled(isJavaElementInput);
isDeclaration = ((CeylonBrowserInput) newInput).getAddress()!=null;
}
openDeclarationAction.setEnabled(isDeclaration);
}
};
control.addInputChangeListener(inputChangeListener);
tbm.update(true);
docHover.addLinkListener(control);
return control;
}
else {
return new DefaultInformationControl(parent, true);
}
}
}
private final class HoverControlCreator extends AbstractReusableInformationControlCreator {
private final DocumentationHover docHover;
private String statusLineMessage;
private final IInformationControlCreator enrichedControlCreator;
HoverControlCreator(DocumentationHover docHover,
IInformationControlCreator enrichedControlCreator,
String statusLineMessage) {
this.docHover = docHover;
this.enrichedControlCreator = enrichedControlCreator;
this.statusLineMessage = statusLineMessage;
}
@Override
public IInformationControl doCreateInformationControl(Shell parent) {
if (enrichedControlCreator!=null && isAvailable(parent)) {
BrowserInformationControl control = new BrowserInformationControl(parent,
APPEARANCE_JAVADOC_FONT, statusLineMessage) {
@Override
public IInformationControlCreator getInformationPresenterControlCreator() {
return enrichedControlCreator;
}
};
if (docHover!=null) {
docHover.addLinkListener(control);
}
return control;
}
else {
return new DefaultInformationControl(parent, statusLineMessage);
}
}
}
} | 1no label
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_hover_DocumentationHover.java |
33 | @Service("blProductFieldService")
public class ProductFieldServiceImpl extends AbstractRuleBuilderFieldService {
@Override
public void init() {
fields.add(new FieldData.Builder()
.label("rule_productUrl")
.name("url")
.operators("blcOperators_Text")
.options("[]")
.type(SupportedFieldType.STRING)
.build());
fields.add(new FieldData.Builder()
.label("rule_productUrlKey")
.name("urlKey")
.operators("blcOperators_Text")
.options("[]")
.type(SupportedFieldType.STRING)
.build());
fields.add(new FieldData.Builder()
.label("rule_productIsFeatured")
.name("isFeaturedProduct")
.operators("blcOperators_Boolean")
.options("[]")
.type(SupportedFieldType.BOOLEAN)
.build());
fields.add(new FieldData.Builder()
.label("rule_productManufacturer")
.name("manufacturer")
.operators("blcOperators_Text")
.options("[]")
.type(SupportedFieldType.STRING)
.build());
fields.add(new FieldData.Builder()
.label("rule_productModel")
.name("model")
.operators("blcOperators_Text")
.options("[]")
.type(SupportedFieldType.STRING)
.build());
fields.add(new FieldData.Builder()
.label("rule_productSkuName")
.name("defaultSku.name")
.operators("blcOperators_Text")
.options("[]")
.type(SupportedFieldType.STRING)
.build());
fields.add(new FieldData.Builder()
.label("rule_productSkuFulfillmentType")
.name("defaultSku.fulfillmentType")
.operators("blcOperators_Enumeration")
.options("blcOptions_FulfillmentType")
.type(SupportedFieldType.BROADLEAF_ENUMERATION)
.build());
fields.add(new FieldData.Builder()
.label("rule_productSkuInventoryType")
.name("defaultSku.inventoryType")
.operators("blcOperators_Enumeration")
.options("blcOptions_InventoryType")
.type(SupportedFieldType.BROADLEAF_ENUMERATION)
.build());
fields.add(new FieldData.Builder()
.label("rule_productSkuDescription")
.name("defaultSku.description")
.operators("blcOperators_Text")
.options("[]")
.type(SupportedFieldType.STRING)
.build());
fields.add(new FieldData.Builder()
.label("rule_productSkuLongDescription")
.name("defaultSku.longDescription")
.operators("blcOperators_Text")
.options("[]")
.type(SupportedFieldType.STRING)
.build());
fields.add(new FieldData.Builder()
.label("rule_productSkuTaxable")
.name("defaultSku.taxable")
.operators("blcOperators_Boolean")
.options("[]")
.type(SupportedFieldType.BOOLEAN)
.build());
fields.add(new FieldData.Builder()
.label("rule_productSkuAvailable")
.name("defaultSku.available")
.operators("blcOperators_Boolean")
.options("[]")
.type(SupportedFieldType.BOOLEAN)
.build());
fields.add(new FieldData.Builder()
.label("rule_productSkuStartDate")
.name("defaultSku.activeStartDate")
.operators("blcOperators_Date")
.options("[]")
.type(SupportedFieldType.DATE)
.build());
fields.add(new FieldData.Builder()
.label("rule_productSkuEndDate")
.name("defaultSku.activeEndDate")
.operators("blcOperators_Date")
.options("[]")
.type(SupportedFieldType.DATE)
.build());
}
@Override
public String getName() {
return RuleIdentifier.PRODUCT;
}
@Override
public String getDtoClassName() {
return "org.broadleafcommerce.core.catalog.domain.ProductImpl";
}
} | 0true
| admin_broadleaf-admin-module_src_main_java_org_broadleafcommerce_admin_web_rulebuilder_service_ProductFieldServiceImpl.java |
45 | public class HeartbeatIAmAliveProcessor implements MessageProcessor
{
private final MessageHolder output;
private final ClusterContext clusterContext;
public HeartbeatIAmAliveProcessor( MessageHolder output, ClusterContext clusterContext )
{
this.output = output;
this.clusterContext = clusterContext;
}
@Override
public boolean process( Message<? extends MessageType> message )
{
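// Any external message other than an i_am_alive itself counts as evidence that the sender is alive.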
if ( !message.isInternal() &&
!message.getMessageType().equals( HeartbeatMessage.i_am_alive ) )
{
// We assume the FROM header always exists.
String from = message.getHeader( Message.FROM );
if ( !from.equals( message.getHeader( Message.TO ) ) )
{
InstanceId theId;
if ( message.hasHeader( Message.INSTANCE_ID ) )
{
// The INSTANCE_ID header has been included since version 1.9.6
theId = new InstanceId( Integer.parseInt( message.getHeader( Message.INSTANCE_ID ) ) );
}
else
{
theId = clusterContext.getConfiguration().getIdForUri( URI.create( from ) );
}
if ( theId != null && clusterContext.getConfiguration().getMembers().containsKey( theId )
&& !clusterContext.isMe( theId ) )
{
output.offer( message.copyHeadersTo(
Message.internal( HeartbeatMessage.i_am_alive,
new HeartbeatMessage.IAmAliveState( theId ) ),
Message.FROM, Message.INSTANCE_ID ) );
}
}
}
return true;
}
} | 1no label
| enterprise_cluster_src_main_java_org_neo4j_cluster_protocol_heartbeat_HeartbeatIAmAliveProcessor.java |
360 | public class FilterDefinition {
protected String name;
protected List<FilterParameter> params;
protected String entityImplementationClassName;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public List<FilterParameter> getParams() {
return params;
}
public void setParams(List<FilterParameter> params) {
this.params = params;
}
public String getEntityImplementationClassName() {
return entityImplementationClassName;
}
public void setEntityImplementationClassName(String entityImplementationClassName) {
this.entityImplementationClassName = entityImplementationClassName;
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_filter_FilterDefinition.java |
2,090 | public class PutAllOperation extends AbstractMapOperation implements PartitionAwareOperation, BackupAwareOperation {
private MapEntrySet entrySet;
private boolean initialLoad = false;
private List<Map.Entry<Data, Data>> backupEntrySet;
private List<RecordInfo> backupRecordInfos;
public PutAllOperation(String name, MapEntrySet entrySet) {
super(name);
this.entrySet = entrySet;
}
public PutAllOperation(String name, MapEntrySet entrySet, boolean initialLoad) {
super(name);
this.entrySet = entrySet;
this.initialLoad = initialLoad;
}
public PutAllOperation() {
}
public void run() {
backupRecordInfos = new ArrayList<RecordInfo>();
backupEntrySet = new ArrayList<Map.Entry<Data, Data>>();
int partitionId = getPartitionId();
RecordStore recordStore = mapService.getRecordStore(partitionId, name);
Set<Map.Entry<Data, Data>> entries = entrySet.getEntrySet();
InternalPartitionService partitionService = getNodeEngine().getPartitionService();
Set<Data> keysToInvalidate = new HashSet<Data>();
for (Map.Entry<Data, Data> entry : entries) {
Data dataKey = entry.getKey();
Data dataValue = entry.getValue();
if (partitionId == partitionService.getPartitionId(dataKey)) {
Data dataOldValue = null;
if (initialLoad) {
recordStore.putFromLoad(dataKey, dataValue, -1);
} else {
dataOldValue = mapService.toData(recordStore.put(dataKey, dataValue, -1));
}
mapService.interceptAfterPut(name, dataValue);
EntryEventType eventType = dataOldValue == null ? EntryEventType.ADDED : EntryEventType.UPDATED;
mapService.publishEvent(getCallerAddress(), name, eventType, dataKey, dataOldValue, dataValue);
keysToInvalidate.add(dataKey);
if (mapContainer.getWanReplicationPublisher() != null && mapContainer.getWanMergePolicy() != null) {
Record record = recordStore.getRecord(dataKey);
final SimpleEntryView entryView = mapService.createSimpleEntryView(dataKey, mapService.toData(dataValue), record);
mapService.publishWanReplicationUpdate(name, entryView);
}
backupEntrySet.add(entry);
RecordInfo replicationInfo = mapService.createRecordInfo(recordStore.getRecord(dataKey));
backupRecordInfos.add(replicationInfo);
}
}
invalidateNearCaches(keysToInvalidate);
}
protected final void invalidateNearCaches(Set<Data> keys) {
if (mapService.isNearCacheAndInvalidationEnabled(name)) {
mapService.invalidateAllNearCaches(name, keys);
}
}
@Override
public Object getResponse() {
return true;
}
@Override
public String toString() {
return "PutAllOperation{" +
'}';
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
super.writeInternal(out);
out.writeObject(entrySet);
out.writeBoolean(initialLoad);
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
super.readInternal(in);
entrySet = in.readObject();
initialLoad = in.readBoolean();
}
@Override
public boolean shouldBackup() {
return !backupEntrySet.isEmpty();
}
public final int getAsyncBackupCount() {
return mapContainer.getAsyncBackupCount();
}
public final int getSyncBackupCount() {
return mapContainer.getBackupCount();
}
@Override
public Operation getBackupOperation() {
return new PutAllBackupOperation(name, backupEntrySet, backupRecordInfos);
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_map_operation_PutAllOperation.java |
1,623 | public class ClusterDynamicSettingsModule extends AbstractModule {
private final DynamicSettings clusterDynamicSettings;
public ClusterDynamicSettingsModule() {
clusterDynamicSettings = new DynamicSettings();
clusterDynamicSettings.addDynamicSetting(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTES);
clusterDynamicSettings.addDynamicSetting(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP + "*");
clusterDynamicSettings.addDynamicSetting(BalancedShardsAllocator.SETTING_INDEX_BALANCE_FACTOR, Validator.FLOAT);
clusterDynamicSettings.addDynamicSetting(BalancedShardsAllocator.SETTING_PRIMARY_BALANCE_FACTOR, Validator.FLOAT);
clusterDynamicSettings.addDynamicSetting(BalancedShardsAllocator.SETTING_SHARD_BALANCE_FACTOR, Validator.FLOAT);
clusterDynamicSettings.addDynamicSetting(BalancedShardsAllocator.SETTING_THRESHOLD, Validator.NON_NEGATIVE_FLOAT);
clusterDynamicSettings.addDynamicSetting(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE, Validator.INTEGER);
clusterDynamicSettings.addDynamicSetting(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE);
clusterDynamicSettings.addDynamicSetting(DisableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_DISABLE_NEW_ALLOCATION);
clusterDynamicSettings.addDynamicSetting(DisableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_DISABLE_ALLOCATION);
clusterDynamicSettings.addDynamicSetting(DisableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_DISABLE_REPLICA_ALLOCATION);
clusterDynamicSettings.addDynamicSetting(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES, Validator.INTEGER);
clusterDynamicSettings.addDynamicSetting(FilterAllocationDecider.CLUSTER_ROUTING_INCLUDE_GROUP + "*");
clusterDynamicSettings.addDynamicSetting(FilterAllocationDecider.CLUSTER_ROUTING_EXCLUDE_GROUP + "*");
clusterDynamicSettings.addDynamicSetting(FilterAllocationDecider.CLUSTER_ROUTING_REQUIRE_GROUP + "*");
clusterDynamicSettings.addDynamicSetting(IndicesFilterCache.INDICES_CACHE_FILTER_SIZE);
clusterDynamicSettings.addDynamicSetting(IndicesFilterCache.INDICES_CACHE_FILTER_EXPIRE, Validator.TIME);
clusterDynamicSettings.addDynamicSetting(IndicesStore.INDICES_STORE_THROTTLE_TYPE);
clusterDynamicSettings.addDynamicSetting(IndicesStore.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC, Validator.BYTES_SIZE);
clusterDynamicSettings.addDynamicSetting(IndicesTTLService.INDICES_TTL_INTERVAL, Validator.TIME);
clusterDynamicSettings.addDynamicSetting(MetaData.SETTING_READ_ONLY);
clusterDynamicSettings.addDynamicSetting(RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE, Validator.BYTES_SIZE);
clusterDynamicSettings.addDynamicSetting(RecoverySettings.INDICES_RECOVERY_TRANSLOG_OPS, Validator.INTEGER);
clusterDynamicSettings.addDynamicSetting(RecoverySettings.INDICES_RECOVERY_TRANSLOG_SIZE, Validator.BYTES_SIZE);
clusterDynamicSettings.addDynamicSetting(RecoverySettings.INDICES_RECOVERY_COMPRESS);
clusterDynamicSettings.addDynamicSetting(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS, Validator.POSITIVE_INTEGER);
clusterDynamicSettings.addDynamicSetting(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, Validator.POSITIVE_INTEGER);
clusterDynamicSettings.addDynamicSetting(RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC, Validator.BYTES_SIZE);
clusterDynamicSettings.addDynamicSetting(RecoverySettings.INDICES_RECOVERY_MAX_SIZE_PER_SEC, Validator.BYTES_SIZE);
clusterDynamicSettings.addDynamicSetting(ThreadPool.THREADPOOL_GROUP + "*");
clusterDynamicSettings.addDynamicSetting(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, Validator.INTEGER);
clusterDynamicSettings.addDynamicSetting(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, Validator.INTEGER);
clusterDynamicSettings.addDynamicSetting(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK);
clusterDynamicSettings.addDynamicSetting(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK);
clusterDynamicSettings.addDynamicSetting(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED);
clusterDynamicSettings.addDynamicSetting(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, Validator.TIME);
clusterDynamicSettings.addDynamicSetting(SnapshotInProgressAllocationDecider.CLUSTER_ROUTING_ALLOCATION_SNAPSHOT_RELOCATION_ENABLED);
clusterDynamicSettings.addDynamicSetting(InternalCircuitBreakerService.CIRCUIT_BREAKER_MAX_BYTES_SETTING, Validator.MEMORY_SIZE);
clusterDynamicSettings.addDynamicSetting(InternalCircuitBreakerService.CIRCUIT_BREAKER_OVERHEAD_SETTING, Validator.NON_NEGATIVE_DOUBLE);
clusterDynamicSettings.addDynamicSetting(DestructiveOperations.REQUIRES_NAME);
}
public void addDynamicSettings(String... settings) {
clusterDynamicSettings.addDynamicSettings(settings);
}
public void addDynamicSetting(String setting, Validator validator) {
clusterDynamicSettings.addDynamicSetting(setting, validator);
}
@Override
protected void configure() {
bind(DynamicSettings.class).annotatedWith(ClusterDynamicSettings.class).toInstance(clusterDynamicSettings);
}
} | 1no label
| src_main_java_org_elasticsearch_cluster_settings_ClusterDynamicSettingsModule.java |
53 | public abstract class OAbstractLock implements OLock {
@Override
public <V> V callInLock(final Callable<V> iCallback) throws Exception {
lock();
try {
return iCallback.call();
} finally {
unlock();
}
}
} | 0true
| commons_src_main_java_com_orientechnologies_common_concur_lock_OAbstractLock.java |
515 | public class SystemTimeTest extends TestCase {
private TimeSource mockTimeSource;
protected void setUp() throws Exception {
super.setUp();
mockTimeSource = createMock(TimeSource.class);
}
protected void tearDown() throws Exception {
SystemTime.reset();
super.tearDown();
}
/**
* Test method for {@link SystemTime#setGlobalTimeSource(TimeSource)}.
*/
public void testSetGlobalTimeSource() {
expect(mockTimeSource.timeInMillis()).andReturn(100L).atLeastOnce();
replay(mockTimeSource);
SystemTime.setGlobalTimeSource(mockTimeSource);
assertEquals(100L, SystemTime.asMillis());
verify();
}
/**
* Test method for {@link SystemTime#resetGlobalTimeSource()}.
*/
public void testResetGlobalTimeSource() {
expect(mockTimeSource.timeInMillis()).andReturn(200L).anyTimes();
replay(mockTimeSource);
SystemTime.setGlobalTimeSource(mockTimeSource);
SystemTime.resetGlobalTimeSource();
assertTrue(200L != SystemTime.asMillis());
verify();
}
/**
* Test method for {@link SystemTime#setLocalTimeSource(TimeSource)}.
*/
public void testSetLocalTimeSource() {
expect(mockTimeSource.timeInMillis()).andReturn(300L).atLeastOnce();
replay(mockTimeSource);
SystemTime.setLocalTimeSource(mockTimeSource);
assertEquals(300L, SystemTime.asMillis());
verify();
}
/**
* Test method for {@link SystemTime#resetLocalTimeSource()}.
*/
public void testResetLocalTimeSource() {
expect(mockTimeSource.timeInMillis()).andReturn(400L).anyTimes();
replay(mockTimeSource);
SystemTime.setLocalTimeSource(mockTimeSource);
SystemTime.resetLocalTimeSource();
assertTrue(400L != SystemTime.asMillis());
verify();
}
/**
* Test that a time source set via {@link SystemTime#setLocalTimeSource(TimeSource)} overrides the global time source.
*/
public void testLocalOverridesGlobal() {
TimeSource mockLocalTimeSource = createMock(TimeSource.class);
expect(mockTimeSource.timeInMillis()).andReturn(500L).anyTimes();
expect(mockLocalTimeSource.timeInMillis()).andReturn(600L).atLeastOnce();
replay(mockTimeSource, mockLocalTimeSource);
SystemTime.setGlobalTimeSource(mockTimeSource);
SystemTime.setLocalTimeSource(mockLocalTimeSource);
assertEquals(600L, SystemTime.asMillis());
SystemTime.resetLocalTimeSource();
assertEquals(500L, SystemTime.asMillis());
verify();
}
/**
* Test method for {@link SystemTime#reset()}.
*/
public void testReset() {
TimeSource mockLocalTimeSource = createMock(TimeSource.class);
expect(mockTimeSource.timeInMillis()).andReturn(700L).anyTimes();
expect(mockLocalTimeSource.timeInMillis()).andReturn(800L).anyTimes();
replay(mockTimeSource, mockLocalTimeSource);
SystemTime.setGlobalTimeSource(mockTimeSource);
SystemTime.setLocalTimeSource(mockLocalTimeSource);
SystemTime.reset();
assertTrue(SystemTime.asMillis() > 800L);
verify();
}
/**
* Test method for {@link SystemTime#asMillis()}.
*/
public void testAsMillis() {
expect(mockTimeSource.timeInMillis()).andReturn(1000L).atLeastOnce();
replay(mockTimeSource);
SystemTime.setGlobalTimeSource(mockTimeSource);
assertEquals(1000L, SystemTime.asMillis());
verify();
}
/**
* Test method for {@link SystemTime#asDate()}.
*/
public void testAsDate() {
expect(mockTimeSource.timeInMillis()).andReturn(1100L).atLeastOnce();
replay(mockTimeSource);
SystemTime.setGlobalTimeSource(mockTimeSource);
assertEquals(1100L, SystemTime.asDate().getTime());
verify();
}
/**
* Test method for {@link SystemTime#asCalendar()}.
*/
public void testAsCalendar() {
expect(mockTimeSource.timeInMillis()).andReturn(1200L).atLeastOnce();
replay(mockTimeSource);
SystemTime.setGlobalTimeSource(mockTimeSource);
assertEquals(1200L, SystemTime.asCalendar().getTimeInMillis());
verify();
}
/**
* Test method for {@link SystemTime#asMillis(boolean)}.
*/
public void testAsMillisBoolean() {
Calendar cal = new GregorianCalendar(2010, 1, 2, 3, 4, 5);
long timeInMillis = cal.getTimeInMillis() + 3; // Add a few milliseconds for good measure
expect(mockTimeSource.timeInMillis()).andReturn(timeInMillis).atLeastOnce();
replay(mockTimeSource);
SystemTime.setGlobalTimeSource(mockTimeSource);
Calendar calMidnight = new GregorianCalendar(2010, 1, 2, 0, 0, 0);
calMidnight.set(Calendar.MILLISECOND, 0);
assertEquals(calMidnight.getTimeInMillis(), SystemTime.asMillis(false));
assertEquals(timeInMillis, SystemTime.asMillis(true));
verify();
}
/**
* Test method for {@link SystemTime#asCalendar(boolean)}.
*/
public void testAsCalendarBoolean() {
Calendar cal = new GregorianCalendar(2010, 1, 2, 3, 4, 5);
cal.set(Calendar.MILLISECOND, 3); // Add a few milliseconds for good measure
expect(mockTimeSource.timeInMillis()).andReturn(cal.getTimeInMillis()).atLeastOnce();
replay(mockTimeSource);
SystemTime.setGlobalTimeSource(mockTimeSource);
Calendar calMidnight = new GregorianCalendar(2010, 1, 2, 0, 0, 0);
calMidnight.set(Calendar.MILLISECOND, 0);
assertEquals(calMidnight, SystemTime.asCalendar(false));
assertEquals(cal, SystemTime.asCalendar(true));
verify();
}
/**
* Test method for {@link SystemTime#asDate(boolean)}.
*/
public void testAsDateBoolean() {
Calendar cal = new GregorianCalendar(2010, 1, 2, 3, 4, 5);
cal.set(Calendar.MILLISECOND, 3); // Add a few milliseconds for good measure
expect(mockTimeSource.timeInMillis()).andReturn(cal.getTimeInMillis()).atLeastOnce();
replay(mockTimeSource);
SystemTime.setGlobalTimeSource(mockTimeSource);
Calendar calMidnight = new GregorianCalendar(2010, 1, 2, 0, 0, 0);
calMidnight.set(Calendar.MILLISECOND, 0);
assertEquals(calMidnight.getTimeInMillis(), SystemTime.asDate(false).getTime());
assertEquals(cal.getTimeInMillis(), SystemTime.asDate(true).getTime());
verify();
}
} | 0true
| common_src_test_java_org_broadleafcommerce_common_time_SystemTimeTest.java |
326 | public interface OStoragePhysicalClusterConfiguration extends OStorageClusterConfiguration {
public OStorageFileConfiguration[] getInfoFiles();
public String getMaxSize();
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_config_OStoragePhysicalClusterConfiguration.java |
198 | public class OJNADirectMemory implements ODirectMemory {
private static final CLibrary C_LIBRARY = OCLibraryFactory.INSTANCE.library();
public static final OJNADirectMemory INSTANCE = new OJNADirectMemory();
@Override
public long allocate(byte[] bytes) {
final long pointer = allocate(bytes.length);
set(pointer, bytes, 0, bytes.length);
return pointer;
}
@Override
public long allocate(long size) {
return Native.malloc(size);
}
@Override
public void free(long pointer) {
Native.free(pointer);
}
@Override
public byte[] get(long pointer, int length) {
return new Pointer(pointer).getByteArray(0, length);
}
@Override
public void get(long pointer, byte[] array, int arrayOffset, int length) {
new Pointer(pointer).read(0, array, arrayOffset, length);
}
@Override
public void set(long pointer, byte[] content, int arrayOffset, int length) {
new Pointer(pointer).write(0, content, arrayOffset, length);
}
@Override
public int getInt(long pointer) {
return new Pointer(pointer).getInt(0);
}
@Override
public void setInt(long pointer, int value) {
new Pointer(pointer).setInt(0, value);
}
@Override
public void setShort(long pointer, short value) {
new Pointer(pointer).setShort(0, value);
}
@Override
public short getShort(long pointer) {
return new Pointer(pointer).getShort(0);
}
@Override
public long getLong(long pointer) {
return new Pointer(pointer).getLong(0);
}
@Override
public void setLong(long pointer, long value) {
new Pointer(pointer).setLong(0, value);
}
@Override
public byte getByte(long pointer) {
return new Pointer(pointer).getByte(0);
}
@Override
public void setByte(long pointer, byte value) {
new Pointer(pointer).setByte(0, value);
}
@Override
public void setChar(long pointer, char value) {
final short short_char = (short) value;
new Pointer(pointer).setShort(0, short_char);
}
@Override
public char getChar(long pointer) {
final short short_char = new Pointer(pointer).getShort(0);
return (char) short_char;
}
@Override
public void moveData(long srcPointer, long destPointer, long len) {
C_LIBRARY.memoryMove(srcPointer, destPointer, len);
}
} | 0true
| nativeos_src_main_java_com_orientechnologies_nio_OJNADirectMemory.java |
78 | @SuppressWarnings("serial")
static final class MapReduceMappingsToDoubleTask<K,V>
extends BulkTask<K,V,Double> {
final ObjectByObjectToDouble<? super K, ? super V> transformer;
final DoubleByDoubleToDouble reducer;
final double basis;
double result;
MapReduceMappingsToDoubleTask<K,V> rights, nextRight;
MapReduceMappingsToDoubleTask
(BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
MapReduceMappingsToDoubleTask<K,V> nextRight,
ObjectByObjectToDouble<? super K, ? super V> transformer,
double basis,
DoubleByDoubleToDouble reducer) {
super(p, b, i, f, t); this.nextRight = nextRight;
this.transformer = transformer;
this.basis = basis; this.reducer = reducer;
}
public final Double getRawResult() { return result; }
public final void compute() {
final ObjectByObjectToDouble<? super K, ? super V> transformer;
final DoubleByDoubleToDouble reducer;
if ((transformer = this.transformer) != null &&
(reducer = this.reducer) != null) {
double r = this.basis;
for (int i = baseIndex, f, h; batch > 0 &&
(h = ((f = baseLimit) + i) >>> 1) > i;) {
addToPendingCount(1);
(rights = new MapReduceMappingsToDoubleTask<K,V>
(this, batch >>>= 1, baseLimit = h, f, tab,
rights, transformer, r, reducer)).fork();
}
for (Node<K,V> p; (p = advance()) != null; )
r = reducer.apply(r, transformer.apply(p.key, p.val));
result = r;
CountedCompleter<?> c;
for (c = firstComplete(); c != null; c = c.nextComplete()) {
@SuppressWarnings("unchecked") MapReduceMappingsToDoubleTask<K,V>
t = (MapReduceMappingsToDoubleTask<K,V>)c,
s = t.rights;
while (s != null) {
t.result = reducer.apply(t.result, s.result);
s = t.rights = s.nextRight;
}
}
}
}
} | 0true
| src_main_java_jsr166e_ConcurrentHashMapV8.java |
454 | executor.execute(new Runnable() {
@Override
public void run() {
for (int i = 0; i < operations; i++) {
map1.put("foo-" + i, "bar");
}
}
}, 60, EntryEventType.ADDED, operations, 0.75, map1, map2); | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_replicatedmap_ClientReplicatedMapTest.java |
76 | public class OSharedResourceExternal extends OSharedResourceAbstract implements OSharedResource {
@Override
public void acquireExclusiveLock() {
super.acquireExclusiveLock();
}
@Override
public void acquireSharedLock() {
super.acquireSharedLock();
}
@Override
public void releaseExclusiveLock() {
super.releaseExclusiveLock();
}
@Override
public void releaseSharedLock() {
super.releaseSharedLock();
}
} | 0true
| commons_src_main_java_com_orientechnologies_common_concur_resource_OSharedResourceExternal.java |
463 | new Thread(){
public void run() {
try {
semaphore.acquire();
latch.countDown();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}.start(); | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_semaphore_ClientSemaphoreTest.java |
3,673 | public class ParentFieldMapper extends AbstractFieldMapper<Uid> implements InternalMapper, RootMapper {
public static final String NAME = "_parent";
public static final String CONTENT_TYPE = "_parent";
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final String NAME = ParentFieldMapper.NAME;
public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);
static {
FIELD_TYPE.setIndexed(true);
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setStored(true);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_ONLY);
FIELD_TYPE.freeze();
}
}
public static class Builder extends Mapper.Builder<Builder, ParentFieldMapper> {
protected String indexName;
private String type;
protected PostingsFormatProvider postingsFormat;
public Builder() {
super(Defaults.NAME);
this.indexName = name;
}
public Builder type(String type) {
this.type = type;
return builder;
}
protected Builder postingsFormat(PostingsFormatProvider postingsFormat) {
this.postingsFormat = postingsFormat;
return builder;
}
@Override
public ParentFieldMapper build(BuilderContext context) {
if (type == null) {
throw new MapperParsingException("Parent mapping must contain the parent type");
}
return new ParentFieldMapper(name, indexName, type, postingsFormat, null, context.indexSettings());
}
}
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
ParentFieldMapper.Builder builder = parent();
for (Map.Entry<String, Object> entry : node.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("type")) {
builder.type(fieldNode.toString());
} else if (fieldName.equals("postings_format")) {
String postingFormatName = fieldNode.toString();
builder.postingsFormat(parserContext.postingFormatService().get(postingFormatName));
}
}
return builder;
}
}
private final String type;
private final BytesRef typeAsBytes;
protected ParentFieldMapper(String name, String indexName, String type, PostingsFormatProvider postingsFormat, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(new Names(name, indexName, indexName, name), Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), null,
Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER, postingsFormat, null, null, null, fieldDataSettings, indexSettings);
this.type = type;
this.typeAsBytes = type == null ? null : new BytesRef(type);
}
public ParentFieldMapper() {
this(Defaults.NAME, Defaults.NAME, null, null, null, null);
}
public String type() {
return type;
}
@Override
public FieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@Override
public FieldDataType defaultFieldDataType() {
return new FieldDataType("string");
}
@Override
public boolean hasDocValues() {
return false;
}
@Override
public void preParse(ParseContext context) throws IOException {
}
@Override
public void postParse(ParseContext context) throws IOException {
parse(context);
}
@Override
public void validate(ParseContext context) throws MapperParsingException {
}
@Override
public boolean includeInObject() {
return true;
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
if (!active()) {
return;
}
if (context.parser().currentName() != null && context.parser().currentName().equals(Defaults.NAME)) {
// we are in the parsing of _parent phase
String parentId = context.parser().text();
context.sourceToParse().parent(parentId);
fields.add(new Field(names.indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType));
} else {
// otherwise, we are running it post processing of the xcontent
String parsedParentId = context.doc().get(Defaults.NAME);
if (context.sourceToParse().parent() != null) {
String parentId = context.sourceToParse().parent();
if (parsedParentId == null) {
if (parentId == null) {
throw new MapperParsingException("No parent id provided, not within the document, and not externally");
}
// we did not add it in the parsing phase, add it now
fields.add(new Field(names.indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType));
} else if (parentId != null && !parsedParentId.equals(Uid.createUid(context.stringBuilder(), type, parentId))) {
throw new MapperParsingException("Parent id mismatch, document value is [" + Uid.createUid(parsedParentId).id() + "], while external value is [" + parentId + "]");
}
}
}
// we have parent mapping, yet no value was set, ignore it...
}
@Override
public Uid value(Object value) {
if (value == null) {
return null;
}
return Uid.createUid(value.toString());
}
@Override
public Object valueForSearch(Object value) {
if (value == null) {
return null;
}
String sValue = value.toString();
if (sValue == null) {
return null;
}
int index = sValue.indexOf(Uid.DELIMITER);
if (index == -1) {
return sValue;
}
return sValue.substring(index + 1);
}
@Override
public BytesRef indexedValueForSearch(Object value) {
if (value instanceof BytesRef) {
BytesRef bytesRef = (BytesRef) value;
if (Uid.hasDelimiter(bytesRef)) {
return bytesRef;
}
return Uid.createUidAsBytes(typeAsBytes, bytesRef);
}
String sValue = value.toString();
if (sValue.indexOf(Uid.DELIMITER) == -1) {
return Uid.createUidAsBytes(type, sValue);
}
return super.indexedValueForSearch(value);
}
@Override
public Query termQuery(Object value, @Nullable QueryParseContext context) {
if (context == null) {
return super.termQuery(value, context);
}
return new ConstantScoreQuery(termFilter(value, context));
}
@Override
public Filter termFilter(Object value, @Nullable QueryParseContext context) {
if (context == null) {
return super.termFilter(value, context);
}
BytesRef bValue = BytesRefs.toBytesRef(value);
if (Uid.hasDelimiter(bValue)) {
return new TermFilter(new Term(names.indexName(), bValue));
}
List<String> types = new ArrayList<String>(context.mapperService().types().size());
for (DocumentMapper documentMapper : context.mapperService()) {
if (!documentMapper.parentFieldMapper().active()) {
types.add(documentMapper.type());
}
}
if (types.isEmpty()) {
return Queries.MATCH_NO_FILTER;
} else if (types.size() == 1) {
return new TermFilter(new Term(names.indexName(), Uid.createUidAsBytes(types.get(0), bValue)));
} else {
// we use all non-child types, because we don't know if it's exact or not...
List<BytesRef> typesValues = new ArrayList<BytesRef>(types.size());
for (String type : context.mapperService().types()) {
typesValues.add(Uid.createUidAsBytes(type, bValue));
}
return new TermsFilter(names.indexName(), typesValues);
}
}
@Override
public Filter termsFilter(List values, @Nullable QueryParseContext context) {
if (context == null) {
return super.termsFilter(values, context);
}
// This will not be invoked if values is empty, so don't check for empty
if (values.size() == 1) {
return termFilter(values.get(0), context);
}
List<String> types = new ArrayList<String>(context.mapperService().types().size());
for (DocumentMapper documentMapper : context.mapperService()) {
if (!documentMapper.parentFieldMapper().active()) {
types.add(documentMapper.type());
}
}
List<BytesRef> bValues = new ArrayList<BytesRef>(values.size());
for (Object value : values) {
BytesRef bValue = BytesRefs.toBytesRef(value);
if (Uid.hasDelimiter(bValue)) {
bValues.add(bValue);
} else {
// we use all non-child types, because we don't know if it's exact or not...
for (String type : types) {
bValues.add(Uid.createUidAsBytes(type, bValue));
}
}
}
return new TermsFilter(names.indexName(), bValues);
}
/**
* We don't need to analyze the text, we just need to convert it to a UID...
*/
@Override
public boolean useTermQueryWithQueryString() {
return true;
}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (!active()) {
return builder;
}
builder.startObject(CONTENT_TYPE);
builder.field("type", type);
builder.endObject();
return builder;
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
ParentFieldMapper other = (ParentFieldMapper) mergeWith;
if (active() == other.active()) {
return;
}
if (active() != other.active() || !type.equals(other.type)) {
mergeContext.addConflict("The _parent field can't be added or updated");
}
}
/**
* @return Whether the _parent field is actually used.
*/
public boolean active() {
return type != null;
}
} | 1no label
| src_main_java_org_elasticsearch_index_mapper_internal_ParentFieldMapper.java |
733 | public class CollectionCompareAndRemoveRequest extends CollectionRequest {
private Set<Data> valueSet;
private boolean retain;
public CollectionCompareAndRemoveRequest() {
}
public CollectionCompareAndRemoveRequest(String name, Set<Data> valueSet, boolean retain) {
super(name);
this.valueSet = valueSet;
this.retain = retain;
}
@Override
protected Operation prepareOperation() {
return new CollectionCompareAndRemoveOperation(name, retain, valueSet);
}
@Override
public int getClassId() {
return CollectionPortableHook.COLLECTION_COMPARE_AND_REMOVE;
}
public void write(PortableWriter writer) throws IOException {
super.write(writer);
writer.writeBoolean("r", retain);
final ObjectDataOutput out = writer.getRawDataOutput();
out.writeInt(valueSet.size());
for (Data value : valueSet) {
value.writeData(out);
}
}
public void read(PortableReader reader) throws IOException {
super.read(reader);
retain = reader.readBoolean("r");
final ObjectDataInput in = reader.getRawDataInput();
final int size = in.readInt();
valueSet = new HashSet<Data>(size);
for (int i = 0; i < size; i++) {
final Data value = new Data();
value.readData(in);
valueSet.add(value);
}
}
@Override
public String getRequiredAction() {
return ActionConstants.ACTION_REMOVE;
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_collection_client_CollectionCompareAndRemoveRequest.java |
872 | public class TransportSearchQueryAndFetchAction extends TransportSearchTypeAction {
@Inject
public TransportSearchQueryAndFetchAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
SearchServiceTransportAction searchService, SearchPhaseController searchPhaseController) {
super(settings, threadPool, clusterService, searchService, searchPhaseController);
}
@Override
protected void doExecute(SearchRequest searchRequest, ActionListener<SearchResponse> listener) {
new AsyncAction(searchRequest, listener).start();
}
private class AsyncAction extends BaseAsyncAction<QueryFetchSearchResult> {
private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listener) {
super(request, listener);
}
@Override
protected String firstPhaseName() {
return "query_fetch";
}
@Override
protected void sendExecuteFirstPhase(DiscoveryNode node, ShardSearchRequest request, SearchServiceListener<QueryFetchSearchResult> listener) {
searchService.sendExecuteFetch(node, request, listener);
}
@Override
protected void moveToSecondPhase() throws Exception {
try {
innerFinishHim();
} catch (Throwable e) {
ReduceSearchPhaseException failure = new ReduceSearchPhaseException("merge", "", e, buildShardFailures());
if (logger.isDebugEnabled()) {
logger.debug("failed to reduce search", failure);
}
listener.onFailure(failure);
}
}
private void innerFinishHim() throws IOException {
sortedShardList = searchPhaseController.sortDocs(firstResults);
final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, firstResults, firstResults);
String scrollId = null;
if (request.scroll() != null) {
scrollId = buildScrollId(request.searchType(), firstResults, null);
}
listener.onResponse(new SearchResponse(internalResponse, scrollId, expectedSuccessfulOps, successulOps.get(), buildTookInMillis(), buildShardFailures()));
}
}
} | 0true
| src_main_java_org_elasticsearch_action_search_type_TransportSearchQueryAndFetchAction.java |
780 | searchAction.execute(searchRequest, new ActionListener<SearchResponse>() {
@Override
public void onResponse(SearchResponse response) {
listener.onResponse(response);
}
@Override
public void onFailure(Throwable e) {
listener.onFailure(e);
}
}); | 0true
| src_main_java_org_elasticsearch_action_mlt_TransportMoreLikeThisAction.java |
760 | public class ListRemoveOperation extends CollectionBackupAwareOperation {
private int index;
private long itemId;
public ListRemoveOperation() {
}
public ListRemoveOperation(String name, int index) {
super(name);
this.index = index;
}
@Override
public boolean shouldBackup() {
return true;
}
@Override
public Operation getBackupOperation() {
return new CollectionRemoveBackupOperation(name, itemId);
}
@Override
public int getId() {
return CollectionDataSerializerHook.LIST_REMOVE;
}
@Override
public void beforeRun() throws Exception {
publishEvent(ItemEventType.ADDED, (Data) response);
}
@Override
public void run() throws Exception {
final CollectionItem item = getOrCreateListContainer().remove(index);
itemId = item.getItemId();
response = item.getValue();
}
@Override
public void afterRun() throws Exception {
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
super.writeInternal(out);
out.writeInt(index);
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
super.readInternal(in);
index = in.readInt();
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_collection_list_ListRemoveOperation.java |
310 | public enum ResourceType {
FILESYSTEM,CLASSPATH
} | 0true
| common_src_main_java_org_broadleafcommerce_common_extensibility_context_MergeFileSystemAndClassPathXMLApplicationContext.java |
99 | @SuppressWarnings("restriction")
public class OUnsafeMemory implements ODirectMemory {
public static final OUnsafeMemory INSTANCE;
protected static final Unsafe unsafe;
private static final boolean unaligned;
private static final long UNSAFE_COPY_THRESHOLD = 1024L * 1024L;
static {
OUnsafeMemory futureInstance;
unsafe = (Unsafe) AccessController.doPrivileged(new PrivilegedAction<Object>() {
public Object run() {
try {
Field f = Unsafe.class.getDeclaredField("theUnsafe");
f.setAccessible(true);
return f.get(null);
} catch (NoSuchFieldException e) {
throw new Error();
} catch (IllegalAccessException e) {
throw new Error();
}
}
});
try {
unsafe.getClass().getDeclaredMethod("copyMemory", Object.class, long.class, Object.class, long.class, long.class);
Class<?> unsafeMemoryJava7 = OUnsafeMemory.class.getClassLoader().loadClass(
"com.orientechnologies.common.directmemory.OUnsafeMemoryJava7");
futureInstance = (OUnsafeMemory) unsafeMemoryJava7.newInstance();
} catch (Exception e) {
futureInstance = new OUnsafeMemory();
}
INSTANCE = futureInstance;
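// Unaligned multi-byte access is only safe on x86-family CPUs; other architectures use the byte-by-byte fallbacks below.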
String arch = System.getProperty("os.arch");
unaligned = arch.equals("i386") || arch.equals("x86") || arch.equals("amd64") || arch.equals("x86_64");
}
@Override
public long allocate(byte[] bytes) {
final long pointer = unsafe.allocateMemory(bytes.length);
set(pointer, bytes, 0, bytes.length);
return pointer;
}
@Override
public long allocate(long size) {
return unsafe.allocateMemory(size);
}
@Override
public void free(long pointer) {
unsafe.freeMemory(pointer);
}
@Override
public byte[] get(long pointer, final int length) {
final byte[] result = new byte[length];
for (int i = 0; i < length; i++)
result[i] = unsafe.getByte(pointer++);
return result;
}
@Override
public void get(long pointer, byte[] array, int arrayOffset, int length) {
pointer += arrayOffset;
for (int i = arrayOffset; i < length + arrayOffset; i++)
array[i] = unsafe.getByte(pointer++);
}
@Override
public void set(long pointer, byte[] content, int arrayOffset, int length) {
for (int i = arrayOffset; i < length + arrayOffset; i++)
unsafe.putByte(pointer++, content[i]);
}
@Override
public int getInt(long pointer) {
if (unaligned)
return unsafe.getInt(pointer);
return (0xFF & unsafe.getByte(pointer++)) << 24 | (0xFF & unsafe.getByte(pointer++)) << 16
| (0xFF & unsafe.getByte(pointer++)) << 8 | (0xFF & unsafe.getByte(pointer));
}
@Override
public void setInt(long pointer, int value) {
if (unaligned)
unsafe.putInt(pointer, value);
else {
unsafe.putByte(pointer++, (byte) (value >>> 24));
unsafe.putByte(pointer++, (byte) (value >>> 16));
unsafe.putByte(pointer++, (byte) (value >>> 8));
unsafe.putByte(pointer, (byte) (value));
}
}
@Override
public void setShort(long pointer, short value) {
if (unaligned)
unsafe.putShort(pointer, value);
else {
unsafe.putByte(pointer++, (byte) (value >>> 8));
unsafe.putByte(pointer, (byte) value);
}
}
@Override
public short getShort(long pointer) {
if (unaligned)
return unsafe.getShort(pointer);
return (short) (unsafe.getByte(pointer++) << 8 | (unsafe.getByte(pointer) & 0xff));
}
@Override
public void setChar(long pointer, char value) {
if (unaligned)
unsafe.putChar(pointer, value);
else {
unsafe.putByte(pointer++, (byte) (value >>> 8));
unsafe.putByte(pointer, (byte) (value));
}
}
@Override
public char getChar(long pointer) {
if (unaligned)
return unsafe.getChar(pointer);
return (char) ((unsafe.getByte(pointer++) << 8) | (unsafe.getByte(pointer) & 0xff));
}
@Override
public long getLong(long pointer) {
if (unaligned)
return unsafe.getLong(pointer);
return (0xFFL & unsafe.getByte(pointer++)) << 56 | (0xFFL & unsafe.getByte(pointer++)) << 48
| (0xFFL & unsafe.getByte(pointer++)) << 40 | (0xFFL & unsafe.getByte(pointer++)) << 32
| (0xFFL & unsafe.getByte(pointer++)) << 24 | (0xFFL & unsafe.getByte(pointer++)) << 16
| (0xFFL & unsafe.getByte(pointer++)) << 8 | (0xFFL & unsafe.getByte(pointer));
}
@Override
public void setLong(long pointer, long value) {
if (unaligned)
unsafe.putLong(pointer, value);
else {
unsafe.putByte(pointer++, (byte) (value >>> 56));
unsafe.putByte(pointer++, (byte) (value >>> 48));
unsafe.putByte(pointer++, (byte) (value >>> 40));
unsafe.putByte(pointer++, (byte) (value >>> 32));
unsafe.putByte(pointer++, (byte) (value >>> 24));
unsafe.putByte(pointer++, (byte) (value >>> 16));
unsafe.putByte(pointer++, (byte) (value >>> 8));
unsafe.putByte(pointer, (byte) (value));
}
}
@Override
public byte getByte(long pointer) {
return unsafe.getByte(pointer);
}
@Override
public void setByte(long pointer, byte value) {
unsafe.putByte(pointer, value);
}
@Override
public void moveData(long srcPointer, long destPointer, long len) {
while (len > 0) {
long size = (len > UNSAFE_COPY_THRESHOLD) ? UNSAFE_COPY_THRESHOLD : len;
unsafe.copyMemory(srcPointer, destPointer, size);
len -= size;
srcPointer += size;
destPointer += size;
}
}
} | 0true
| commons_src_main_java_com_orientechnologies_common_directmemory_OUnsafeMemory.java |
388 | new Thread(){
public void run() {
try {
if(mm.tryLock(key, 10, TimeUnit.SECONDS)){
tryLockReturnsTrue.countDown();
}
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}.start(); | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_multimap_ClientMultiMapLockTest.java |
4,836 | public class RestGetFieldMappingAction extends BaseRestHandler {
@Inject
public RestGetFieldMappingAction(Settings settings, Client client, RestController controller) {
super(settings, client);
controller.registerHandler(GET, "/_mapping/field/{fields}", this);
controller.registerHandler(GET, "/_mapping/{type}/field/{fields}", this);
controller.registerHandler(GET, "/{index}/_mapping/field/{fields}", this);
controller.registerHandler(GET, "/{index}/{type}/_mapping/field/{fields}", this);
controller.registerHandler(GET, "/{index}/_mapping/{type}/field/{fields}", this);
}
@Override
public void handleRequest(final RestRequest request, final RestChannel channel) {
final String[] indices = Strings.splitStringByCommaToArray(request.param("index"));
final String[] types = request.paramAsStringArrayOrEmptyIfAll("type");
final String[] fields = Strings.splitStringByCommaToArray(request.param("fields"));
GetFieldMappingsRequest getMappingsRequest = new GetFieldMappingsRequest();
getMappingsRequest.indices(indices).types(types).fields(fields).includeDefaults(request.paramAsBoolean("include_defaults", false));
getMappingsRequest.indicesOptions(IndicesOptions.fromRequest(request, getMappingsRequest.indicesOptions()));
getMappingsRequest.local(request.paramAsBoolean("local", getMappingsRequest.local()));
client.admin().indices().getFieldMappings(getMappingsRequest, new ActionListener<GetFieldMappingsResponse>() {
@SuppressWarnings("unchecked")
@Override
public void onResponse(GetFieldMappingsResponse response) {
try {
ImmutableMap<String, ImmutableMap<String, ImmutableMap<String, FieldMappingMetaData>>> mappingsByIndex = response.mappings();
boolean isPossibleSingleFieldRequest = indices.length == 1 && types.length == 1 && fields.length == 1;
if (isPossibleSingleFieldRequest && isFieldMappingMissingField(mappingsByIndex)) {
channel.sendResponse(new XContentRestResponse(request, OK, emptyBuilder(request)));
return;
}
RestStatus status = OK;
if (mappingsByIndex.isEmpty() && fields.length > 0) {
status = NOT_FOUND;
}
XContentBuilder builder = RestXContentBuilder.restContentBuilder(request);
builder.startObject();
response.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
channel.sendResponse(new XContentRestResponse(request, status, builder));
} catch (Throwable e) {
onFailure(e);
}
}
@Override
public void onFailure(Throwable e) {
try {
channel.sendResponse(new XContentThrowableRestResponse(request, e));
} catch (IOException e1) {
logger.error("Failed to send failure response", e1);
}
}
});
}
/**
*
* Helper method to find out if the only included field mapping metadata is typed NULL, which means
* that the type and index exist, but the field did not
*/
private boolean isFieldMappingMissingField(ImmutableMap<String, ImmutableMap<String, ImmutableMap<String, FieldMappingMetaData>>> mappingsByIndex) throws IOException {
if (mappingsByIndex.size() != 1) {
return false;
}
for (ImmutableMap<String, ImmutableMap<String, FieldMappingMetaData>> value : mappingsByIndex.values()) {
for (ImmutableMap<String, FieldMappingMetaData> fieldValue : value.values()) {
for (Map.Entry<String, FieldMappingMetaData> fieldMappingMetaDataEntry : fieldValue.entrySet()) {
if (fieldMappingMetaDataEntry.getValue().isNull()) {
return true;
}
}
}
}
return false;
}
} | 1no label
| src_main_java_org_elasticsearch_rest_action_admin_indices_mapping_get_RestGetFieldMappingAction.java |
3,426 | nodeEngine.getExecutionService().execute(ExecutionService.SYSTEM_EXECUTOR, new Runnable() {
public void run() {
try {
((InitializingObject) object).initialize();
} catch (Exception e) {
getLogger().warning("Error while initializing proxy: " + object, e);
}
}
}); | 1no label
| hazelcast_src_main_java_com_hazelcast_spi_impl_ProxyServiceImpl.java |
512 | public class TransportDeleteIndexAction extends TransportMasterNodeOperationAction<DeleteIndexRequest, DeleteIndexResponse> {
private final MetaDataDeleteIndexService deleteIndexService;
private final DestructiveOperations destructiveOperations;
@Inject
public TransportDeleteIndexAction(Settings settings, TransportService transportService, ClusterService clusterService,
ThreadPool threadPool, MetaDataDeleteIndexService deleteIndexService,
NodeSettingsService nodeSettingsService) {
super(settings, transportService, clusterService, threadPool);
this.deleteIndexService = deleteIndexService;
this.destructiveOperations = new DestructiveOperations(logger, settings, nodeSettingsService);
}
@Override
protected String executor() {
return ThreadPool.Names.SAME;
}
@Override
protected String transportAction() {
return DeleteIndexAction.NAME;
}
@Override
protected DeleteIndexRequest newRequest() {
return new DeleteIndexRequest();
}
@Override
protected DeleteIndexResponse newResponse() {
return new DeleteIndexResponse();
}
@Override
protected void doExecute(DeleteIndexRequest request, ActionListener<DeleteIndexResponse> listener) {
destructiveOperations.failDestructive(request.indices());
super.doExecute(request, listener);
}
@Override
protected ClusterBlockException checkBlock(DeleteIndexRequest request, ClusterState state) {
return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA, request.indices());
}
@Override
protected void masterOperation(final DeleteIndexRequest request, final ClusterState state, final ActionListener<DeleteIndexResponse> listener) throws ElasticsearchException {
request.indices(state.metaData().concreteIndices(request.indices(), request.indicesOptions()));
if (request.indices().length == 0) {
listener.onResponse(new DeleteIndexResponse(true));
return;
}
// TODO: this API should be improved; currently, if one index deletion fails we send a single failure, but we should send a response array that includes all the indices that were deleted
final CountDown count = new CountDown(request.indices().length);
for (final String index : request.indices()) {
deleteIndexService.deleteIndex(new MetaDataDeleteIndexService.Request(index).timeout(request.timeout()).masterTimeout(request.masterNodeTimeout()), new MetaDataDeleteIndexService.Listener() {
private volatile Throwable lastFailure;
private volatile boolean ack = true;
@Override
public void onResponse(MetaDataDeleteIndexService.Response response) {
if (!response.acknowledged()) {
ack = false;
}
if (count.countDown()) {
if (lastFailure != null) {
listener.onFailure(lastFailure);
} else {
listener.onResponse(new DeleteIndexResponse(ack));
}
}
}
@Override
public void onFailure(Throwable t) {
logger.debug("[{}] failed to delete index", t, index);
lastFailure = t;
if (count.countDown()) {
listener.onFailure(t);
}
}
});
}
}
} | 1no label
| src_main_java_org_elasticsearch_action_admin_indices_delete_TransportDeleteIndexAction.java |
1,653 | public abstract class Names {
public static String randomNodeName(URL nodeNames) {
BufferedReader reader = null;
try {
reader = new BufferedReader(new InputStreamReader(nodeNames.openStream(), Charsets.UTF_8));
int numberOfNames = 0;
while (reader.readLine() != null) {
numberOfNames++;
}
reader.close();
reader = new BufferedReader(new InputStreamReader(nodeNames.openStream(), Charsets.UTF_8));
int number = ((ThreadLocalRandom.current().nextInt(numberOfNames)) % numberOfNames);
for (int i = 0; i < number; i++) {
reader.readLine();
}
return reader.readLine();
} catch (IOException e) {
return null;
} finally {
try {
if (reader != null) {
reader.close();
}
} catch (IOException e) {
// ignore this exception
}
}
}
public static String randomNodeName(InputStream nodeNames) {
if (nodeNames == null) {
return null;
}
try {
BufferedReader reader = new BufferedReader(new InputStreamReader(nodeNames, Charsets.UTF_8));
int numberOfNames = Integer.parseInt(reader.readLine());
int number = ((new Random().nextInt(numberOfNames)) % numberOfNames) - 2; // remove 2 for last line and first line
for (int i = 0; i < number; i++) {
reader.readLine();
}
return reader.readLine();
} catch (Exception e) {
return null;
} finally {
try {
nodeNames.close();
} catch (IOException e) {
// ignore
}
}
}
private Names() {
}
} | 1no label
| src_main_java_org_elasticsearch_common_Names.java |
3,722 | private static final Comparator<ScheduledEntry> SCHEDULED_ENTRIES_COMPARATOR = new Comparator<ScheduledEntry>() {
@Override
public int compare(ScheduledEntry o1, ScheduledEntry o2) {
if (o1.getScheduleStartTimeInNanos() > o2.getScheduleStartTimeInNanos()) {
return 1;
} else if (o1.getScheduleStartTimeInNanos() < o2.getScheduleStartTimeInNanos()) {
return -1;
}
return 0;
}
}; | 1no label
| hazelcast_src_main_java_com_hazelcast_util_scheduler_SecondsBasedEntryTaskScheduler.java |
261 | public interface OCommandContext {
public enum TIMEOUT_STRATEGY {
RETURN, EXCEPTION
}
public Object getVariable(String iName);
public Object getVariable(String iName, Object iDefaultValue);
public OCommandContext setVariable(final String iName, final Object iValue);
public Map<String, Object> getVariables();
public OCommandContext getParent();
public OCommandContext setParent(OCommandContext iParentContext);
public OCommandContext setChild(OCommandContext context);
/**
* Updates a counter. Used to record metrics.
*
* @param iName
* Metric's name
* @param iValue
* delta to add or subtract
* @return the updated value of the metric
*/
public long updateMetric(String iName, long iValue);
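// Illustrative sketch (not part of this interface; "recordsVisited" is a hypothetical metric name):
// a command implementation could count the records it visits, assuming metric recording is enabled:
//   context.setRecordingMetrics(true);
//   long visited = context.updateMetric("recordsVisited", +1); // running total after the update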
public boolean isRecordingMetrics();
public OCommandContext setRecordingMetrics(boolean recordMetrics);
public void beginExecution(long timeoutMs, TIMEOUT_STRATEGY iStrategy);
public boolean checkTimeout();
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_command_OCommandContext.java |
377 | public class TransportPutRepositoryAction extends TransportMasterNodeOperationAction<PutRepositoryRequest, PutRepositoryResponse> {
private final RepositoriesService repositoriesService;
@Inject
public TransportPutRepositoryAction(Settings settings, TransportService transportService, ClusterService clusterService,
RepositoriesService repositoriesService, ThreadPool threadPool) {
super(settings, transportService, clusterService, threadPool);
this.repositoriesService = repositoriesService;
}
@Override
protected String executor() {
return ThreadPool.Names.SAME;
}
@Override
protected String transportAction() {
return PutRepositoryAction.NAME;
}
@Override
protected PutRepositoryRequest newRequest() {
return new PutRepositoryRequest();
}
@Override
protected PutRepositoryResponse newResponse() {
return new PutRepositoryResponse();
}
@Override
protected ClusterBlockException checkBlock(PutRepositoryRequest request, ClusterState state) {
return state.blocks().indexBlockedException(ClusterBlockLevel.METADATA, "");
}
@Override
protected void masterOperation(final PutRepositoryRequest request, ClusterState state, final ActionListener<PutRepositoryResponse> listener) throws ElasticsearchException {
repositoriesService.registerRepository(new RepositoriesService.RegisterRepositoryRequest("put_repository [" + request.name() + "]", request.name(), request.type())
.settings(request.settings())
.masterNodeTimeout(request.masterNodeTimeout())
.ackTimeout(request.timeout()), new ActionListener<RepositoriesService.RegisterRepositoryResponse>() {
@Override
public void onResponse(RepositoriesService.RegisterRepositoryResponse response) {
listener.onResponse(new PutRepositoryResponse(response.isAcknowledged()));
}
@Override
public void onFailure(Throwable e) {
listener.onFailure(e);
}
});
}
} | 1no label
| src_main_java_org_elasticsearch_action_admin_cluster_repositories_put_TransportPutRepositoryAction.java |
56 | public interface OLock {
public void lock();
public void unlock();
public <V> V callInLock(Callable<V> iCallback) throws Exception;
} | 0true
| commons_src_main_java_com_orientechnologies_common_concur_lock_OLock.java |
1,364 | public class OStorageMemory extends OStorageEmbedded {
private final List<ODataSegmentMemory> dataSegments = new ArrayList<ODataSegmentMemory>();
private final List<OClusterMemory> clusters = new ArrayList<OClusterMemory>();
private final Map<String, OClusterMemory> clusterMap = new HashMap<String, OClusterMemory>();
private int defaultClusterId = 0;
private long positionGenerator = 0;
public OStorageMemory(final String iURL) {
super(iURL, iURL, "rw");
configuration = new OStorageConfiguration(this);
}
public void create(final Map<String, Object> iOptions) {
addUser();
lock.acquireExclusiveLock();
try {
addDataSegment(OStorage.DATA_DEFAULT_NAME);
addDataSegment(OMetadataDefault.DATASEGMENT_INDEX_NAME);
// ADD THE METADATA CLUSTER TO STORE INTERNAL STUFF
addCluster(CLUSTER_TYPE.PHYSICAL.toString(), OMetadataDefault.CLUSTER_INTERNAL_NAME, null, null, true);
// ADD THE INDEX CLUSTER TO STORE, BY DEFAULT, ALL THE RECORDS OF INDEXING IN THE INDEX DATA SEGMENT
addCluster(CLUSTER_TYPE.PHYSICAL.toString(), OMetadataDefault.CLUSTER_INDEX_NAME, null,
OMetadataDefault.DATASEGMENT_INDEX_NAME, true);
// ADD THE INDEX CLUSTER TO STORE, BY DEFAULT, ALL THE RECORDS OF INDEXING
addCluster(CLUSTER_TYPE.PHYSICAL.toString(), OMetadataDefault.CLUSTER_MANUAL_INDEX_NAME, null, null, true);
// ADD THE DEFAULT CLUSTER
defaultClusterId = addCluster(CLUSTER_TYPE.PHYSICAL.toString(), CLUSTER_DEFAULT_NAME, null, null, false);
configuration.create();
status = STATUS.OPEN;
} catch (OStorageException e) {
close();
throw e;
} catch (IOException e) {
close();
throw new OStorageException("Error on creation of storage: " + name, e);
} finally {
lock.releaseExclusiveLock();
}
}
public void open(final String iUserName, final String iUserPassword, final Map<String, Object> iOptions) {
addUser();
if (status == STATUS.OPEN)
// ALREADY OPENED: THIS IS THE CASE WHEN A STORAGE INSTANCE IS
// REUSED
return;
lock.acquireExclusiveLock();
try {
if (!exists())
throw new OStorageException("Cannot open the storage '" + name + "' because it does not exist in path: " + url);
status = STATUS.OPEN;
} finally {
lock.releaseExclusiveLock();
}
}
public void close(final boolean iForce) {
lock.acquireExclusiveLock();
try {
if (!checkForClose(iForce))
return;
status = STATUS.CLOSING;
// CLOSE ALL THE CLUSTERS
for (OClusterMemory c : clusters)
if (c != null)
c.close();
clusters.clear();
clusterMap.clear();
// CLOSE THE DATA SEGMENTS
for (ODataSegmentMemory d : dataSegments)
if (d != null)
d.close();
dataSegments.clear();
level2Cache.shutdown();
super.close(iForce);
Orient.instance().unregisterStorage(this);
status = STATUS.CLOSED;
} finally {
lock.releaseExclusiveLock();
}
}
public void delete() {
close(true);
}
@Override
public void backup(OutputStream out, Map<String, Object> options, Callable<Object> callable) throws IOException {
throw new UnsupportedOperationException("backup");
}
@Override
public void restore(InputStream in, Map<String, Object> options, Callable<Object> callable) throws IOException {
throw new UnsupportedOperationException("restore");
}
public void reload() {
}
public int addCluster(final String iClusterType, String iClusterName, final String iLocation, final String iDataSegmentName,
boolean forceListBased, final Object... iParameters) {
iClusterName = iClusterName.toLowerCase();
lock.acquireExclusiveLock();
try {
int clusterId = clusters.size();
for (int i = 0; i < clusters.size(); ++i) {
if (clusters.get(i) == null) {
clusterId = i;
break;
}
}
final OClusterMemory cluster = (OClusterMemory) Orient.instance().getClusterFactory().createCluster(OClusterMemory.TYPE);
cluster.configure(this, clusterId, iClusterName, iLocation, getDataSegmentIdByName(iDataSegmentName), iParameters);
if (clusterId == clusters.size())
// APPEND IT
clusters.add(cluster);
else
// RECYCLE THE FREE POSITION
clusters.set(clusterId, cluster);
clusterMap.put(iClusterName, cluster);
return clusterId;
} finally {
lock.releaseExclusiveLock();
}
}
public int addCluster(String iClusterType, String iClusterName, int iRequestedId, String iLocation, String iDataSegmentName,
boolean forceListBased, Object... iParameters) {
throw new UnsupportedOperationException("This operation is unsupported for " + getType()
+ " storage. If you are doing import please use parameter -preserveClusterIDs=false .");
}
public boolean dropCluster(final int iClusterId, final boolean iTruncate) {
lock.acquireExclusiveLock();
try {
final OCluster c = clusters.get(iClusterId);
if (c != null) {
if (iTruncate)
c.truncate();
c.delete();
clusters.set(iClusterId, null);
getLevel2Cache().freeCluster(iClusterId);
clusterMap.remove(c.getName());
}
} catch (IOException e) {
} finally {
lock.releaseExclusiveLock();
}
return false;
}
public boolean dropDataSegment(final String iName) {
lock.acquireExclusiveLock();
try {
final int id = getDataSegmentIdByName(iName);
final ODataSegment data = dataSegments.get(id);
if (data == null)
return false;
data.drop();
dataSegments.set(id, null);
// UPDATE CONFIGURATION
configuration.dropCluster(id);
return true;
} catch (Exception e) {
OLogManager.instance().exception("Error while removing data segment '" + iName + '\'', e, OStorageException.class);
} finally {
lock.releaseExclusiveLock();
}
return false;
}
public int addDataSegment(final String iDataSegmentName) {
lock.acquireExclusiveLock();
try {
int pos = -1;
for (int i = 0; i < dataSegments.size(); ++i) {
if (dataSegments.get(i) == null) {
pos = i;
break;
}
}
if (pos == -1)
pos = dataSegments.size();
final ODataSegmentMemory dataSegment = new ODataSegmentMemory(iDataSegmentName, pos);
if (pos == dataSegments.size())
dataSegments.add(dataSegment);
else
dataSegments.set(pos, dataSegment);
return pos;
} finally {
lock.releaseExclusiveLock();
}
}
public int addDataSegment(final String iSegmentName, final String iLocation) {
return addDataSegment(iSegmentName);
}
public OStorageOperationResult<OPhysicalPosition> createRecord(final int iDataSegmentId, final ORecordId iRid,
final byte[] iContent, ORecordVersion iRecordVersion, final byte iRecordType, final int iMode,
ORecordCallback<OClusterPosition> iCallback) {
final long timer = Orient.instance().getProfiler().startChrono();
lock.acquireSharedLock();
try {
final ODataSegmentMemory data = getDataSegmentById(iDataSegmentId);
final long offset = data.createRecord(iContent);
final OCluster cluster = getClusterById(iRid.clusterId);
// ASSIGN THE POSITION IN THE CLUSTER
final OPhysicalPosition ppos = new OPhysicalPosition(iDataSegmentId, offset, iRecordType);
if (cluster.isHashBased()) {
if (iRid.isNew()) {
if (OGlobalConfiguration.USE_NODE_ID_CLUSTER_POSITION.getValueAsBoolean()) {
ppos.clusterPosition = OClusterPositionFactory.INSTANCE.generateUniqueClusterPosition();
} else {
ppos.clusterPosition = OClusterPositionFactory.INSTANCE.valueOf(positionGenerator++);
}
} else {
ppos.clusterPosition = iRid.clusterPosition;
}
}
if (!cluster.addPhysicalPosition(ppos)) {
data.readRecord(ppos.dataSegmentPos);
throw new OStorageException("Record with given id " + iRid + " has already exists.");
}
iRid.clusterPosition = ppos.clusterPosition;
if (iCallback != null)
iCallback.call(iRid, iRid.clusterPosition);
if (iRecordVersion.getCounter() > 0 && iRecordVersion.compareTo(ppos.recordVersion) != 0) {
// OVERWRITE THE VERSION
cluster.updateVersion(iRid.clusterPosition, iRecordVersion);
ppos.recordVersion = iRecordVersion;
}
return new OStorageOperationResult<OPhysicalPosition>(ppos);
} catch (IOException e) {
throw new OStorageException("Error on create record in cluster: " + iRid.clusterId, e);
} finally {
lock.releaseSharedLock();
Orient.instance().getProfiler()
.stopChrono(PROFILER_CREATE_RECORD, "Create a record in database", timer, "db.*.data.updateHole");
}
}
public OStorageOperationResult<ORawBuffer> readRecord(final ORecordId iRid, String iFetchPlan, boolean iIgnoreCache,
ORecordCallback<ORawBuffer> iCallback, boolean loadTombstones) {
return new OStorageOperationResult<ORawBuffer>(readRecord(getClusterById(iRid.clusterId), iRid, true, loadTombstones));
}
@Override
protected ORawBuffer readRecord(final OCluster iClusterSegment, final ORecordId iRid, final boolean iAtomicLock,
boolean loadTombstones) {
final long timer = Orient.instance().getProfiler().startChrono();
lock.acquireSharedLock();
try {
lockManager.acquireLock(Thread.currentThread(), iRid, LOCK.SHARED);
try {
final OClusterPosition lastPos = iClusterSegment.getLastPosition();
if (!iClusterSegment.isHashBased()) {
if (iRid.clusterPosition.compareTo(lastPos) > 0)
return null;
}
final OPhysicalPosition ppos = iClusterSegment.getPhysicalPosition(new OPhysicalPosition(iRid.clusterPosition));
if (ppos != null && loadTombstones && ppos.recordVersion.isTombstone())
return new ORawBuffer(null, ppos.recordVersion, ppos.recordType);
if (ppos == null || ppos.recordVersion.isTombstone())
return null;
final ODataSegmentMemory dataSegment = getDataSegmentById(ppos.dataSegmentId);
return new ORawBuffer(dataSegment.readRecord(ppos.dataSegmentPos), ppos.recordVersion, ppos.recordType);
} finally {
lockManager.releaseLock(Thread.currentThread(), iRid, LOCK.SHARED);
}
} catch (IOException e) {
throw new OStorageException("Error on read record in cluster: " + iClusterSegment.getId(), e);
} finally {
lock.releaseSharedLock();
Orient.instance().getProfiler().stopChrono(PROFILER_READ_RECORD, "Read a record from database", timer, "db.*.readRecord");
}
}
public OStorageOperationResult<ORecordVersion> updateRecord(final ORecordId iRid, final byte[] iContent,
final ORecordVersion iVersion, final byte iRecordType, final int iMode, ORecordCallback<ORecordVersion> iCallback) {
final long timer = Orient.instance().getProfiler().startChrono();
final OCluster cluster = getClusterById(iRid.clusterId);
lock.acquireSharedLock();
try {
lockManager.acquireLock(Thread.currentThread(), iRid, LOCK.EXCLUSIVE);
try {
final OPhysicalPosition ppos = cluster.getPhysicalPosition(new OPhysicalPosition(iRid.clusterPosition));
if (ppos == null || ppos.recordVersion.isTombstone()) {
final ORecordVersion v = OVersionFactory.instance().createUntrackedVersion();
if (iCallback != null) {
iCallback.call(iRid, v);
}
return new OStorageOperationResult<ORecordVersion>(v);
}
// VERSION CONTROL CHECK
switch (iVersion.getCounter()) {
// DOCUMENT UPDATE, NO VERSION CONTROL
case -1:
ppos.recordVersion.increment();
cluster.updateVersion(iRid.clusterPosition, ppos.recordVersion);
break;
// DOCUMENT UPDATE, NO VERSION CONTROL, NO VERSION UPDATE
case -2:
break;
default:
// MVCC CONTROL AND RECORD UPDATE OR WRONG VERSION VALUE
if (iVersion.getCounter() > -1) {
// MVCC TRANSACTION: CHECK IF VERSION IS THE SAME
if (!iVersion.equals(ppos.recordVersion))
if (OFastConcurrentModificationException.enabled())
throw OFastConcurrentModificationException.instance();
else
throw new OConcurrentModificationException(iRid, ppos.recordVersion, iVersion, ORecordOperation.UPDATED);
ppos.recordVersion.increment();
cluster.updateVersion(iRid.clusterPosition, ppos.recordVersion);
} else {
// DOCUMENT ROLLBACKED
iVersion.clearRollbackMode();
ppos.recordVersion.copyFrom(iVersion);
cluster.updateVersion(iRid.clusterPosition, ppos.recordVersion);
}
}
if (ppos.recordType != iRecordType)
cluster.updateRecordType(iRid.clusterPosition, iRecordType);
final ODataSegmentMemory dataSegment = getDataSegmentById(ppos.dataSegmentId);
dataSegment.updateRecord(ppos.dataSegmentPos, iContent);
if (iCallback != null)
iCallback.call(null, ppos.recordVersion);
return new OStorageOperationResult<ORecordVersion>(ppos.recordVersion);
} finally {
lockManager.releaseLock(Thread.currentThread(), iRid, LOCK.EXCLUSIVE);
}
} catch (IOException e) {
throw new OStorageException("Error on update record " + iRid, e);
} finally {
lock.releaseSharedLock();
Orient.instance().getProfiler().stopChrono(PROFILER_UPDATE_RECORD, "Update a record to database", timer, "db.*.updateRecord");
}
}
@Override
public boolean updateReplica(int dataSegmentId, ORecordId rid, byte[] content, ORecordVersion recordVersion, byte recordType)
throws IOException {
if (rid.isNew())
throw new OStorageException("Passed record with id " + rid + " is new and can not be treated as replica.");
checkOpeness();
final OCluster cluster = getClusterById(rid.clusterId);
final ODataSegmentMemory data = getDataSegmentById(dataSegmentId);
lock.acquireSharedLock();
try {
lockManager.acquireLock(Thread.currentThread(), rid, LOCK.EXCLUSIVE);
try {
OPhysicalPosition ppos = cluster.getPhysicalPosition(new OPhysicalPosition(rid.clusterPosition));
if (ppos == null) {
if (!cluster.isHashBased())
throw new OStorageException("Cluster with LH support is required.");
ppos = new OPhysicalPosition(rid.clusterPosition, recordVersion);
ppos.recordType = recordType;
ppos.dataSegmentId = data.getId();
if (!recordVersion.isTombstone()) {
ppos.dataSegmentPos = data.createRecord(content);
}
cluster.addPhysicalPosition(ppos);
return true;
} else {
if (ppos.recordType != recordType)
throw new OStorageException("Record types of provided and stored replicas are different " + recordType + ":"
+ ppos.recordType + ".");
if (ppos.recordVersion.compareTo(recordVersion) < 0) {
if (!recordVersion.isTombstone() && !ppos.recordVersion.isTombstone()) {
data.updateRecord(ppos.dataSegmentPos, content);
} else if (recordVersion.isTombstone() && !ppos.recordVersion.isTombstone()) {
data.deleteRecord(ppos.dataSegmentPos);
} else if (!recordVersion.isTombstone() && ppos.recordVersion.isTombstone()) {
ppos.dataSegmentPos = data.createRecord(content);
cluster.updateDataSegmentPosition(ppos.clusterPosition, dataSegmentId, ppos.dataSegmentPos);
}
cluster.updateVersion(ppos.clusterPosition, recordVersion);
return true;
}
}
} finally {
lockManager.releaseLock(Thread.currentThread(), rid, LOCK.EXCLUSIVE);
}
} finally {
lock.releaseSharedLock();
}
return false;
}
@Override
public <V> V callInRecordLock(Callable<V> callable, ORID rid, boolean exclusiveLock) {
lock.acquireSharedLock();
try {
lockManager.acquireLock(Thread.currentThread(), rid, exclusiveLock ? LOCK.EXCLUSIVE : LOCK.SHARED);
try {
return callable.call();
} finally {
lockManager.releaseLock(Thread.currentThread(), rid, exclusiveLock ? LOCK.EXCLUSIVE : LOCK.SHARED);
}
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new OException("Error on nested call in lock", e);
} finally {
lock.releaseSharedLock();
}
}
@Override
public OStorageOperationResult<Boolean> deleteRecord(final ORecordId iRid, final ORecordVersion iVersion, final int iMode,
ORecordCallback<Boolean> iCallback) {
return new OStorageOperationResult<Boolean>(deleteRecord(iRid, iVersion,
OGlobalConfiguration.STORAGE_USE_TOMBSTONES.getValueAsBoolean(), iCallback));
}
@Override
public boolean cleanOutRecord(ORecordId recordId, ORecordVersion recordVersion, int iMode, ORecordCallback<Boolean> callback) {
return deleteRecord(recordId, recordVersion, false, callback);
}
private boolean deleteRecord(ORecordId iRid, ORecordVersion iVersion, boolean useTombstones, ORecordCallback<Boolean> iCallback) {
final long timer = Orient.instance().getProfiler().startChrono();
final OCluster cluster = getClusterById(iRid.clusterId);
lock.acquireSharedLock();
try {
lockManager.acquireLock(Thread.currentThread(), iRid, LOCK.EXCLUSIVE);
try {
final OPhysicalPosition ppos = cluster.getPhysicalPosition(new OPhysicalPosition(iRid.clusterPosition));
if (ppos == null || (ppos.recordVersion.isTombstone() && useTombstones)) {
if (iCallback != null)
iCallback.call(iRid, false);
return false;
}
// MVCC TRANSACTION: CHECK IF VERSION IS THE SAME
if (iVersion.getCounter() > -1 && !ppos.recordVersion.equals(iVersion))
if (OFastConcurrentModificationException.enabled())
throw OFastConcurrentModificationException.instance();
else
throw new OConcurrentModificationException(iRid, ppos.recordVersion, iVersion, ORecordOperation.DELETED);
if (!ppos.recordVersion.isTombstone()) {
final ODataSegmentMemory dataSegment = getDataSegmentById(ppos.dataSegmentId);
dataSegment.deleteRecord(ppos.dataSegmentPos);
ppos.dataSegmentPos = -1;
}
if (useTombstones && cluster.hasTombstonesSupport())
cluster.convertToTombstone(iRid.clusterPosition);
else
cluster.removePhysicalPosition(iRid.clusterPosition);
if (iCallback != null)
iCallback.call(null, true);
return true;
} finally {
lockManager.releaseLock(Thread.currentThread(), iRid, LOCK.EXCLUSIVE);
}
} catch (IOException e) {
throw new OStorageException("Error on delete record " + iRid, e);
} finally {
lock.releaseSharedLock();
Orient.instance().getProfiler()
.stopChrono(PROFILER_DELETE_RECORD, "Delete a record from database", timer, "db.*.deleteRecord");
}
}
public long count(final int iClusterId) {
return count(iClusterId, false);
}
@Override
public long count(int iClusterId, boolean countTombstones) {
final OCluster cluster = getClusterById(iClusterId);
lock.acquireSharedLock();
try {
return cluster.getEntries() - (countTombstones ? 0L : cluster.getTombstonesCount());
} finally {
lock.releaseSharedLock();
}
}
public OClusterPosition[] getClusterDataRange(final int iClusterId) {
final OCluster cluster = getClusterById(iClusterId);
lock.acquireSharedLock();
try {
return new OClusterPosition[] { cluster.getFirstPosition(), cluster.getLastPosition() };
} catch (IOException ioe) {
throw new OStorageException("Can not retrieve information about data range", ioe);
} finally {
lock.releaseSharedLock();
}
}
public long count(final int[] iClusterIds) {
return count(iClusterIds, false);
}
@Override
public long count(int[] iClusterIds, boolean countTombstones) {
lock.acquireSharedLock();
try {
long tot = 0;
for (int iClusterId : iClusterIds) {
if (iClusterId > -1) {
final OCluster cluster = clusters.get(iClusterId);
if (cluster != null)
tot += cluster.getEntries() - (countTombstones ? 0L : cluster.getTombstonesCount());
}
}
return tot;
} finally {
lock.releaseSharedLock();
}
}
public OCluster getClusterByName(final String iClusterName) {
lock.acquireSharedLock();
try {
return clusterMap.get(iClusterName.toLowerCase());
} finally {
lock.releaseSharedLock();
}
}
public int getClusterIdByName(String iClusterName) {
iClusterName = iClusterName.toLowerCase();
lock.acquireSharedLock();
try {
final OCluster cluster = clusterMap.get(iClusterName.toLowerCase());
if (cluster == null)
return -1;
return cluster.getId();
} finally {
lock.releaseSharedLock();
}
}
public String getClusterTypeByName(final String iClusterName) {
return OClusterMemory.TYPE;
}
public String getPhysicalClusterNameById(final int iClusterId) {
lock.acquireSharedLock();
try {
for (OClusterMemory cluster : clusters) {
if (cluster != null && cluster.getId() == iClusterId)
return cluster.getName();
}
return null;
} finally {
lock.releaseSharedLock();
}
}
public Set<String> getClusterNames() {
lock.acquireSharedLock();
try {
return new HashSet<String>(clusterMap.keySet());
} finally {
lock.releaseSharedLock();
}
}
public void commit(final OTransaction iTx, Runnable callback) {
lock.acquireExclusiveLock();
try {
final List<ORecordOperation> tmpEntries = new ArrayList<ORecordOperation>();
while (iTx.getCurrentRecordEntries().iterator().hasNext()) {
for (ORecordOperation txEntry : iTx.getCurrentRecordEntries())
tmpEntries.add(txEntry);
iTx.clearRecordEntries();
for (ORecordOperation txEntry : tmpEntries)
// COMMIT ALL THE SINGLE ENTRIES ONE BY ONE
commitEntry(iTx, txEntry);
tmpEntries.clear();
}
// UPDATE THE CACHE ONLY IF THE ITERATOR ALLOWS IT
OTransactionAbstract.updateCacheFromEntries(iTx, iTx.getAllRecordEntries(), true);
} catch (IOException e) {
rollback(iTx);
} finally {
lock.releaseExclusiveLock();
}
}
public void rollback(final OTransaction iTx) {
}
public void synch() {
}
public boolean exists() {
lock.acquireSharedLock();
try {
return !clusters.isEmpty();
} finally {
lock.releaseSharedLock();
}
}
public ODataSegmentMemory getDataSegmentById(int iDataId) {
lock.acquireSharedLock();
try {
if (iDataId < 0 || iDataId > dataSegments.size() - 1)
throw new IllegalArgumentException("Invalid data segment id " + iDataId + ". Range is 0-" + (dataSegments.size() - 1));
return dataSegments.get(iDataId);
} finally {
lock.releaseSharedLock();
}
}
public int getDataSegmentIdByName(final String iDataSegmentName) {
if (iDataSegmentName == null)
return 0;
lock.acquireSharedLock();
try {
for (ODataSegmentMemory d : dataSegments)
if (d != null && d.getName().equalsIgnoreCase(iDataSegmentName))
return d.getId();
throw new IllegalArgumentException("Data segment '" + iDataSegmentName + "' does not exist in storage '" + name + "'");
} finally {
lock.releaseSharedLock();
}
}
public OCluster getClusterById(int iClusterId) {
lock.acquireSharedLock();
try {
if (iClusterId == ORID.CLUSTER_ID_INVALID)
// GET THE DEFAULT CLUSTER
iClusterId = defaultClusterId;
checkClusterSegmentIndexRange(iClusterId);
return clusters.get(iClusterId);
} finally {
lock.releaseSharedLock();
}
}
public int getClusters() {
lock.acquireSharedLock();
try {
return clusterMap.size();
} finally {
lock.releaseSharedLock();
}
}
public Collection<? extends OCluster> getClusterInstances() {
lock.acquireSharedLock();
try {
return Collections.unmodifiableCollection(clusters);
} finally {
lock.releaseSharedLock();
}
}
public int getDefaultClusterId() {
return defaultClusterId;
}
public long getSize() {
long size = 0;
lock.acquireSharedLock();
try {
for (ODataSegmentMemory d : dataSegments)
if (d != null)
size += d.getSize();
} finally {
lock.releaseSharedLock();
}
return size;
}
@Override
public boolean checkForRecordValidity(final OPhysicalPosition ppos) {
if (ppos.dataSegmentId > 0)
return false;
lock.acquireSharedLock();
try {
final ODataSegmentMemory dataSegment = getDataSegmentById(ppos.dataSegmentId);
if (ppos.dataSegmentPos >= dataSegment.count())
return false;
} finally {
lock.releaseSharedLock();
}
return true;
}
private void commitEntry(final OTransaction iTx, final ORecordOperation txEntry) throws IOException {
final ORecordId rid = (ORecordId) txEntry.getRecord().getIdentity();
final OCluster cluster = getClusterById(rid.clusterId);
rid.clusterId = cluster.getId();
if (txEntry.getRecord() instanceof OTxListener)
((OTxListener) txEntry.getRecord()).onEvent(txEntry, OTxListener.EVENT.BEFORE_COMMIT);
switch (txEntry.type) {
case ORecordOperation.LOADED:
break;
case ORecordOperation.CREATED:
if (rid.isNew()) {
// CHECK 2 TIMES TO ASSURE THAT IT'S A CREATE OR AN UPDATE BASED ON RECURSIVE TO-STREAM METHOD
final byte[] stream = txEntry.getRecord().toStream();
if (stream == null) {
OLogManager.instance().warn(this, "Null serialization on committing new record %s in transaction", rid);
break;
}
if (rid.isNew()) {
final ORecordId oldRID = rid.copy();
final OPhysicalPosition ppos = createRecord(txEntry.dataSegmentId, rid, stream,
OVersionFactory.instance().createVersion(), txEntry.getRecord().getRecordType(), 0, null).getResult();
txEntry.getRecord().getRecordVersion().copyFrom(ppos.recordVersion);
iTx.updateIdentityAfterCommit(oldRID, rid);
} else {
txEntry
.getRecord()
.getRecordVersion()
.copyFrom(
updateRecord(rid, stream, txEntry.getRecord().getRecordVersion(), txEntry.getRecord().getRecordType(), 0, null)
.getResult());
}
}
break;
case ORecordOperation.UPDATED:
final byte[] stream = txEntry.getRecord().toStream();
if (stream == null) {
OLogManager.instance().warn(this, "Null serialization on committing updated record %s in transaction", rid);
break;
}
txEntry
.getRecord()
.getRecordVersion()
.copyFrom(
updateRecord(rid, stream, txEntry.getRecord().getRecordVersion(), txEntry.getRecord().getRecordType(), 0, null)
.getResult());
break;
case ORecordOperation.DELETED:
deleteRecord(rid, txEntry.getRecord().getRecordVersion(), 0, null);
break;
}
txEntry.getRecord().unsetDirty();
if (txEntry.getRecord() instanceof OTxListener)
((OTxListener) txEntry.getRecord()).onEvent(txEntry, OTxListener.EVENT.AFTER_COMMIT);
}
@Override
public String getURL() {
return OEngineMemory.NAME + ":" + url;
}
public OStorageConfigurationSegment getConfigurationSegment() {
return null;
}
public void renameCluster(final String iOldName, final String iNewName) {
final OClusterMemory cluster = (OClusterMemory) getClusterByName(iOldName);
if (cluster != null)
try {
cluster.set(com.orientechnologies.orient.core.storage.OCluster.ATTRIBUTES.NAME, iNewName);
} catch (IOException e) {
}
}
public void setDefaultClusterId(int defaultClusterId) {
this.defaultClusterId = defaultClusterId;
}
@Override
public String getType() {
return OEngineMemory.NAME;
}
private void checkClusterSegmentIndexRange(final int iClusterId) {
if (iClusterId > clusters.size() - 1)
throw new IllegalArgumentException("Cluster segment #" + iClusterId + " does not exist in database '" + name + "'");
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_storage_impl_memory_OStorageMemory.java |
592 | public class JoinCheckOperation extends AbstractOperation implements JoinOperation {
private JoinRequest joinRequest;
private JoinRequest response;
public JoinCheckOperation() {
}
public JoinCheckOperation(final JoinRequest joinRequest) {
this.joinRequest = joinRequest;
}
@Override
public void run() {
final ClusterServiceImpl service = getService();
final NodeEngineImpl nodeEngine = (NodeEngineImpl) getNodeEngine();
final Node node = nodeEngine.getNode();
boolean ok = false;
if (joinRequest != null && node.joined() && node.isActive()) {
try {
ok = service.validateJoinMessage(joinRequest);
} catch (Exception ignored) {
}
}
if (ok) {
response = node.createJoinRequest();
}
}
@Override
public boolean returnsResponse() {
return true;
}
@Override
public Object getResponse() {
return response;
}
@Override
protected void readInternal(final ObjectDataInput in) throws IOException {
joinRequest = new JoinRequest();
joinRequest.readData(in);
}
@Override
protected void writeInternal(final ObjectDataOutput out) throws IOException {
joinRequest.writeData(out);
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_cluster_JoinCheckOperation.java |
238 | service.submitToMember(runnable, member, new ExecutionCallback() {
public void onResponse(Object response) {
responseLatch.countDown();
}
public void onFailure(Throwable t) {
}
}); | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_executor_ClientExecutorServiceSubmitTest.java |
1,287 | public interface ClusterService extends LifecycleComponent<ClusterService> {
/**
* The local node.
*/
DiscoveryNode localNode();
/**
* The current state.
*/
ClusterState state();
/**
* Adds an initial block to be set on the first cluster state created.
*/
void addInitialStateBlock(ClusterBlock block) throws ElasticsearchIllegalStateException;
/**
* Remove an initial block to be set on the first cluster state created.
*/
void removeInitialStateBlock(ClusterBlock block) throws ElasticsearchIllegalStateException;
/**
* The operation routing.
*/
OperationRouting operationRouting();
/**
* Adds a priority listener for updated cluster states.
*/
void addFirst(ClusterStateListener listener);
/**
* Adds last listener.
*/
void addLast(ClusterStateListener listener);
/**
* Adds a listener for updated cluster states.
*/
void add(ClusterStateListener listener);
/**
* Removes a listener for updated cluster states.
*/
void remove(ClusterStateListener listener);
/**
* Add a listener for on/off local node master events
*/
void add(LocalNodeMasterListener listener);
/**
* Remove the given listener for on/off local master events
*/
void remove(LocalNodeMasterListener listener);
/**
* Adds a cluster state listener that will timeout after the provided timeout.
*/
void add(TimeValue timeout, TimeoutClusterStateListener listener);
/**
* Submits a task that will update the cluster state.
*/
void submitStateUpdateTask(final String source, Priority priority, final ClusterStateUpdateTask updateTask);
/**
* Submits a task that will update the cluster state (the task has a default priority of {@link Priority#NORMAL}).
*/
void submitStateUpdateTask(final String source, final ClusterStateUpdateTask updateTask);
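// Illustrative sketch (not part of this interface): a caller might submit a no-op update task.
// ClusterStateUpdateTask is assumed to expose execute(ClusterState) and onFailure(String, Throwable):
//   clusterService.submitStateUpdateTask("example-source", new ClusterStateUpdateTask() {
//       @Override public ClusterState execute(ClusterState currentState) { return currentState; }
//       @Override public void onFailure(String source, Throwable t) { /* log and give up */ }
//   });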
/**
* Returns the tasks that are pending.
*/
List<PendingClusterTask> pendingTasks();
} | 0true
| src_main_java_org_elasticsearch_cluster_ClusterService.java |
573 | public interface ValueAssignable<T extends Serializable> extends Serializable {
/**
* The value
*
* @return The value
*/
T getValue();
/**
* The value
*
* @param value The value
*/
void setValue(T value);
/**
* The name
*
* @return The name
*/
String getName();
/**
* The name
*
* @param name The name
*/
void setName(String name);
} | 0true
| common_src_main_java_org_broadleafcommerce_common_value_ValueAssignable.java |
1,431 | public class MetaDataService extends AbstractComponent {
private final Semaphore[] indexMdLocks;
@Inject
public MetaDataService(Settings settings) {
super(settings);
indexMdLocks = new Semaphore[500];
for (int i = 0; i < indexMdLocks.length; i++) {
indexMdLocks[i] = new Semaphore(1);
}
}
public Semaphore indexMetaDataLock(String index) {
return indexMdLocks[Math.abs(DjbHashFunction.DJB_HASH(index) % indexMdLocks.length)];
}
} | 0true
| src_main_java_org_elasticsearch_cluster_metadata_MetaDataService.java |
545 | deleteByQueryAction.execute(Requests.deleteByQueryRequest(request.indices()).source(querySourceBuilder), new ActionListener<DeleteByQueryResponse>() {
@Override
public void onResponse(DeleteByQueryResponse deleteByQueryResponse) {
refreshAction.execute(Requests.refreshRequest(request.indices()), new ActionListener<RefreshResponse>() {
@Override
public void onResponse(RefreshResponse refreshResponse) {
removeMapping();
}
@Override
public void onFailure(Throwable e) {
removeMapping();
}
protected void removeMapping() {
DeleteMappingClusterStateUpdateRequest clusterStateUpdateRequest = new DeleteMappingClusterStateUpdateRequest()
.indices(request.indices()).types(request.types())
.ackTimeout(request.timeout())
.masterNodeTimeout(request.masterNodeTimeout());
metaDataMappingService.removeMapping(clusterStateUpdateRequest, new ClusterStateUpdateListener() {
@Override
public void onResponse(ClusterStateUpdateResponse response) {
listener.onResponse(new DeleteMappingResponse(response.isAcknowledged()));
}
@Override
public void onFailure(Throwable t) {
listener.onFailure(t);
}
});
}
});
}
@Override
public void onFailure(Throwable t) {
listener.onFailure(t);
}
}); | 1no label
| src_main_java_org_elasticsearch_action_admin_indices_mapping_delete_TransportDeleteMappingAction.java |
313 | public class ClusterShardHealth implements Streamable {
private int shardId;
ClusterHealthStatus status = ClusterHealthStatus.RED;
int activeShards = 0;
int relocatingShards = 0;
int initializingShards = 0;
int unassignedShards = 0;
boolean primaryActive = false;
private ClusterShardHealth() {
}
ClusterShardHealth(int shardId) {
this.shardId = shardId;
}
public int getId() {
return shardId;
}
public ClusterHealthStatus getStatus() {
return status;
}
public int getRelocatingShards() {
return relocatingShards;
}
public int getActiveShards() {
return activeShards;
}
public boolean isPrimaryActive() {
return primaryActive;
}
public int getInitializingShards() {
return initializingShards;
}
public int getUnassignedShards() {
return unassignedShards;
}
static ClusterShardHealth readClusterShardHealth(StreamInput in) throws IOException {
ClusterShardHealth ret = new ClusterShardHealth();
ret.readFrom(in);
return ret;
}
@Override
public void readFrom(StreamInput in) throws IOException {
shardId = in.readVInt();
status = ClusterHealthStatus.fromValue(in.readByte());
activeShards = in.readVInt();
relocatingShards = in.readVInt();
initializingShards = in.readVInt();
unassignedShards = in.readVInt();
primaryActive = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(shardId);
out.writeByte(status.value());
out.writeVInt(activeShards);
out.writeVInt(relocatingShards);
out.writeVInt(initializingShards);
out.writeVInt(unassignedShards);
out.writeBoolean(primaryActive);
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_cluster_health_ClusterShardHealth.java |
90 | {
@Override
protected void configure( GraphDatabaseBuilder builder )
{
builder.setConfig( GraphDatabaseSettings.cache_type, "none" );
};
}; | 0true
| community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_ReadTransactionLogWritingTest.java |
638 | public abstract class AbstractCollectionProxyImpl<S extends RemoteService, E> extends AbstractDistributedObject<S>
implements InitializingObject {
protected final String name;
protected final int partitionId;
protected AbstractCollectionProxyImpl(String name, NodeEngine nodeEngine, S service) {
super(nodeEngine, service);
this.name = name;
this.partitionId = nodeEngine.getPartitionService().getPartitionId(getNameAsPartitionAwareData());
}
@Override
public void initialize() {
final NodeEngine nodeEngine = getNodeEngine();
CollectionConfig config = getConfig(nodeEngine);
final List<ItemListenerConfig> itemListenerConfigs = config.getItemListenerConfigs();
for (ItemListenerConfig itemListenerConfig : itemListenerConfigs) {
ItemListener listener = itemListenerConfig.getImplementation();
if (listener == null && itemListenerConfig.getClassName() != null) {
try {
listener = ClassLoaderUtil.newInstance(nodeEngine.getConfigClassLoader(), itemListenerConfig.getClassName());
} catch (Exception e) {
throw ExceptionUtil.rethrow(e);
}
}
if (listener != null) {
if (listener instanceof HazelcastInstanceAware) {
((HazelcastInstanceAware) listener).setHazelcastInstance(nodeEngine.getHazelcastInstance());
}
addItemListener(listener, itemListenerConfig.isIncludeValue());
}
}
}
protected abstract CollectionConfig getConfig(NodeEngine nodeEngine);
@Override
public String getName() {
return name;
}
public boolean add(E e) {
throwExceptionIfNull(e);
final Data value = getNodeEngine().toData(e);
final CollectionAddOperation operation = new CollectionAddOperation(name, value);
final Boolean result = invoke(operation);
return result;
}
public boolean remove(Object o) {
throwExceptionIfNull(o);
final Data value = getNodeEngine().toData(o);
final CollectionRemoveOperation operation = new CollectionRemoveOperation(name, value);
final Boolean result = invoke(operation);
return result;
}
public int size() {
final CollectionSizeOperation operation = new CollectionSizeOperation(name);
final Integer result = invoke(operation);
return result;
}
public boolean isEmpty() {
return size() == 0;
}
public boolean contains(Object o) {
throwExceptionIfNull(o);
Set<Data> valueSet = new HashSet<Data>(1);
valueSet.add(getNodeEngine().toData(o));
final CollectionContainsOperation operation = new CollectionContainsOperation(name, valueSet);
final Boolean result = invoke(operation);
return result;
}
public boolean containsAll(Collection<?> c) {
throwExceptionIfNull(c);
Set<Data> valueSet = new HashSet<Data>(c.size());
final NodeEngine nodeEngine = getNodeEngine();
for (Object o : c) {
throwExceptionIfNull(o);
valueSet.add(nodeEngine.toData(o));
}
final CollectionContainsOperation operation = new CollectionContainsOperation(name, valueSet);
final Boolean result = invoke(operation);
return result;
}
public boolean addAll(Collection<? extends E> c) {
throwExceptionIfNull(c);
List<Data> valueList = new ArrayList<Data>(c.size());
final NodeEngine nodeEngine = getNodeEngine();
for (E e : c) {
throwExceptionIfNull(e);
valueList.add(nodeEngine.toData(e));
}
final CollectionAddAllOperation operation = new CollectionAddAllOperation(name, valueList);
final Boolean result = invoke(operation);
return result;
}
public boolean retainAll(Collection<?> c) {
return compareAndRemove(true, c);
}
public boolean removeAll(Collection<?> c) {
return compareAndRemove(false, c);
}
private boolean compareAndRemove(boolean retain, Collection<?> c) {
throwExceptionIfNull(c);
Set<Data> valueSet = new HashSet<Data>(c.size());
final NodeEngine nodeEngine = getNodeEngine();
for (Object o : c) {
throwExceptionIfNull(o);
valueSet.add(nodeEngine.toData(o));
}
final CollectionCompareAndRemoveOperation operation = new CollectionCompareAndRemoveOperation(name, retain, valueSet);
final Boolean result = invoke(operation);
return result;
}
public void clear() {
final CollectionClearOperation operation = new CollectionClearOperation(name);
invoke(operation);
}
public Iterator<E> iterator() {
return getAll().iterator();
}
public Object[] toArray() {
return getAll().toArray();
}
public <T> T[] toArray(T[] a) {
return getAll().toArray(a);
}
private Collection<E> getAll() {
final CollectionGetAllOperation operation = new CollectionGetAllOperation(name);
final SerializableCollection result = invoke(operation);
final Collection<Data> collection = result.getCollection();
final List<E> list = new ArrayList<E>(collection.size());
final NodeEngine nodeEngine = getNodeEngine();
for (Data data : collection) {
list.add(nodeEngine.<E>toObject(data));
}
return list;
}
public String addItemListener(ItemListener<E> listener, boolean includeValue) {
final EventService eventService = getNodeEngine().getEventService();
final CollectionEventFilter filter = new CollectionEventFilter(includeValue);
final EventRegistration registration = eventService.registerListener(getServiceName(), name, filter, listener);
return registration.getId();
}
public boolean removeItemListener(String registrationId) {
EventService eventService = getNodeEngine().getEventService();
return eventService.deregisterListener(getServiceName(), name, registrationId);
}
protected <T> T invoke(CollectionOperation operation) {
final NodeEngine nodeEngine = getNodeEngine();
try {
Future f = nodeEngine.getOperationService().invokeOnPartition(getServiceName(), operation, partitionId);
return nodeEngine.toObject(f.get());
} catch (Throwable throwable) {
throw ExceptionUtil.rethrow(throwable);
}
}
protected void throwExceptionIfNull(Object o) {
if (o == null) {
throw new NullPointerException("Object is null");
}
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_collection_AbstractCollectionProxyImpl.java |
967 | @Entity
@DiscriminatorColumn(name = "TYPE")
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_BUND_ITEM_FEE_PRICE")
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region = "blOrderElements")
@AdminPresentationMergeOverrides(
{
@AdminPresentationMergeOverride(name = "", mergeEntries =
@AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.READONLY,
booleanOverrideValue = true))
}
)
public class BundleOrderItemFeePriceImpl implements BundleOrderItemFeePrice {
public static final Log LOG = LogFactory.getLog(BundleOrderItemFeePriceImpl.class);
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(generator = "BundleOrderItemFeePriceId")
@GenericGenerator(
name="BundleOrderItemFeePriceId",
strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name="segment_value", value="BundleOrderItemFeePriceImpl"),
@Parameter(name="entity_name", value="org.broadleafcommerce.core.order.domain.BundleOrderItemFeePriceImpl")
}
)
@Column(name = "BUND_ITEM_FEE_PRICE_ID")
protected Long id;
@ManyToOne(targetEntity = BundleOrderItemImpl.class, optional = false)
@JoinColumn(name = "BUND_ORDER_ITEM_ID")
protected BundleOrderItem bundleOrderItem;
@Column(name = "AMOUNT", precision=19, scale=5)
@AdminPresentation(friendlyName = "BundleOrderItemFeePriceImpl_Amount", order=2, prominent=true)
protected BigDecimal amount;
@Column(name = "NAME")
@AdminPresentation(friendlyName = "BundleOrderItemFeePriceImpl_Name", order=1, prominent=true)
private String name;
@Column(name = "REPORTING_CODE")
@AdminPresentation(friendlyName = "BundleOrderItemFeePriceImpl_Reporting_Code", order=3, prominent=true)
private String reportingCode;
@Column(name = "IS_TAXABLE")
@AdminPresentation(friendlyName = "BundleOrderItemFeePriceImpl_Taxable", order=4)
private Boolean isTaxable = Boolean.FALSE;
@Override
public Long getId() {
return id;
}
@Override
public void setId(Long id) {
this.id = id;
}
@Override
public BundleOrderItem getBundleOrderItem() {
return bundleOrderItem;
}
@Override
public void setBundleOrderItem(BundleOrderItem bundleOrderItem) {
this.bundleOrderItem = bundleOrderItem;
}
@Override
public Money getAmount() {
return convertToMoney(amount);
}
@Override
public void setAmount(Money amount) {
this.amount = Money.toAmount(amount);
}
@Override
public String getName() {
return name;
}
@Override
public void setName(String name) {
this.name = name;
}
@Override
public Boolean isTaxable() {
return isTaxable;
}
@Override
public void setTaxable(Boolean isTaxable) {
this.isTaxable = isTaxable;
}
@Override
public String getReportingCode() {
return reportingCode;
}
@Override
public void setReportingCode(String reportingCode) {
this.reportingCode = reportingCode;
}
public void checkCloneable(BundleOrderItemFeePrice bundleFeePrice) throws CloneNotSupportedException, SecurityException, NoSuchMethodException {
Method cloneMethod = bundleFeePrice.getClass().getMethod("clone", new Class[]{});
if (cloneMethod.getDeclaringClass().getName().startsWith("org.broadleafcommerce") && !bundleFeePrice.getClass().getName().startsWith("org.broadleafcommerce")) {
//subclass is not implementing the clone method
throw new CloneNotSupportedException("Custom extensions and implementations should implement clone in order to guarantee split and merge operations are performed accurately");
}
}
protected Money convertToMoney(BigDecimal amount) {
return amount == null ? null : BroadleafCurrencyUtils.getMoney(amount, bundleOrderItem.getOrder().getCurrency());
}
@Override
public BundleOrderItemFeePrice clone() {
//instantiate from the fully qualified name via reflection
BundleOrderItemFeePrice clone;
try {
clone = (BundleOrderItemFeePrice) Class.forName(this.getClass().getName()).newInstance();
try {
checkCloneable(clone);
} catch (CloneNotSupportedException e) {
LOG.warn("Clone implementation missing in inheritance hierarchy outside of Broadleaf: " + clone.getClass().getName(), e);
}
clone.setAmount(convertToMoney(amount));
clone.setName(name);
clone.setReportingCode(reportingCode);
clone.setBundleOrderItem(bundleOrderItem);
} catch (Exception e) {
throw new RuntimeException(e);
}
return clone;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((amount == null) ? 0 : amount.hashCode());
result = prime * result + ((bundleOrderItem == null) ? 0 : bundleOrderItem.hashCode());
result = prime * result + ((id == null) ? 0 : id.hashCode());
result = prime * result + (isTaxable ? 1231 : 1237);
result = prime * result + ((name == null) ? 0 : name.hashCode());
result = prime * result + ((reportingCode == null) ? 0 : reportingCode.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
BundleOrderItemFeePriceImpl other = (BundleOrderItemFeePriceImpl) obj;
if (amount == null) {
if (other.amount != null) {
return false;
}
} else if (!amount.equals(other.amount)) {
return false;
}
if (bundleOrderItem == null) {
if (other.bundleOrderItem != null) {
return false;
}
} else if (!bundleOrderItem.equals(other.bundleOrderItem)) {
return false;
}
if (id == null) {
if (other.id != null) {
return false;
}
} else if (!id.equals(other.id)) {
return false;
}
if (isTaxable == null ? other.isTaxable != null : !isTaxable.equals(other.isTaxable)) {
return false;
}
if (name == null) {
if (other.name != null) {
return false;
}
} else if (!name.equals(other.name)) {
return false;
}
if (reportingCode == null) {
if (other.reportingCode != null) {
return false;
}
} else if (!reportingCode.equals(other.reportingCode)) {
return false;
}
return true;
}
} | 1no label
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_domain_BundleOrderItemFeePriceImpl.java |
79 | public class ChangeInitialCaseOfIdentifierInDeclaration
extends CorrectionProposal {
public static void addChangeIdentifierCaseProposal(Node node,
Collection<ICompletionProposal> proposals, IFile file) {
Tree.Identifier identifier = null;
if (node instanceof Tree.TypeDeclaration) {
identifier = ((Tree.TypeDeclaration) node).getIdentifier();
}
else if (node instanceof Tree.TypeParameterDeclaration) {
identifier = ((Tree.TypeParameterDeclaration) node).getIdentifier();
}
else if (node instanceof Tree.TypedDeclaration) {
identifier = ((Tree.TypedDeclaration) node).getIdentifier();
}
else if (node instanceof Tree.ImportPath) {
List<Identifier> importIdentifiers = ((Tree.ImportPath) node).getIdentifiers();
for (Identifier importIdentifier : importIdentifiers) {
if (importIdentifier.getText() != null &&
!importIdentifier.getText().isEmpty() &&
Character.isUpperCase(importIdentifier.getText().charAt(0))) {
identifier = importIdentifier;
break;
}
}
}
if (identifier != null && !identifier.getText().isEmpty()) {
addProposal(identifier, proposals, file);
}
}
private static void addProposal(Identifier identifier,
Collection<ICompletionProposal> proposals, IFile file) {
String newIdentifier;
String newFirstLetter;
String oldIdentifier = identifier.getText();
if (Character.isUpperCase(oldIdentifier.charAt(0))) {
newFirstLetter = String.valueOf(Character.toLowerCase(oldIdentifier.charAt(0)));
newIdentifier = newFirstLetter + oldIdentifier.substring(1);
} else {
newFirstLetter = String.valueOf(Character.toUpperCase(oldIdentifier.charAt(0)));
newIdentifier = newFirstLetter + oldIdentifier.substring(1);
}
TextFileChange change = new TextFileChange("Change initial case of identifier", file);
change.setEdit(new ReplaceEdit(identifier.getStartIndex(), 1, newFirstLetter));
ChangeInitialCaseOfIdentifierInDeclaration proposal =
new ChangeInitialCaseOfIdentifierInDeclaration(newIdentifier, change);
if (!proposals.contains(proposal)) {
proposals.add(proposal);
}
}
public ChangeInitialCaseOfIdentifierInDeclaration(String newIdentifier, Change change) {
super("Change initial case of identifier to '" + newIdentifier + "'", change, null);
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_ChangeInitialCaseOfIdentifierInDeclaration.java |
479 | public class TransportGetAliasesAction extends TransportMasterNodeReadOperationAction<GetAliasesRequest, GetAliasesResponse> {
@Inject
public TransportGetAliasesAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool) {
super(settings, transportService, clusterService, threadPool);
}
@Override
protected String transportAction() {
return GetAliasesAction.NAME;
}
@Override
protected String executor() {
// very lightweight operation all in memory no need to fork to a thread pool
return ThreadPool.Names.SAME;
}
@Override
protected GetAliasesRequest newRequest() {
return new GetAliasesRequest();
}
@Override
protected GetAliasesResponse newResponse() {
return new GetAliasesResponse();
}
@Override
protected void masterOperation(GetAliasesRequest request, ClusterState state, ActionListener<GetAliasesResponse> listener) throws ElasticsearchException {
String[] concreteIndices = state.metaData().concreteIndices(request.indices(), request.indicesOptions());
request.indices(concreteIndices);
@SuppressWarnings("unchecked") // ImmutableList to List results incompatible type
ImmutableOpenMap<String, List<AliasMetaData>> result = (ImmutableOpenMap) state.metaData().findAliases(request.aliases(), request.indices());
listener.onResponse(new GetAliasesResponse(result));
}
} | 1no label
| src_main_java_org_elasticsearch_action_admin_indices_alias_get_TransportGetAliasesAction.java |
300 | public class ValidateActions {
public static ActionRequestValidationException addValidationError(String error, ActionRequestValidationException validationException) {
if (validationException == null) {
validationException = new ActionRequestValidationException();
}
validationException.addValidationError(error);
return validationException;
}
} | 0true
| src_main_java_org_elasticsearch_action_ValidateActions.java |
471 | Object indexOneValue = makeDbCall(databaseDocumentTxOne, new ODbRelatedCall<Object>() {
public Object call() {
return indexOneEntry.getValue();
}
}); | 0true
| core_src_main_java_com_orientechnologies_orient_core_db_tool_ODatabaseCompare.java |
115 | static final class EmptyTask extends ForkJoinTask<Void> {
private static final long serialVersionUID = -7721805057305804111L;
EmptyTask() { status = ForkJoinTask.NORMAL; } // force done
public final Void getRawResult() { return null; }
public final void setRawResult(Void x) {}
public final boolean exec() { return true; }
} | 0true
| src_main_java_jsr166e_ForkJoinPool.java |
605 | public class BroadleafRequestContext {
protected static final Log LOG = LogFactory.getLog(BroadleafRequestContext.class);
private static final ThreadLocal<BroadleafRequestContext> BROADLEAF_REQUEST_CONTEXT = ThreadLocalManager.createThreadLocal(BroadleafRequestContext.class);
public static BroadleafRequestContext getBroadleafRequestContext() {
return BROADLEAF_REQUEST_CONTEXT.get();
}
public static void setBroadleafRequestContext(BroadleafRequestContext broadleafRequestContext) {
BROADLEAF_REQUEST_CONTEXT.set(broadleafRequestContext);
}
public static boolean hasLocale(){
if (getBroadleafRequestContext() != null) {
if(getBroadleafRequestContext().getLocale() != null){
return true;
}
}
return false;
}
public static boolean hasCurrency() {
if (getBroadleafRequestContext() != null) {
if (getBroadleafRequestContext().getBroadleafCurrency() != null) {
return true;
}
}
return false;
}
protected HttpServletRequest request;
protected HttpServletResponse response;
protected WebRequest webRequest;
protected SandBox sandbox;
protected Locale locale;
protected TimeZone timeZone;
protected BroadleafCurrency broadleafCurrency;
protected Site site;
protected Theme theme;
protected java.util.Locale javaLocale;
protected Currency javaCurrency;
protected Catalog currentCatalog;
protected Boolean ignoreSite = false;
protected Map<String, Object> additionalProperties = new HashMap<String, Object>();
protected MessageSource messageSource;
protected RequestDTO requestDTO;
/**
* Gets the current request on the context
* @return
*/
public HttpServletRequest getRequest() {
return request;
}
/**
* Sets the current request on the context. Note that this also invokes {@link #setWebRequest(WebRequest)} by wrapping
* <b>request</b> in a {@link ServletWebRequest}.
*
* @param request
*/
public void setRequest(HttpServletRequest request) {
this.request = request;
this.webRequest = new ServletWebRequest(request);
}
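// Illustrative sketch (hypothetical usage, not part of this class): a servlet filter could bind
// the current request to the thread-local context before invoking the rest of the chain:
//   BroadleafRequestContext ctx = new BroadleafRequestContext();
//   ctx.setRequest(httpServletRequest); // also populates the WebRequest wrapper, per the javadoc above
//   BroadleafRequestContext.setBroadleafRequestContext(ctx);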
/**
* Returns the response for the context
*
* @return
*/
public HttpServletResponse getResponse() {
return response;
}
/**
* Sets the response on the context
*
* @param response
*/
public void setResponse(HttpServletResponse response) {
this.response = response;
}
/**
* Sets the generic request on the context. This is available to be used in non-Servlet environments (like Portlets).
* Note that if <b>webRequest</b> is an instance of {@link ServletWebRequest} then
* {@link #setRequest(HttpServletRequest)} will be invoked as well with the native underlying {@link HttpServletRequest}
* passed as a parameter.
* <br />
* <br />
* Also, if <b>webRequest</b> is an instance of {@link ServletWebRequest} then an attempt is made to set the response
* (note that this could be null if the ServletWebRequest was not instantiated with both the {@link HttpServletRequest}
     * and {@link HttpServletResponse}).
* @param webRequest
*/
public void setWebRequest(WebRequest webRequest) {
this.webRequest = webRequest;
if (webRequest instanceof ServletWebRequest) {
this.request = ((ServletWebRequest) webRequest).getRequest();
setResponse(((ServletWebRequest) webRequest).getResponse());
}
}
/**
* Returns the generic request for use outside of servlets (like in Portlets). This will be automatically set
* by invoking {@link #setRequest(HttpServletRequest)}
*
* @return the generic request
* @see {@link #setWebRequest(WebRequest)}
*/
public WebRequest getWebRequest() {
return webRequest;
}
public Site getSite() {
return site;
}
public void setSite(Site site) {
this.site = site;
}
public SandBox getSandbox() {
return sandbox;
}
public void setSandbox(SandBox sandbox) {
this.sandbox = sandbox;
}
public Locale getLocale() {
return locale;
}
/**
* Returns the java.util.Locale constructed from the org.broadleafcommerce.common.locale.domain.Locale.
* @return
*/
public java.util.Locale getJavaLocale() {
if (this.javaLocale == null) {
this.javaLocale = convertLocaleToJavaLocale();
}
return this.javaLocale;
}
/**
* Returns the java.util.Currency constructed from the org.broadleafcommerce.common.currency.domain.BroadleafCurrency.
* If there is no BroadleafCurrency specified this will return the currency based on the JVM locale
*
* @return
*/
public Currency getJavaCurrency() {
if (javaCurrency == null) {
try {
if (getBroadleafCurrency() != null && getBroadleafCurrency().getCurrencyCode() != null) {
javaCurrency = Currency.getInstance(getBroadleafCurrency().getCurrencyCode());
} else {
javaCurrency = Currency.getInstance(getJavaLocale());
}
} catch (IllegalArgumentException e) {
LOG.warn("There was an error processing the configured locale into the java currency. This is likely because the default" +
" locale is set to something like 'en' (which is NOT apart of ISO 3166 and does not have a currency" +
" associated with it) instead of 'en_US' (which IS apart of ISO 3166 and has a currency associated" +
" with it). Because of this, the currency is now set to the default locale of the JVM");
LOG.warn("To fully resolve this, update the default entry in the BLC_LOCALE table to take into account the" +
" country code as well as the language. Alternatively, you could also update the BLC_CURRENCY table" +
" to contain a default currency.");
javaCurrency = Currency.getInstance(java.util.Locale.getDefault());
}
}
return javaCurrency;
}
public void setLocale(Locale locale) {
this.locale = locale;
this.javaLocale = convertLocaleToJavaLocale();
}
public String getRequestURIWithoutContext() {
String requestURIWithoutContext = null;
if (request.getRequestURI() != null) {
if (request.getContextPath() != null) {
requestURIWithoutContext = request.getRequestURI().substring(request.getContextPath().length());
} else {
requestURIWithoutContext = request.getRequestURI();
}
// Remove JSESSION-ID or other modifiers
int pos = requestURIWithoutContext.indexOf(";");
if (pos >= 0) {
requestURIWithoutContext = requestURIWithoutContext.substring(0,pos);
}
}
return requestURIWithoutContext;
}
protected java.util.Locale convertLocaleToJavaLocale() {
if (locale == null || locale.getLocaleCode() == null) {
return java.util.Locale.getDefault();
} else {
String localeString = locale.getLocaleCode();
String[] components = localeString.split("_");
if (components.length == 1) {
return new java.util.Locale(components[0]);
} else if (components.length == 2) {
return new java.util.Locale(components[0], components[1]);
} else if (components.length == 3) {
return new java.util.Locale(components[0], components[1], components[2]);
}
return null;
}
}
public boolean isSecure() {
boolean secure = false;
if (request != null) {
secure = ("HTTPS".equalsIgnoreCase(request.getScheme()) || request.isSecure());
}
return secure;
}
public boolean isProductionSandbox() {
return (sandbox == null || SandBoxType.PRODUCTION.equals(sandbox.getSandBoxType()));
}
public Theme getTheme() {
return theme;
}
public void setTheme(Theme theme) {
this.theme = theme;
}
public BroadleafCurrency getBroadleafCurrency() {
return broadleafCurrency;
}
public void setBroadleafCurrency(BroadleafCurrency broadleafCurrency) {
this.broadleafCurrency = broadleafCurrency;
}
public Catalog getCurrentCatalog() {
return currentCatalog;
}
public void setCurrentCatalog(Catalog currentCatalog) {
this.currentCatalog = currentCatalog;
}
@SuppressWarnings("unchecked")
public static Map<String, String[]> getRequestParameterMap() {
return getBroadleafRequestContext().getRequest().getParameterMap();
}
public Boolean getIgnoreSite() {
return ignoreSite;
}
public void setIgnoreSite(Boolean ignoreSite) {
this.ignoreSite = ignoreSite;
}
public Map<String, Object> getAdditionalProperties() {
return additionalProperties;
}
public void setAdditionalProperties(Map<String, Object> additionalProperties) {
this.additionalProperties = additionalProperties;
}
public MessageSource getMessageSource() {
return messageSource;
}
public void setMessageSource(MessageSource messageSource) {
this.messageSource = messageSource;
}
public TimeZone getTimeZone() {
return timeZone;
}
public void setTimeZone(TimeZone timeZone) {
this.timeZone = timeZone;
}
public RequestDTO getRequestDTO() {
return requestDTO;
}
public void setRequestDTO(RequestDTO requestDTO) {
this.requestDTO = requestDTO;
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_web_BroadleafRequestContext.java |
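A minimal usage sketch of the thread-local pattern shown in the BroadleafRequestContext entry above; it uses only methods present in that snippet, and the surrounding servlet/filter plumbing (an HttpServletRequest named request) is assumed.
// Sketch only: bind a context for the current request thread, read the resolved
// java.util.Locale, and clear the binding so pooled threads do not leak state.
BroadleafRequestContext ctx = new BroadleafRequestContext();
ctx.setRequest(request);   // also wraps the request in a ServletWebRequest (see setRequest above)
BroadleafRequestContext.setBroadleafRequestContext(ctx);
try {
    java.util.Locale locale = BroadleafRequestContext.getBroadleafRequestContext().getJavaLocale();
    // ... use the resolved locale/currency while servicing the request ...
} finally {
    BroadleafRequestContext.setBroadleafRequestContext(null);
}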
92 | public class TTYConsoleReader implements OConsoleReader {
private static final String HISTORY_FILE_NAME = ".orientdb_history";
private static int MAX_HISTORY_ENTRIES = 50;
public static int END_CHAR = 70;
public static int BEGIN_CHAR = 72;
public static int DEL_CHAR = 126;
public static int DOWN_CHAR = 66;
public static int UP_CHAR = 65;
public static int RIGHT_CHAR = 67;
public static int LEFT_CHAR = 68;
public static int HORIZONTAL_TAB_CHAR = 9;
public static int VERTICAL_TAB_CHAR = 11;
public static int BACKSPACE_CHAR = 127;
public static int NEW_LINE_CHAR = 10;
public static int UNIT_SEPARATOR_CHAR = 31;
protected int currentPos = 0;
protected List<String> history = new ArrayList<String>();
protected String historyBuffer;
protected Reader inStream;
protected PrintStream outStream;
public TTYConsoleReader() {
File file = getHistoryFile(true);
BufferedReader reader;
try {
reader = new BufferedReader(new FileReader(file));
String historyEntry = reader.readLine();
while (historyEntry != null) {
history.add(historyEntry);
historyEntry = reader.readLine();
}
if (System.getProperty("file.encoding") != null) {
inStream = new InputStreamReader(System.in, System.getProperty("file.encoding"));
outStream = new PrintStream(System.out, false, System.getProperty("file.encoding"));
} else {
inStream = new InputStreamReader(System.in);
outStream = System.out;
}
} catch (FileNotFoundException fnfe) {
OLogManager.instance().error(this, "History file not found", fnfe, "");
} catch (IOException ioe) {
OLogManager.instance().error(this, "Error reading history file.", ioe, "");
}
}
protected OConsoleApplication console;
public String readLine() {
String consoleInput = "";
try {
StringBuffer buffer = new StringBuffer();
currentPos = 0;
historyBuffer = null;
int historyNum = history.size();
boolean hintedHistory = false;
while (true) {
boolean escape = false;
boolean ctrl = false;
int next = inStream.read();
if (next == 27) {
escape = true;
inStream.read();
next = inStream.read();
}
if (escape) {
if (next == 49) {
inStream.read();
next = inStream.read();
}
if (next == 53) {
ctrl = true;
next = inStream.read();
}
if (ctrl) {
if (next == RIGHT_CHAR) {
currentPos = buffer.indexOf(" ", currentPos) + 1;
if (currentPos == 0)
currentPos = buffer.length();
StringBuffer cleaner = new StringBuffer();
for (int i = 0; i < buffer.length(); i++) {
cleaner.append(" ");
}
rewriteConsole(cleaner, true);
rewriteConsole(buffer, false);
} else if (next == LEFT_CHAR) {
if (currentPos > 1 && currentPos < buffer.length() && buffer.charAt(currentPos - 1) == ' ') {
currentPos = buffer.lastIndexOf(" ", (currentPos - 2)) + 1;
} else {
currentPos = buffer.lastIndexOf(" ", currentPos) + 1;
}
if (currentPos < 0)
currentPos = 0;
StringBuffer cleaner = new StringBuffer();
for (int i = 0; i < buffer.length(); i++) {
cleaner.append(" ");
}
rewriteConsole(cleaner, true);
rewriteConsole(buffer, false);
} else {
}
} else {
if (next == UP_CHAR && !history.isEmpty()) {
if (history.size() > 0) { // UP
StringBuffer cleaner = new StringBuffer();
for (int i = 0; i < buffer.length(); i++) {
cleaner.append(" ");
}
rewriteConsole(cleaner, true);
if (!hintedHistory && (historyNum == history.size() || !buffer.toString().equals(history.get(historyNum)))) {
if (buffer.length() > 0) {
hintedHistory = true;
historyBuffer = buffer.toString();
} else {
historyBuffer = null;
}
}
historyNum = getHintedHistoryIndexUp(historyNum);
if (historyNum > -1) {
buffer = new StringBuffer(history.get(historyNum));
} else {
buffer = new StringBuffer(historyBuffer);
}
currentPos = buffer.length();
rewriteConsole(buffer, false);
// writeHistory(historyNum);
}
} else if (next == DOWN_CHAR && !history.isEmpty()) { // DOWN
if (history.size() > 0) {
StringBuffer cleaner = new StringBuffer();
for (int i = 0; i < buffer.length(); i++) {
cleaner.append(" ");
}
rewriteConsole(cleaner, true);
historyNum = getHintedHistoryIndexDown(historyNum);
if (historyNum == history.size()) {
if (historyBuffer != null) {
buffer = new StringBuffer(historyBuffer);
} else {
buffer = new StringBuffer("");
}
} else {
buffer = new StringBuffer(history.get(historyNum));
}
currentPos = buffer.length();
rewriteConsole(buffer, false);
// writeHistory(historyNum);
}
} else if (next == RIGHT_CHAR) {
if (currentPos < buffer.length()) {
currentPos++;
StringBuffer cleaner = new StringBuffer();
for (int i = 0; i < buffer.length(); i++) {
cleaner.append(" ");
}
rewriteConsole(cleaner, true);
rewriteConsole(buffer, false);
}
} else if (next == LEFT_CHAR) {
if (currentPos > 0) {
currentPos--;
StringBuffer cleaner = new StringBuffer();
for (int i = 0; i < buffer.length(); i++) {
cleaner.append(" ");
}
rewriteConsole(cleaner, true);
rewriteConsole(buffer, false);
}
} else if (next == END_CHAR) {
currentPos = buffer.length();
StringBuffer cleaner = new StringBuffer();
for (int i = 0; i < buffer.length(); i++) {
cleaner.append(" ");
}
rewriteConsole(cleaner, true);
rewriteConsole(buffer, false);
} else if (next == BEGIN_CHAR) {
currentPos = 0;
StringBuffer cleaner = new StringBuffer();
for (int i = 0; i < buffer.length(); i++) {
cleaner.append(" ");
}
rewriteConsole(cleaner, true);
rewriteConsole(buffer, false);
} else {
}
}
} else {
if (next == NEW_LINE_CHAR) {
outStream.println();
break;
} else if (next == BACKSPACE_CHAR) {
if (buffer.length() > 0 && currentPos > 0) {
StringBuffer cleaner = new StringBuffer();
for (int i = 0; i < buffer.length(); i++) {
cleaner.append(" ");
}
buffer.deleteCharAt(currentPos - 1);
currentPos--;
rewriteConsole(cleaner, true);
rewriteConsole(buffer, false);
}
} else if (next == DEL_CHAR) {
if (buffer.length() > 0 && currentPos >= 0 && currentPos < buffer.length()) {
StringBuffer cleaner = new StringBuffer();
for (int i = 0; i < buffer.length(); i++) {
cleaner.append(" ");
}
buffer.deleteCharAt(currentPos);
rewriteConsole(cleaner, true);
rewriteConsole(buffer, false);
}
} else if (next == HORIZONTAL_TAB_CHAR) {
StringBuffer cleaner = new StringBuffer();
for (int i = 0; i < buffer.length(); i++) {
cleaner.append(" ");
}
buffer = writeHint(buffer);
rewriteConsole(cleaner, true);
rewriteConsole(buffer, false);
currentPos = buffer.length();
} else {
if ((next > UNIT_SEPARATOR_CHAR && next < BACKSPACE_CHAR) || next > BACKSPACE_CHAR) {
StringBuffer cleaner = new StringBuffer();
for (int i = 0; i < buffer.length(); i++) {
cleaner.append(" ");
}
if (currentPos == buffer.length()) {
buffer.append((char) next);
} else {
buffer.insert(currentPos, (char) next);
}
currentPos++;
rewriteConsole(cleaner, true);
rewriteConsole(buffer, false);
} else {
outStream.println();
outStream.print(buffer);
}
}
historyNum = history.size();
hintedHistory = false;
}
}
consoleInput = buffer.toString();
history.remove(consoleInput);
history.add(consoleInput);
historyNum = history.size();
writeHistory(historyNum);
} catch (IOException e) {
return null;
}
if (consoleInput.equals("clear")) {
outStream.flush();
for (int i = 0; i < 150; i++) {
outStream.println();
}
outStream.print("\r");
outStream.print("orientdb> ");
return readLine();
} else {
return consoleInput;
}
}
private void writeHistory(int historyNum) throws IOException {
if (historyNum <= MAX_HISTORY_ENTRIES) {
File historyFile = getHistoryFile(false);
BufferedWriter writer = new BufferedWriter(new FileWriter(historyFile));
try {
for (String historyEntry : history) {
writer.write(historyEntry);
writer.newLine();
}
} finally {
writer.flush();
writer.close();
}
} else {
File historyFile = getHistoryFile(false);
BufferedWriter writer = new BufferedWriter(new FileWriter(historyFile));
try {
for (String historyEntry : history.subList(historyNum - MAX_HISTORY_ENTRIES - 1, historyNum - 1)) {
writer.write(historyEntry);
writer.newLine();
}
} finally {
writer.flush();
writer.close();
}
}
}
private StringBuffer writeHint(StringBuffer buffer) {
List<String> suggestions = new ArrayList<String>();
for (Method method : console.getConsoleMethods().keySet()) {
String command = OConsoleApplication.getClearName(method.getName());
if (command.startsWith(buffer.toString())) {
suggestions.add(command);
}
}
if (suggestions.size() > 1) {
StringBuffer hintBuffer = new StringBuffer();
String[] bufferComponents = buffer.toString().split(" ");
String[] suggestionComponents;
Set<String> bufferPart = new HashSet<String>();
String suggestionPart = null;
boolean appendSpace = true;
for (String suggestion : suggestions) {
suggestionComponents = suggestion.split(" ");
hintBuffer.append("* " + suggestion + " ");
hintBuffer.append("\n");
suggestionPart = "";
if (bufferComponents.length == 0 || buffer.length() == 0) {
suggestionPart = null;
} else if (bufferComponents.length == 1) {
bufferPart.add(suggestionComponents[0]);
if (bufferPart.size() > 1) {
suggestionPart = bufferComponents[0];
appendSpace = false;
} else {
suggestionPart = suggestionComponents[0];
}
} else {
bufferPart.add(suggestionComponents[bufferComponents.length - 1]);
if (bufferPart.size() > 1) {
for (int i = 0; i < bufferComponents.length; i++) {
suggestionPart += bufferComponents[i];
if (i < (bufferComponents.length - 1)) {
suggestionPart += " ";
}
appendSpace = false;
}
} else {
for (int i = 0; i < suggestionComponents.length; i++) {
suggestionPart += suggestionComponents[i] + " ";
}
}
}
}
if (suggestionPart != null) {
buffer = new StringBuffer();
buffer.append(suggestionPart);
if (appendSpace) {
buffer.append(" ");
}
}
hintBuffer.append("-----------------------------\n");
rewriteHintConsole(hintBuffer);
} else if (suggestions.size() > 0) {
buffer = new StringBuffer();
buffer.append(suggestions.get(0));
buffer.append(" ");
}
return buffer;
}
public void setConsole(OConsoleApplication iConsole) {
console = iConsole;
}
public OConsoleApplication getConsole() {
return console;
}
private void rewriteConsole(StringBuffer buffer, boolean cleaner) {
outStream.print("\r");
outStream.print("orientdb> ");
if (currentPos < buffer.length() && buffer.length() > 0 && !cleaner) {
outStream.print("\033[0m" + buffer.substring(0, currentPos) + "\033[0;30;47m" + buffer.substring(currentPos, currentPos + 1)
+ "\033[0m" + buffer.substring(currentPos + 1) + "\033[0m");
} else {
outStream.print(buffer);
}
}
private void rewriteHintConsole(StringBuffer buffer) {
outStream.print("\r");
outStream.print(buffer);
}
private int getHintedHistoryIndexUp(int historyNum) {
if (historyBuffer != null && !historyBuffer.equals("")) {
for (int i = (historyNum - 1); i >= 0; i--) {
if (history.get(i).startsWith(historyBuffer)) {
return i;
}
}
return -1;
}
return historyNum > 0 ? (historyNum - 1) : 0;
}
private int getHintedHistoryIndexDown(int historyNum) throws IOException {
if (historyBuffer != null && !historyBuffer.equals("")) {
for (int i = historyNum + 1; i < history.size(); i++) {
if (history.get(i).startsWith(historyBuffer)) {
return i;
}
}
return history.size();
}
return historyNum < history.size() ? (historyNum + 1) : history.size();
}
private File getHistoryFile(boolean read) {
File file = new File(HISTORY_FILE_NAME);
if (!file.exists()) {
try {
file.createNewFile();
} catch (IOException ioe) {
OLogManager.instance().error(this, "Error creating history file.", ioe, "");
}
} else if (!read) {
file.delete();
try {
file.createNewFile();
} catch (IOException ioe) {
OLogManager.instance().error(this, "Error creating history file.", ioe, "");
}
}
return file;
}
} | 0true
| commons_src_main_java_com_orientechnologies_common_console_TTYConsoleReader.java |
2,612 | public final class UTFEncoderDecoder {
private static final int STRING_CHUNK_SIZE = 16 * 1024;
private static final UTFEncoderDecoder INSTANCE;
static {
INSTANCE = buildUTFUtil();
}
private final StringCreator stringCreator;
private final boolean hazelcastEnterpriseActive;
private UTFEncoderDecoder(boolean fastStringCreator) {
this(fastStringCreator ? buildFastStringCreator() : new DefaultStringCreator(), false);
}
private UTFEncoderDecoder(StringCreator stringCreator, boolean hazelcastEnterpriseActive) {
this.stringCreator = stringCreator;
this.hazelcastEnterpriseActive = hazelcastEnterpriseActive;
}
public StringCreator getStringCreator() {
return stringCreator;
}
public static void writeUTF(final DataOutput out, final String str, byte[] buffer) throws IOException {
INSTANCE.writeUTF0(out, str, buffer);
}
public static String readUTF(final DataInput in, byte[] buffer) throws IOException {
return INSTANCE.readUTF0(in, buffer);
}
public boolean isHazelcastEnterpriseActive() {
return hazelcastEnterpriseActive;
}
public void writeUTF0(final DataOutput out, final String str, byte[] buffer) throws IOException {
boolean isNull = str == null;
out.writeBoolean(isNull);
if (isNull) {
return;
}
int length = str.length();
out.writeInt(length);
if (length > 0) {
int chunkSize = (length / STRING_CHUNK_SIZE) + 1;
for (int i = 0; i < chunkSize; i++) {
int beginIndex = Math.max(0, i * STRING_CHUNK_SIZE - 1);
int endIndex = Math.min((i + 1) * STRING_CHUNK_SIZE - 1, length);
writeShortUTF(out, str, beginIndex, endIndex, buffer);
}
}
}
private void writeShortUTF(final DataOutput out,
final String str,
final int beginIndex,
final int endIndex,
byte[] buffer) throws IOException {
int utfLength = 0;
int c = 0;
int count = 0;
/* use charAt instead of copying String to char array */
for (int i = beginIndex; i < endIndex; i++) {
c = str.charAt(i);
if ((c >= 0x0001) && (c <= 0x007F)) {
utfLength++;
} else if (c > 0x07FF) {
utfLength += 3;
} else {
utfLength += 2;
}
}
if (utfLength > 65535) {
throw new UTFDataFormatException("encoded string too long:"
+ utfLength + " bytes");
}
out.writeShort(utfLength);
int i;
for (i = beginIndex; i < endIndex; i++) {
c = str.charAt(i);
if (!((c >= 0x0001) && (c <= 0x007F))) {
break;
}
buffering(buffer, count++, (byte) c, out);
}
for (; i < endIndex; i++) {
c = str.charAt(i);
if ((c >= 0x0001) && (c <= 0x007F)) {
buffering(buffer, count++, (byte) c, out);
} else if (c > 0x07FF) {
buffering(buffer, count++, (byte) (0xE0 | ((c >> 12) & 0x0F)), out);
buffering(buffer, count++, (byte) (0x80 | ((c >> 6) & 0x3F)), out);
buffering(buffer, count++, (byte) (0x80 | ((c) & 0x3F)), out);
} else {
buffering(buffer, count++, (byte) (0xC0 | ((c >> 6) & 0x1F)), out);
buffering(buffer, count++, (byte) (0x80 | ((c) & 0x3F)), out);
}
}
int length = count % buffer.length;
out.write(buffer, 0, length == 0 ? buffer.length : length);
}
public String readUTF0(final DataInput in, byte[] buffer) throws IOException {
boolean isNull = in.readBoolean();
if (isNull) {
return null;
}
int length = in.readInt();
final char[] data = new char[length];
if (length > 0) {
int chunkSize = length / STRING_CHUNK_SIZE + 1;
for (int i = 0; i < chunkSize; i++) {
int beginIndex = Math.max(0, i * STRING_CHUNK_SIZE - 1);
int endIndex = Math.min((i + 1) * STRING_CHUNK_SIZE - 1, length);
readShortUTF(in, data, beginIndex, endIndex, buffer);
}
}
return stringCreator.buildString(data);
}
private void readShortUTF(final DataInput in, final char[] data,
final int beginIndex, final int endIndex,
byte[] buffer) throws IOException {
final int utflen = in.readShort();
int c = 0;
int char2 = 0;
int char3 = 0;
int count = 0;
int charArrCount = beginIndex;
int lastCount = -1;
while (count < utflen) {
c = buffered(buffer, count, utflen, in) & 0xff;
if (c > 127) {
break;
}
lastCount = count;
count++;
data[charArrCount++] = (char) c;
}
while (count < utflen) {
if (lastCount > -1 && lastCount < count) {
c = buffered(buffer, count, utflen, in) & 0xff;
}
switch (c >> 4) {
case 0:
case 1:
case 2:
case 3:
case 4:
case 5:
case 6:
case 7:
/* 0xxxxxxx */
lastCount = count;
count++;
data[charArrCount++] = (char) c;
break;
case 12:
case 13:
/* 110x xxxx 10xx xxxx */
lastCount = count++;
if (count + 1 > utflen) {
throw new UTFDataFormatException("malformed input: partial character at end");
}
char2 = buffered(buffer, count++, utflen, in);
if ((char2 & 0xC0) != 0x80) {
throw new UTFDataFormatException("malformed input around byte " + count);
}
data[charArrCount++] = (char) (((c & 0x1F) << 6) | (char2 & 0x3F));
break;
case 14:
/* 1110 xxxx 10xx xxxx 10xx xxxx */
lastCount = count++;
if (count + 2 > utflen) {
throw new UTFDataFormatException("malformed input: partial character at end");
}
char2 = buffered(buffer, count++, utflen, in);
char3 = buffered(buffer, count++, utflen, in);
if (((char2 & 0xC0) != 0x80) || ((char3 & 0xC0) != 0x80)) {
throw new UTFDataFormatException("malformed input around byte " + (count - 1));
}
data[charArrCount++] = (char) (((c & 0x0F) << 12) | ((char2 & 0x3F) << 6) | ((char3 & 0x3F) << 0));
break;
default:
/* 10xx xxxx, 1111 xxxx */
throw new UTFDataFormatException("malformed input around byte " + count);
}
}
}
private void buffering(byte[] buffer, int pos, byte value, DataOutput out) throws IOException {
int innerPos = pos % buffer.length;
if (pos > 0 && innerPos == 0) {
out.write(buffer, 0, buffer.length);
}
buffer[innerPos] = value;
}
    private byte buffered(byte[] buffer, int pos, int utfLength, DataInput in) throws IOException {
int innerPos = pos % buffer.length;
if (innerPos == 0) {
            int length = Math.min(buffer.length, utfLength - pos);
in.readFully(buffer, 0, length);
}
return buffer[innerPos];
}
public static boolean useOldStringConstructor() {
try {
Class<String> clazz = String.class;
clazz.getDeclaredConstructor(int.class, int.class, char[].class);
return true;
} catch (Throwable ignore) {
}
return false;
}
private static UTFEncoderDecoder buildUTFUtil() {
try {
Class<?> clazz = Class.forName("com.hazelcast.nio.utf8.EnterpriseStringCreator");
Method method = clazz.getDeclaredMethod("findBestStringCreator");
return new UTFEncoderDecoder((StringCreator) method.invoke(clazz), true);
} catch (Throwable t) {
}
boolean faststringEnabled = Boolean.parseBoolean(System.getProperty("hazelcast.nio.faststring", "true"));
return new UTFEncoderDecoder(faststringEnabled ? buildFastStringCreator() : new DefaultStringCreator(), false);
}
private static StringCreator buildFastStringCreator() {
try {
// Give access to the package private String constructor
Constructor<String> constructor = null;
if (UTFEncoderDecoder.useOldStringConstructor()) {
constructor = String.class.getDeclaredConstructor(int.class, int.class, char[].class);
} else {
constructor = String.class.getDeclaredConstructor(char[].class, boolean.class);
}
if (constructor != null) {
constructor.setAccessible(true);
return new FastStringCreator(constructor);
}
} catch (Throwable ignore) {
}
return null;
}
private static class DefaultStringCreator implements UTFEncoderDecoder.StringCreator {
@Override
public String buildString(char[] chars) {
return new String(chars);
}
}
private static class FastStringCreator implements UTFEncoderDecoder.StringCreator {
private final Constructor<String> constructor;
private final boolean useOldStringConstructor;
public FastStringCreator(Constructor<String> constructor) {
this.constructor = constructor;
this.useOldStringConstructor = constructor.getParameterTypes().length == 3;
}
@Override
public String buildString(char[] chars) {
try {
if (useOldStringConstructor) {
return constructor.newInstance(0, chars.length, chars);
} else {
return constructor.newInstance(chars, Boolean.TRUE);
}
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
public interface StringCreator {
String buildString(char[] chars);
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_nio_UTFEncoderDecoder.java |
112 | public interface OProgressListener {
public void onBegin(Object iTask, long iTotal);
public boolean onProgress(Object iTask, long iCounter, float iPercent);
public void onCompletition(Object iTask, boolean iSucceed);
} | 0true
| commons_src_main_java_com_orientechnologies_common_listener_OProgressListener.java |
1,449 | public static class ShardSnapshotStatus {
private State state;
private String nodeId;
private String reason;
private ShardSnapshotStatus() {
}
public ShardSnapshotStatus(String nodeId) {
this(nodeId, State.INIT);
}
public ShardSnapshotStatus(String nodeId, State state) {
this(nodeId, state, null);
}
public ShardSnapshotStatus(String nodeId, State state, String reason) {
this.nodeId = nodeId;
this.state = state;
this.reason = reason;
}
public State state() {
return state;
}
public String nodeId() {
return nodeId;
}
public String reason() {
return reason;
}
public static ShardSnapshotStatus readShardSnapshotStatus(StreamInput in) throws IOException {
ShardSnapshotStatus shardSnapshotStatus = new ShardSnapshotStatus();
shardSnapshotStatus.readFrom(in);
return shardSnapshotStatus;
}
public void readFrom(StreamInput in) throws IOException {
nodeId = in.readOptionalString();
state = State.fromValue(in.readByte());
reason = in.readOptionalString();
}
public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalString(nodeId);
out.writeByte(state.value);
out.writeOptionalString(reason);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ShardSnapshotStatus status = (ShardSnapshotStatus) o;
if (nodeId != null ? !nodeId.equals(status.nodeId) : status.nodeId != null) return false;
if (reason != null ? !reason.equals(status.reason) : status.reason != null) return false;
if (state != status.state) return false;
return true;
}
@Override
public int hashCode() {
int result = state != null ? state.hashCode() : 0;
result = 31 * result + (nodeId != null ? nodeId.hashCode() : 0);
result = 31 * result + (reason != null ? reason.hashCode() : 0);
return result;
}
} | 0true
| src_main_java_org_elasticsearch_cluster_metadata_SnapshotMetaData.java |
69 | @SuppressWarnings("serial")
static final class MapReduceEntriesTask<K,V,U>
extends BulkTask<K,V,U> {
final Fun<Map.Entry<K,V>, ? extends U> transformer;
final BiFun<? super U, ? super U, ? extends U> reducer;
U result;
MapReduceEntriesTask<K,V,U> rights, nextRight;
MapReduceEntriesTask
(BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
MapReduceEntriesTask<K,V,U> nextRight,
Fun<Map.Entry<K,V>, ? extends U> transformer,
BiFun<? super U, ? super U, ? extends U> reducer) {
super(p, b, i, f, t); this.nextRight = nextRight;
this.transformer = transformer;
this.reducer = reducer;
}
public final U getRawResult() { return result; }
public final void compute() {
final Fun<Map.Entry<K,V>, ? extends U> transformer;
final BiFun<? super U, ? super U, ? extends U> reducer;
if ((transformer = this.transformer) != null &&
(reducer = this.reducer) != null) {
for (int i = baseIndex, f, h; batch > 0 &&
(h = ((f = baseLimit) + i) >>> 1) > i;) {
addToPendingCount(1);
(rights = new MapReduceEntriesTask<K,V,U>
(this, batch >>>= 1, baseLimit = h, f, tab,
rights, transformer, reducer)).fork();
}
U r = null;
for (Node<K,V> p; (p = advance()) != null; ) {
U u;
if ((u = transformer.apply(p)) != null)
r = (r == null) ? u : reducer.apply(r, u);
}
result = r;
CountedCompleter<?> c;
for (c = firstComplete(); c != null; c = c.nextComplete()) {
@SuppressWarnings("unchecked") MapReduceEntriesTask<K,V,U>
t = (MapReduceEntriesTask<K,V,U>)c,
s = t.rights;
while (s != null) {
U tr, sr;
if ((sr = s.result) != null)
t.result = (((tr = t.result) == null) ? sr :
reducer.apply(tr, sr));
s = t.rights = s.nextRight;
}
}
}
}
} | 0true
| src_main_java_jsr166e_ConcurrentHashMapV8.java |
1,599 | public class PersistencePackage implements Serializable {
private static final long serialVersionUID = 1L;
protected String ceilingEntityFullyQualifiedClassname;
protected String fetchTypeFullyQualifiedClassname;
protected PersistencePerspective persistencePerspective;
protected String[] customCriteria;
protected Entity entity;
protected String csrfToken;
protected Integer batchId;
protected Map<String, PersistencePackage> subPackages = new LinkedHashMap<String, PersistencePackage>();
protected boolean validateUnsubmittedProperties = true;
public PersistencePackage(String ceilingEntityFullyQualifiedClassname, Entity entity, PersistencePerspective persistencePerspective, String[] customCriteria, String csrfToken) {
this(ceilingEntityFullyQualifiedClassname, null, entity, persistencePerspective, customCriteria, csrfToken);
}
public PersistencePackage(String ceilingEntityFullyQualifiedClassname, String fetchTypeFullyQualifiedClassname, Entity entity, PersistencePerspective persistencePerspective, String[] customCriteria, String csrfToken) {
this.ceilingEntityFullyQualifiedClassname = ceilingEntityFullyQualifiedClassname;
this.fetchTypeFullyQualifiedClassname = fetchTypeFullyQualifiedClassname;
this.persistencePerspective = persistencePerspective;
this.entity = entity;
this.customCriteria = customCriteria;
this.csrfToken = csrfToken;
}
public PersistencePackage() {
//do nothing
}
public String getCeilingEntityFullyQualifiedClassname() {
return ceilingEntityFullyQualifiedClassname;
}
public void setCeilingEntityFullyQualifiedClassname(
String ceilingEntityFullyQualifiedClassname) {
this.ceilingEntityFullyQualifiedClassname = ceilingEntityFullyQualifiedClassname;
}
public PersistencePerspective getPersistencePerspective() {
return persistencePerspective;
}
public void setPersistencePerspective(
PersistencePerspective persistencePerspective) {
this.persistencePerspective = persistencePerspective;
}
public String[] getCustomCriteria() {
return customCriteria;
}
public void setCustomCriteria(String[] customCriteria) {
this.customCriteria = customCriteria;
}
public Entity getEntity() {
return entity;
}
public void setEntity(Entity entity) {
this.entity = entity;
}
public String getCsrfToken() {
return csrfToken;
}
public void setCsrfToken(String csrfToken) {
this.csrfToken = csrfToken;
}
public String getFetchTypeFullyQualifiedClassname() {
return fetchTypeFullyQualifiedClassname;
}
public void setFetchTypeFullyQualifiedClassname(String fetchTypeFullyQualifiedClassname) {
this.fetchTypeFullyQualifiedClassname = fetchTypeFullyQualifiedClassname;
}
public Integer getBatchId() {
return batchId;
}
public void setBatchId(Integer batchId) {
this.batchId = batchId;
}
public Map<String, PersistencePackage> getSubPackages() {
return subPackages;
}
public void setSubPackages(Map<String, PersistencePackage> subPackages) {
this.subPackages = subPackages;
}
public boolean isValidateUnsubmittedProperties() {
return validateUnsubmittedProperties;
}
public void setValidateUnsubmittedProperties(boolean validateUnsubmittedProperties) {
this.validateUnsubmittedProperties = validateUnsubmittedProperties;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof PersistencePackage)) return false;
PersistencePackage that = (PersistencePackage) o;
        if (batchId != null ? !batchId.equals(that.batchId) : that.batchId != null) return false;
if (ceilingEntityFullyQualifiedClassname != null ? !ceilingEntityFullyQualifiedClassname.equals(that.ceilingEntityFullyQualifiedClassname) : that.ceilingEntityFullyQualifiedClassname != null)
return false;
if (!Arrays.equals(customCriteria, that.customCriteria)) return false;
if (entity != null ? !entity.equals(that.entity) : that.entity != null) return false;
if (fetchTypeFullyQualifiedClassname != null ? !fetchTypeFullyQualifiedClassname.equals(that.fetchTypeFullyQualifiedClassname) : that.fetchTypeFullyQualifiedClassname != null)
return false;
if (persistencePerspective != null ? !persistencePerspective.equals(that.persistencePerspective) : that.persistencePerspective != null)
return false;
return true;
}
@Override
public int hashCode() {
int result = ceilingEntityFullyQualifiedClassname != null ? ceilingEntityFullyQualifiedClassname.hashCode() : 0;
result = 31 * result + (fetchTypeFullyQualifiedClassname != null ? fetchTypeFullyQualifiedClassname.hashCode() : 0);
result = 31 * result + (persistencePerspective != null ? persistencePerspective.hashCode() : 0);
result = 31 * result + (customCriteria != null ? Arrays.hashCode(customCriteria) : 0);
result = 31 * result + (entity != null ? entity.hashCode() : 0);
        result = 31 * result + (batchId != null ? batchId.hashCode() : 0);
return result;
}
} | 1no label
| admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_dto_PersistencePackage.java |
874 | public class TransportSearchQueryThenFetchAction extends TransportSearchTypeAction {
@Inject
public TransportSearchQueryThenFetchAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
SearchServiceTransportAction searchService, SearchPhaseController searchPhaseController) {
super(settings, threadPool, clusterService, searchService, searchPhaseController);
}
@Override
protected void doExecute(SearchRequest searchRequest, ActionListener<SearchResponse> listener) {
new AsyncAction(searchRequest, listener).start();
}
private class AsyncAction extends BaseAsyncAction<QuerySearchResult> {
final AtomicArray<FetchSearchResult> fetchResults;
final AtomicArray<IntArrayList> docIdsToLoad;
private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listener) {
super(request, listener);
fetchResults = new AtomicArray<FetchSearchResult>(firstResults.length());
docIdsToLoad = new AtomicArray<IntArrayList>(firstResults.length());
}
@Override
protected String firstPhaseName() {
return "query";
}
@Override
protected void sendExecuteFirstPhase(DiscoveryNode node, ShardSearchRequest request, SearchServiceListener<QuerySearchResult> listener) {
searchService.sendExecuteQuery(node, request, listener);
}
@Override
protected void moveToSecondPhase() {
sortedShardList = searchPhaseController.sortDocs(firstResults);
searchPhaseController.fillDocIdsToLoad(docIdsToLoad, sortedShardList);
if (docIdsToLoad.asList().isEmpty()) {
finishHim();
return;
}
final AtomicInteger counter = new AtomicInteger(docIdsToLoad.asList().size());
int localOperations = 0;
for (AtomicArray.Entry<IntArrayList> entry : docIdsToLoad.asList()) {
QuerySearchResult queryResult = firstResults.get(entry.index);
DiscoveryNode node = nodes.get(queryResult.shardTarget().nodeId());
if (node.id().equals(nodes.localNodeId())) {
localOperations++;
} else {
FetchSearchRequest fetchSearchRequest = new FetchSearchRequest(request, queryResult.id(), entry.value);
executeFetch(entry.index, queryResult.shardTarget(), counter, fetchSearchRequest, node);
}
}
if (localOperations > 0) {
if (request.operationThreading() == SearchOperationThreading.SINGLE_THREAD) {
threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
for (AtomicArray.Entry<IntArrayList> entry : docIdsToLoad.asList()) {
QuerySearchResult queryResult = firstResults.get(entry.index);
DiscoveryNode node = nodes.get(queryResult.shardTarget().nodeId());
if (node.id().equals(nodes.localNodeId())) {
FetchSearchRequest fetchSearchRequest = new FetchSearchRequest(request, queryResult.id(), entry.value);
executeFetch(entry.index, queryResult.shardTarget(), counter, fetchSearchRequest, node);
}
}
}
});
} else {
boolean localAsync = request.operationThreading() == SearchOperationThreading.THREAD_PER_SHARD;
for (final AtomicArray.Entry<IntArrayList> entry : docIdsToLoad.asList()) {
final QuerySearchResult queryResult = firstResults.get(entry.index);
final DiscoveryNode node = nodes.get(queryResult.shardTarget().nodeId());
if (node.id().equals(nodes.localNodeId())) {
final FetchSearchRequest fetchSearchRequest = new FetchSearchRequest(request, queryResult.id(), entry.value);
try {
if (localAsync) {
threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
executeFetch(entry.index, queryResult.shardTarget(), counter, fetchSearchRequest, node);
}
});
} else {
executeFetch(entry.index, queryResult.shardTarget(), counter, fetchSearchRequest, node);
}
} catch (Throwable t) {
onFetchFailure(t, fetchSearchRequest, entry.index, queryResult.shardTarget(), counter);
}
}
}
}
}
}
void executeFetch(final int shardIndex, final SearchShardTarget shardTarget, final AtomicInteger counter, final FetchSearchRequest fetchSearchRequest, DiscoveryNode node) {
searchService.sendExecuteFetch(node, fetchSearchRequest, new SearchServiceListener<FetchSearchResult>() {
@Override
public void onResult(FetchSearchResult result) {
result.shardTarget(shardTarget);
fetchResults.set(shardIndex, result);
if (counter.decrementAndGet() == 0) {
finishHim();
}
}
@Override
public void onFailure(Throwable t) {
onFetchFailure(t, fetchSearchRequest, shardIndex, shardTarget, counter);
}
});
}
void onFetchFailure(Throwable t, FetchSearchRequest fetchSearchRequest, int shardIndex, SearchShardTarget shardTarget, AtomicInteger counter) {
if (logger.isDebugEnabled()) {
logger.debug("[{}] Failed to execute fetch phase", t, fetchSearchRequest.id());
}
this.addShardFailure(shardIndex, shardTarget, t);
successulOps.decrementAndGet();
if (counter.decrementAndGet() == 0) {
finishHim();
}
}
void finishHim() {
try {
innerFinishHim();
} catch (Throwable e) {
ReduceSearchPhaseException failure = new ReduceSearchPhaseException("fetch", "", e, buildShardFailures());
if (logger.isDebugEnabled()) {
logger.debug("failed to reduce search", failure);
}
listener.onFailure(failure);
} finally {
releaseIrrelevantSearchContexts(firstResults, docIdsToLoad);
}
}
void innerFinishHim() throws Exception {
InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, firstResults, fetchResults);
String scrollId = null;
if (request.scroll() != null) {
scrollId = TransportSearchHelper.buildScrollId(request.searchType(), firstResults, null);
}
listener.onResponse(new SearchResponse(internalResponse, scrollId, expectedSuccessfulOps, successulOps.get(), buildTookInMillis(), buildShardFailures()));
}
}
} | 1no label
| src_main_java_org_elasticsearch_action_search_type_TransportSearchQueryThenFetchAction.java |
378 | .ackTimeout(request.timeout()), new ActionListener<RepositoriesService.RegisterRepositoryResponse>() {
@Override
public void onResponse(RepositoriesService.RegisterRepositoryResponse response) {
listener.onResponse(new PutRepositoryResponse(response.isAcknowledged()));
}
@Override
public void onFailure(Throwable e) {
listener.onFailure(e);
}
}); | 0true
| src_main_java_org_elasticsearch_action_admin_cluster_repositories_put_TransportPutRepositoryAction.java |
487 | public interface CookieUtils {
public final static String CUSTOMER_COOKIE_NAME = "customerId";
public abstract String getCookieValue(HttpServletRequest request, String cookieName);
public abstract void setCookieValue(HttpServletResponse response, String cookieName, String cookieValue, String path, Integer maxAge, Boolean isSecure);
public abstract void setCookieValue(HttpServletResponse response, String cookieName, String cookieValue);
public abstract void invalidateCookie(HttpServletResponse response, String cookieName);
} | 0true
| common_src_main_java_org_broadleafcommerce_common_security_util_CookieUtils.java |
252 | service.submit(runnable, selector, new ExecutionCallback() {
public void onResponse(Object response) {
responseLatch.countDown();
}
public void onFailure(Throwable t) {
}
}); | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_executor_ClientExecutorServiceSubmitTest.java |
3,249 | public class AtomicReferencePermission extends InstancePermission {
private static final int READ = 0x4;
private static final int MODIFY = 0x8;
private static final int ALL = READ | MODIFY | CREATE | DESTROY;
public AtomicReferencePermission(String name, String... actions) {
super(name, actions);
}
@Override
protected int initMask(String[] actions) {
int mask = NONE;
for (String action : actions) {
if (ActionConstants.ACTION_ALL.equals(action)) {
return ALL;
}
if (ActionConstants.ACTION_CREATE.equals(action)) {
mask |= CREATE;
} else if (ActionConstants.ACTION_READ.equals(action)) {
mask |= READ;
} else if (ActionConstants.ACTION_MODIFY.equals(action)) {
mask |= MODIFY;
} else if (ActionConstants.ACTION_DESTROY.equals(action)) {
mask |= DESTROY;
}
}
return mask;
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_security_permission_AtomicReferencePermission.java |
294 | public interface ShardOperationFailedException extends Streamable, Serializable {
/**
* The index the operation failed on. Might return <tt>null</tt> if it can't be derived.
*/
String index();
/**
     * The shard id the operation failed on. Might return <tt>-1</tt> if it can't be derived.
*/
int shardId();
/**
* The reason of the failure.
*/
String reason();
/**
* The status of the failure.
*/
RestStatus status();
} | 0true
| src_main_java_org_elasticsearch_action_ShardOperationFailedException.java |
90 | public interface StaticAssetStorageService {
StaticAssetStorage findStaticAssetStorageById(Long id);
/**
* @deprecated Use createStaticAssetStorageFromFile instead.
* @return
*/
StaticAssetStorage create();
StaticAssetStorage readStaticAssetStorageByStaticAssetId(Long id);
StaticAssetStorage save(StaticAssetStorage assetStorage);
void delete(StaticAssetStorage assetStorage);
/**
* @deprecated Use createStaticAssetStorageFromFile instead.
*
* @param uploadedFile
* @return
* @throws IOException
*/
Blob createBlob(MultipartFile uploadedFile) throws IOException;
/**
* Stores the file on the filesystem by performing an MD5 hash of the
     * staticAsset.fullUrl.
*
* To ensure that files can be stored and accessed in an efficient manner, the
* system creates directories based on the characters in the hash.
*
* For example, if the URL is /product/myproductimage.jpg, then the MD5 would be
* 35ec52a8dbd8cf3e2c650495001fe55f resulting in the following file on the filesystem
* {assetFileSystemPath}/35/ec/myproductimage.jpg.
*
* If there is a "siteId" in the BroadleafRequestContext then the site is also distributed
* using a similar algorithm but the system attempts to keep images for sites in their own
* directory resulting in an extra two folders required to reach any given product. So, for
* site with id 125, the system will MD5 "/site-125" in order to build the URL string. "/site-125" has an md5
* string of "7fde295edac6ca7f85d0368ea741b241".
*
* So, in this case with the above product URL in site125, the full URL on the filesystem
* will be:
*
* {assetFileSystemPath}/7f/site-125/35/ec/myproductimage.jpg.
*
* This algorithm has the following benefits:
     * - Efficient file-system storage
     * - A balanced tree of files that supports 10 million files
*
* If support for more files is needed, implementors should consider one of the following approaches:
* 1. Overriding the maxGeneratedFileSystemDirectories property from its default of 2 to 3
* 2. Overriding this method to introduce an alternate approach
*
* @param fullUrl The URL used to represent an asset for which a name on the fileSystem is desired.
* @param useSharedPath If false, the system will generate a path using {@link Site} information if available.
*
* @return
*/
String generateStorageFileName(String fullUrl, boolean useSharedPath);
/**
     * By default, delegates a call to {@link #generateStorageFileName(String, boolean)} using <code>staticAsset.getFullUrl()</code>
* as the passed in argument.
*
* @param staticAsset StaticAsset for which a filename is desired.
* @param useSharedPath If false, the system will generate a path using {@link Site} information if available.
* @return
*/
String generateStorageFileName(StaticAsset staticAsset, boolean useSharedPath);
Map<String, String> getCacheFileModel(String fullUrl, SandBox sandBox, Map<String, String> parameterMap) throws Exception;
/**
* Persists the file being based in according to the staticAsset's StorageType.
*
* @param file
* @param id
* @throws IOException
*/
void createStaticAssetStorageFromFile(MultipartFile file, StaticAsset staticAsset) throws IOException;
} | 0true
| admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_file_service_StaticAssetStorageService.java |
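The generateStorageFileName javadoc above describes a hash-bucketed directory layout; the following standalone sketch (hypothetical names, not the Broadleaf implementation) shows the shared-path case for the documented example.
// Sketch of the documented bucketing: MD5 the asset URL, use the first two hex-character
// pairs as nested directories, and keep the original file name.
static String bucketedPath(String assetRoot, String fullUrl) throws Exception {
    byte[] digest = java.security.MessageDigest.getInstance("MD5").digest(fullUrl.getBytes("UTF-8"));
    StringBuilder hex = new StringBuilder();
    for (byte b : digest) {
        hex.append(String.format("%02x", b));
    }
    String fileName = fullUrl.substring(fullUrl.lastIndexOf('/') + 1);
    return assetRoot + "/" + hex.substring(0, 2) + "/" + hex.substring(2, 4) + "/" + fileName;
}
// Per the javadoc example: bucketedPath("{assetFileSystemPath}", "/product/myproductimage.jpg")
// yields "{assetFileSystemPath}/35/ec/myproductimage.jpg".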
1,028 | transportService.sendRequest(node, transportShardAction, new ShardSingleOperationRequest(request, shardRouting.id()), new BaseTransportResponseHandler<Response>() {
@Override
public Response newInstance() {
return newResponse();
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
@Override
public void handleResponse(final Response response) {
listener.onResponse(response);
}
@Override
public void handleException(TransportException exp) {
onFailure(shardRouting, exp);
}
}); | 0true
| src_main_java_org_elasticsearch_action_support_single_shard_TransportShardSingleOperationAction.java |
1,009 | transportService.sendRequest(node, transportShardAction, new ShardSingleOperationRequest(request, shard.id()), new BaseTransportResponseHandler<Response>() {
@Override
public Response newInstance() {
return newResponse();
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
@Override
public void handleResponse(final Response response) {
listener.onResponse(response);
}
@Override
public void handleException(TransportException exp) {
onFailure(shard, exp);
}
}); | 0true
| src_main_java_org_elasticsearch_action_support_single_custom_TransportSingleCustomOperationAction.java |
494 | public class CloseIndexAction extends IndicesAction<CloseIndexRequest, CloseIndexResponse, CloseIndexRequestBuilder> {
public static final CloseIndexAction INSTANCE = new CloseIndexAction();
public static final String NAME = "indices/close";
private CloseIndexAction() {
super(NAME);
}
@Override
public CloseIndexResponse newResponse() {
return new CloseIndexResponse();
}
@Override
public CloseIndexRequestBuilder newRequestBuilder(IndicesAdminClient client) {
return new CloseIndexRequestBuilder(client);
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_indices_close_CloseIndexAction.java |
485 | while (makeDbCall(databaseDocumentTxOne, new ODbRelatedCall<Boolean>() {
public Boolean call() {
return iteratorOne.hasNext();
}
})) { | 0true
| core_src_main_java_com_orientechnologies_orient_core_db_tool_ODatabaseCompare.java |
5,811 | public class HighlightBuilder implements ToXContent {
private List<Field> fields;
private String tagsSchema;
private String[] preTags;
private String[] postTags;
private String order;
private String encoder;
private Boolean requireFieldMatch;
private String highlighterType;
private String fragmenter;
private QueryBuilder highlightQuery;
private Integer noMatchSize;
private Integer phraseLimit;
private Map<String, Object> options;
private Boolean forceSource;
/**
* Adds a field to be highlighted with default fragment size of 100 characters, and
* default number of fragments of 5 using the default encoder
*
* @param name The field to highlight
*/
public HighlightBuilder field(String name) {
if (fields == null) {
fields = newArrayList();
}
fields.add(new Field(name));
return this;
}
/**
* Adds a field to be highlighted with a provided fragment size (in characters), and
* default number of fragments of 5.
*
* @param name The field to highlight
* @param fragmentSize The size of a fragment in characters
*/
public HighlightBuilder field(String name, int fragmentSize) {
if (fields == null) {
fields = newArrayList();
}
fields.add(new Field(name).fragmentSize(fragmentSize));
return this;
}
/**
* Adds a field to be highlighted with a provided fragment size (in characters), and
* a provided (maximum) number of fragments.
*
* @param name The field to highlight
* @param fragmentSize The size of a fragment in characters
* @param numberOfFragments The (maximum) number of fragments
*/
public HighlightBuilder field(String name, int fragmentSize, int numberOfFragments) {
if (fields == null) {
fields = newArrayList();
}
fields.add(new Field(name).fragmentSize(fragmentSize).numOfFragments(numberOfFragments));
return this;
}
/**
* Adds a field to be highlighted with a provided fragment size (in characters), and
* a provided (maximum) number of fragments.
*
* @param name The field to highlight
* @param fragmentSize The size of a fragment in characters
* @param numberOfFragments The (maximum) number of fragments
* @param fragmentOffset The offset from the start of the fragment to the start of the highlight
*/
public HighlightBuilder field(String name, int fragmentSize, int numberOfFragments, int fragmentOffset) {
if (fields == null) {
fields = newArrayList();
}
fields.add(new Field(name).fragmentSize(fragmentSize).numOfFragments(numberOfFragments)
.fragmentOffset(fragmentOffset));
return this;
}
public HighlightBuilder field(Field field) {
if (fields == null) {
fields = newArrayList();
}
fields.add(field);
return this;
}
/**
     * Set a tag scheme that encapsulates built-in pre and post tags. The allowed schemes
     * are <tt>styled</tt> and <tt>default</tt>.
*
* @param schemaName The tag scheme name
*/
public HighlightBuilder tagsSchema(String schemaName) {
this.tagsSchema = schemaName;
return this;
}
/**
     * Set the encoder to be used for the highlighting.
*
* @param encoder name
*/
public HighlightBuilder encoder(String encoder) {
this.encoder = encoder;
return this;
}
/**
* Explicitly set the pre tags that will be used for highlighting.
*/
public HighlightBuilder preTags(String... preTags) {
this.preTags = preTags;
return this;
}
/**
* Explicitly set the post tags that will be used for highlighting.
*/
public HighlightBuilder postTags(String... postTags) {
this.postTags = postTags;
return this;
}
/**
* The order of fragments per field. By default, ordered by the order in the
* highlighted text. Can be <tt>score</tt>, which then it will be ordered
* by score of the fragments.
*/
public HighlightBuilder order(String order) {
this.order = order;
return this;
}
public HighlightBuilder requireFieldMatch(boolean requireFieldMatch) {
this.requireFieldMatch = requireFieldMatch;
return this;
}
/**
* Set type of highlighter to use. Supported types
* are <tt>highlighter</tt>, <tt>fast-vector-highlighter</tt> and <tt>postings-highlighter</tt>.
*/
public HighlightBuilder highlighterType(String highlighterType) {
this.highlighterType = highlighterType;
return this;
}
/**
* Sets what fragmenter to use to break up text that is eligible for highlighting.
* This option is only applicable when using plain / normal highlighter.
*/
public HighlightBuilder fragmenter(String fragmenter) {
this.fragmenter = fragmenter;
return this;
}
/**
* Sets a query to be used for highlighting all fields instead of the search query.
*/
public HighlightBuilder highlightQuery(QueryBuilder highlightQuery) {
this.highlightQuery = highlightQuery;
return this;
}
/**
* Sets the size of the fragment to return from the beginning of the field if there are no matches to
* highlight and the field doesn't also define noMatchSize.
* @param noMatchSize integer to set or null to leave out of request. default is null.
* @return this for chaining
*/
public HighlightBuilder noMatchSize(Integer noMatchSize) {
this.noMatchSize = noMatchSize;
return this;
}
/**
* Sets the maximum number of phrases the fvh will consider if the field doesn't also define phraseLimit.
* @param phraseLimit maximum number of phrases the fvh will consider
* @return this for chaining
*/
public HighlightBuilder phraseLimit(Integer phraseLimit) {
this.phraseLimit = phraseLimit;
return this;
}
/**
* Allows to set custom options for custom highlighters.
*/
public HighlightBuilder options(Map<String, Object> options) {
this.options = options;
return this;
}
/**
* Forces the highlighting to highlight fields based on the source even if fields are stored separately.
*/
public HighlightBuilder forceSource(boolean forceSource) {
this.forceSource = forceSource;
return this;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject("highlight");
if (tagsSchema != null) {
builder.field("tags_schema", tagsSchema);
}
if (preTags != null) {
builder.array("pre_tags", preTags);
}
if (postTags != null) {
builder.array("post_tags", postTags);
}
if (order != null) {
builder.field("order", order);
}
if (encoder != null) {
builder.field("encoder", encoder);
}
if (requireFieldMatch != null) {
builder.field("require_field_match", requireFieldMatch);
}
if (highlighterType != null) {
builder.field("type", highlighterType);
}
if (fragmenter != null) {
builder.field("fragmenter", fragmenter);
}
if (highlightQuery != null) {
builder.field("highlight_query", highlightQuery);
}
if (noMatchSize != null) {
builder.field("no_match_size", noMatchSize);
}
if (phraseLimit != null) {
builder.field("phrase_limit", phraseLimit);
}
if (options != null && options.size() > 0) {
builder.field("options", options);
}
if (forceSource != null) {
builder.field("force_source", forceSource);
}
if (fields != null) {
builder.startObject("fields");
for (Field field : fields) {
builder.startObject(field.name());
if (field.preTags != null) {
builder.field("pre_tags", field.preTags);
}
if (field.postTags != null) {
builder.field("post_tags", field.postTags);
}
if (field.fragmentSize != -1) {
builder.field("fragment_size", field.fragmentSize);
}
if (field.numOfFragments != -1) {
builder.field("number_of_fragments", field.numOfFragments);
}
if (field.fragmentOffset != -1) {
builder.field("fragment_offset", field.fragmentOffset);
}
if (field.highlightFilter != null) {
builder.field("highlight_filter", field.highlightFilter);
}
if (field.order != null) {
builder.field("order", field.order);
}
if (field.requireFieldMatch != null) {
builder.field("require_field_match", field.requireFieldMatch);
}
if (field.boundaryMaxScan != -1) {
builder.field("boundary_max_scan", field.boundaryMaxScan);
}
if (field.boundaryChars != null) {
builder.field("boundary_chars", field.boundaryChars);
}
if (field.highlighterType != null) {
builder.field("type", field.highlighterType);
}
if (field.fragmenter != null) {
builder.field("fragmenter", field.fragmenter);
}
if (field.highlightQuery != null) {
builder.field("highlight_query", field.highlightQuery);
}
if (field.noMatchSize != null) {
builder.field("no_match_size", field.noMatchSize);
}
if (field.matchedFields != null) {
builder.field("matched_fields", field.matchedFields);
}
if (field.phraseLimit != null) {
builder.field("phrase_limit", field.phraseLimit);
}
if (field.options != null && field.options.size() > 0) {
builder.field("options", field.options);
}
if (field.forceSource != null) {
builder.field("force_source", forceSource);
}
builder.endObject();
}
builder.endObject();
}
builder.endObject();
return builder;
}
public static class Field {
final String name;
String[] preTags;
String[] postTags;
int fragmentSize = -1;
int fragmentOffset = -1;
int numOfFragments = -1;
Boolean highlightFilter;
String order;
Boolean requireFieldMatch;
int boundaryMaxScan = -1;
char[] boundaryChars;
String highlighterType;
String fragmenter;
QueryBuilder highlightQuery;
Integer noMatchSize;
String[] matchedFields;
Integer phraseLimit;
Map<String, Object> options;
Boolean forceSource;
public Field(String name) {
this.name = name;
}
public String name() {
return name;
}
/**
* Explicitly set the pre tags for this field that will be used for highlighting.
* This overrides global settings set by {@link HighlightBuilder#preTags(String...)}.
*/
public Field preTags(String... preTags) {
this.preTags = preTags;
return this;
}
/**
* Explicitly set the post tags for this field that will be used for highlighting.
* This overrides global settings set by {@link HighlightBuilder#postTags(String...)}.
*/
public Field postTags(String... postTags) {
this.postTags = postTags;
return this;
}
public Field fragmentSize(int fragmentSize) {
this.fragmentSize = fragmentSize;
return this;
}
public Field fragmentOffset(int fragmentOffset) {
this.fragmentOffset = fragmentOffset;
return this;
}
public Field numOfFragments(int numOfFragments) {
this.numOfFragments = numOfFragments;
return this;
}
public Field highlightFilter(boolean highlightFilter) {
this.highlightFilter = highlightFilter;
return this;
}
/**
* The order of fragments per field. By default, fragments are returned in the
* order they appear in the highlighted text. Can be <tt>score</tt>, in which
* case fragments are ordered by their score.
* This overrides global settings set by {@link HighlightBuilder#order(String)}.
*/
public Field order(String order) {
this.order = order;
return this;
}
public Field requireFieldMatch(boolean requireFieldMatch) {
this.requireFieldMatch = requireFieldMatch;
return this;
}
public Field boundaryMaxScan(int boundaryMaxScan) {
this.boundaryMaxScan = boundaryMaxScan;
return this;
}
public Field boundaryChars(char[] boundaryChars) {
this.boundaryChars = boundaryChars;
return this;
}
/**
* Set type of highlighter to use. Supported types
* are <tt>highlighter</tt>, <tt>fast-vector-highlighter</tt> and <tt>postings-highlighter</tt>.
* This overrides global settings set by {@link HighlightBuilder#highlighterType(String)}.
*/
public Field highlighterType(String highlighterType) {
this.highlighterType = highlighterType;
return this;
}
/**
* Sets what fragmenter to use to break up text that is eligible for highlighting.
* This option is only applicable when using plain / normal highlighter.
* This overrides global settings set by {@link HighlightBuilder#fragmenter(String)}.
*/
public Field fragmenter(String fragmenter) {
this.fragmenter = fragmenter;
return this;
}
/**
* Sets a query to use for highlighting this field instead of the search query.
*/
public Field highlightQuery(QueryBuilder highlightQuery) {
this.highlightQuery = highlightQuery;
return this;
}
/**
* Sets the size of the fragment to return from the beginning of the field if there are no matches to
* highlight.
* @param noMatchSize integer to set or null to leave out of request. default is null.
* @return this for chaining
*/
public Field noMatchSize(Integer noMatchSize) {
this.noMatchSize = noMatchSize;
return this;
}
/**
* Allows to set custom options for custom highlighters.
* This overrides global settings set by {@link HighlightBuilder#options(Map)}.
*/
public Field options(Map<String, Object> options) {
this.options = options;
return this;
}
/**
* Set the matched fields to highlight against this field data. Defaults to null, meaning just
* the named field. If you provide a list of fields here, don't forget to include this field's
* name, as it is not automatically included.
*/
public Field matchedFields(String... matchedFields) {
this.matchedFields = matchedFields;
return this;
}
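// Illustrative usage (the field names here are assumed for the example, not taken from this codebase):
// combine matches from differently analyzed sub-fields, e.g.
//   new Field("content").matchedFields("content", "content.plain");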
/**
* Sets the maximum number of phrases the fvh will consider.
* @param phraseLimit maximum number of phrases the fvh will consider
* @return this for chaining
*/
public Field phraseLimit(Integer phraseLimit) {
this.phraseLimit = phraseLimit;
return this;
}
/**
* Forces the highlighting to highlight this field based on the source even if this field is stored separately.
*/
public Field forceSource(boolean forceSource) {
this.forceSource = forceSource;
return this;
}
}
} | 1no label
| src_main_java_org_elasticsearch_search_highlight_HighlightBuilder.java |
433 | public enum OperationType {
NONDESTRUCTIVEREMOVE,
BASIC,
ADORNEDTARGETLIST,
MAP
} | 0true
| common_src_main_java_org_broadleafcommerce_common_presentation_client_OperationType.java |
183 | static final class Node {
final boolean isData; // false if this is a request node
volatile Object item; // initially non-null if isData; CASed to match
volatile Node next;
volatile Thread waiter; // null until waiting
// CAS methods for fields
final boolean casNext(Node cmp, Node val) {
return UNSAFE.compareAndSwapObject(this, nextOffset, cmp, val);
}
final boolean casItem(Object cmp, Object val) {
// assert cmp == null || cmp.getClass() != Node.class;
return UNSAFE.compareAndSwapObject(this, itemOffset, cmp, val);
}
/**
* Constructs a new node. Uses relaxed write because item can
* only be seen after publication via casNext.
*/
Node(Object item, boolean isData) {
UNSAFE.putObject(this, itemOffset, item); // relaxed write
this.isData = isData;
}
/**
* Links node to itself to avoid garbage retention. Called
* only after CASing head field, so uses relaxed write.
*/
final void forgetNext() {
UNSAFE.putObject(this, nextOffset, this);
}
/**
* Sets item to self and waiter to null, to avoid garbage
* retention after matching or cancelling. Uses relaxed writes
* because order is already constrained in the only calling
* contexts: item is forgotten only after volatile/atomic
* mechanics that extract items. Similarly, clearing waiter
* follows either CAS or return from park (if ever parked;
* else we don't care).
*/
final void forgetContents() {
UNSAFE.putObject(this, itemOffset, this);
UNSAFE.putObject(this, waiterOffset, null);
}
/**
* Returns true if this node has been matched, including the
* case of artificial matches due to cancellation.
*/
final boolean isMatched() {
Object x = item;
return (x == this) || ((x == null) == isData);
}
/**
* Returns true if this is an unmatched request node.
*/
final boolean isUnmatchedRequest() {
return !isData && item == null;
}
/**
* Returns true if a node with the given mode cannot be
* appended to this node because this node is unmatched and
* has opposite data mode.
*/
final boolean cannotPrecede(boolean haveData) {
boolean d = isData;
Object x;
return d != haveData && (x = item) != this && (x != null) == d;
}
/**
* Tries to artificially match a data node -- used by remove.
*/
final boolean tryMatchData() {
// assert isData;
Object x = item;
if (x != null && x != this && casItem(x, null)) {
LockSupport.unpark(waiter);
return true;
}
return false;
}
private static final long serialVersionUID = -3375979862319811754L;
// Unsafe mechanics
private static final sun.misc.Unsafe UNSAFE;
private static final long itemOffset;
private static final long nextOffset;
private static final long waiterOffset;
static {
try {
UNSAFE = getUnsafe();
Class<?> k = Node.class;
itemOffset = UNSAFE.objectFieldOffset
(k.getDeclaredField("item"));
nextOffset = UNSAFE.objectFieldOffset
(k.getDeclaredField("next"));
waiterOffset = UNSAFE.objectFieldOffset
(k.getDeclaredField("waiter"));
} catch (Exception e) {
throw new Error(e);
}
}
} | 0true
| src_main_java_jsr166y_LinkedTransferQueue.java |
1,909 | public class QueryResult implements DataSerializable {
private List<Integer> partitionIds;
private final Set<QueryResultEntry> result = new LinkedHashSet<QueryResultEntry>();
public List<Integer> getPartitionIds() {
return partitionIds;
}
public void setPartitionIds(List<Integer> partitionIds) {
this.partitionIds = partitionIds;
}
public void add(QueryResultEntry resultEntry) {
result.add(resultEntry);
}
public Set<QueryResultEntry> getResult() {
return result;
}
public void writeData(ObjectDataOutput out) throws IOException {
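// wire format: partition-id count followed by each id, then result-entry count followed by each serialized entry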
int psize = (partitionIds == null) ? 0 : partitionIds.size();
out.writeInt(psize);
for (int i = 0; i < psize; i++) {
out.writeInt(partitionIds.get(i));
}
int rsize = result.size();
out.writeInt(rsize);
if (rsize > 0) {
Iterator<QueryResultEntry> iterator = result.iterator();
for (int i = 0; i < rsize; i++) {
final QueryResultEntryImpl queryableEntry = (QueryResultEntryImpl) iterator.next();
queryableEntry.writeData(out);
}
}
}
public void readData(ObjectDataInput in) throws IOException {
int psize = in.readInt();
if (psize > 0) {
partitionIds = new ArrayList<Integer>(psize);
for (int i = 0; i < psize; i++) {
partitionIds.add(in.readInt());
}
}
int rsize = in.readInt();
if (rsize > 0) {
for (int i = 0; i < rsize; i++) {
final QueryResultEntryImpl resultEntry = new QueryResultEntryImpl();
resultEntry.readData(in);
result.add(resultEntry);
}
}
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_map_QueryResult.java |
98 | public enum Type {
POINT, BOX, CIRCLE, POLYGON;
} | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_core_attribute_Geoshape.java |
1,527 | @Component("blRelatedProductProcessor")
public class RelatedProductProcessor extends AbstractModelVariableModifierProcessor {
@Resource(name = "blRelatedProductsService")
protected RelatedProductsService relatedProductsService;
/**
* Sets the name of this processor to be used in Thymeleaf template
*/
public RelatedProductProcessor() {
super("related_products");
}
@Override
public int getPrecedence() {
return 10000;
}
@Override
/**
* Controller method for the processor that readies the service call and adds the results to the model.
*/
protected void modifyModelAttributes(Arguments arguments, Element element) {
List<? extends PromotableProduct> relatedProducts = relatedProductsService.findRelatedProducts(buildDTO(arguments, element));
addToModel(arguments, getRelatedProductsResultVar(element), relatedProducts);
addToModel(arguments, getProductsResultVar(element), convertRelatedProductsToProducts(relatedProducts));
}
protected List<Product> convertRelatedProductsToProducts(List<? extends PromotableProduct> relatedProducts) {
List<Product> products = new ArrayList<Product>();
if (relatedProducts != null) {
for (PromotableProduct product : relatedProducts) {
products.add(product.getRelatedProduct());
}
}
return products;
}
private String getRelatedProductsResultVar(Element element) {
String resultVar = element.getAttributeValue("relatedProductsResultVar");
if (resultVar == null) {
resultVar = "relatedProducts";
}
return resultVar;
}
private String getProductsResultVar(Element element) {
String resultVar = element.getAttributeValue("productsResultVar");
if (resultVar == null) {
resultVar = "products";
}
return resultVar;
}
private RelatedProductDTO buildDTO(Arguments args, Element element) {
RelatedProductDTO relatedProductDTO = new RelatedProductDTO();
String productIdStr = element.getAttributeValue("productId");
String categoryIdStr = element.getAttributeValue("categoryId");
String quantityStr = element.getAttributeValue("quantity");
String typeStr = element.getAttributeValue("type");
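// numeric Thymeleaf expression results arrive as BigDecimal, so the id values below are converted to Long before use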
if (productIdStr != null) {
Object productId = StandardExpressionProcessor.processExpression(args, productIdStr);
if (productId instanceof BigDecimal) {
productId = new Long(((BigDecimal) productId).toPlainString());
}
relatedProductDTO.setProductId((Long) productId);
}
if (categoryIdStr != null) {
Object categoryId = StandardExpressionProcessor.processExpression(args, categoryIdStr);
if (categoryId instanceof BigDecimal) {
categoryId = new Long(((BigDecimal) categoryId).toPlainString());
}
relatedProductDTO.setCategoryId((Long) categoryId);
}
if (quantityStr != null) {
relatedProductDTO.setQuantity(((BigDecimal) StandardExpressionProcessor.processExpression(args, quantityStr)).intValue());
}
if (typeStr != null && RelatedProductTypeEnum.getInstance(typeStr) != null) {
relatedProductDTO.setType(RelatedProductTypeEnum.getInstance(typeStr));
}
if ("false".equalsIgnoreCase(element.getAttributeValue("cumulativeResults"))) {
relatedProductDTO.setCumulativeResults(false);
}
return relatedProductDTO;
}
} | 1no label
| core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_processor_RelatedProductProcessor.java |
1,903 | public class QueryEventFilter extends EntryEventFilter {
Predicate predicate = null;
public QueryEventFilter(boolean includeValue, Data key, Predicate predicate) {
super(includeValue, key);
this.predicate = predicate;
}
public QueryEventFilter() {
super();
}
public Object getPredicate() {
return predicate;
}
public boolean eval(Object arg) {
final QueryEntry entry = (QueryEntry) arg;
final Data keyData = entry.getKeyData();
return (key == null || key.equals(keyData)) && predicate.apply((Map.Entry)arg);
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
super.writeData(out);
out.writeObject(predicate);
}
@Override
public void readData(ObjectDataInput in) throws IOException {
super.readData(in);
predicate = in.readObject();
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_map_QueryEventFilter.java |
263 | public class SloppyMathTests extends ElasticsearchTestCase {
@Test
public void testAccuracy() {
for (double lat1 = -89; lat1 <= 89; lat1+=1) {
final double lon1 = randomLongitude();
for (double i = -180; i <= 180; i+=1) {
final double lon2 = i;
final double lat2 = randomLatitude();
assertAccurate(lat1, lon1, lat2, lon2);
}
}
}
@Test
public void testSloppyMath() {
assertThat(GeoDistance.SLOPPY_ARC.calculate(-46.645, -171.057, -46.644, -171.058, DistanceUnit.METERS), closeTo(134.87709, maxError(134.87709)));
assertThat(GeoDistance.SLOPPY_ARC.calculate(-77.912, -81.173, -77.912, -81.171, DistanceUnit.METERS), closeTo(46.57161, maxError(46.57161)));
assertThat(GeoDistance.SLOPPY_ARC.calculate(65.75, -20.708, 65.75, -20.709, DistanceUnit.METERS), closeTo(45.66996, maxError(45.66996)));
assertThat(GeoDistance.SLOPPY_ARC.calculate(-86.9, 53.738, -86.9, 53.741, DistanceUnit.METERS), closeTo(18.03998, maxError(18.03998)));
assertThat(GeoDistance.SLOPPY_ARC.calculate(89.041, 115.93, 89.04, 115.946, DistanceUnit.METERS), closeTo(115.11711, maxError(115.11711)));
testSloppyMath(DistanceUnit.METERS, 0.01, 5, 45, 90);
testSloppyMath(DistanceUnit.KILOMETERS, 0.01, 5, 45, 90);
testSloppyMath(DistanceUnit.INCH, 0.01, 5, 45, 90);
testSloppyMath(DistanceUnit.MILES, 0.01, 5, 45, 90);
}
private static double maxError(double distance) {
return distance / 1000.0;
}
private void testSloppyMath(DistanceUnit unit, double...deltaDeg) {
final double lat1 = randomLatitude();
final double lon1 = randomLongitude();
logger.info("testing SloppyMath with {} at \"{}, {}\"", unit, lat1, lon1);
for (int test = 0; test < deltaDeg.length; test++) {
for (int i = 0; i < 100; i++) {
// crop pole areas, since we know the function
// is not accurate around lat(89°, 90°) and lat(-90°, -89°)
final double lat2 = Math.max(-89.0, Math.min(+89.0, lat1 + (randomDouble() - 0.5) * 2 * deltaDeg[test]));
final double lon2 = lon1 + (randomDouble() - 0.5) * 2 * deltaDeg[test];
final double accurate = GeoDistance.ARC.calculate(lat1, lon1, lat2, lon2, unit);
final double dist = GeoDistance.SLOPPY_ARC.calculate(lat1, lon1, lat2, lon2, unit);
assertThat("distance between("+lat1+", "+lon1+") and ("+lat2+", "+lon2+"))", dist, closeTo(accurate, maxError(accurate)));
}
}
}
private static void assertAccurate(double lat1, double lon1, double lat2, double lon2) {
double accurate = GeoDistance.ARC.calculate(lat1, lon1, lat2, lon2, DistanceUnit.METERS);
double sloppy = GeoDistance.SLOPPY_ARC.calculate(lat1, lon1, lat2, lon2, DistanceUnit.METERS);
assertThat("distance between("+lat1+", "+lon1+") and ("+lat2+", "+lon2+"))", sloppy, closeTo(accurate, maxError(accurate)));
}
private static final double randomLatitude() {
// crop pole areas, since we know the function
// is not accurate around lat(89°, 90°) and lat(-90°, -89°)
return (getRandom().nextDouble() - 0.5) * 178.0;
}
private static final double randomLongitude() {
return (getRandom().nextDouble() - 0.5) * 360.0;
}
} | 0true
| src_test_java_org_apache_lucene_util_SloppyMathTests.java |
1,685 | @Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_ADMIN_ROLE")
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blStandardElements")
@AdminPresentationClass(friendlyName = "AdminRoleImpl_baseAdminRole")
public class AdminRoleImpl implements AdminRole, AdminMainEntity {
private static final Log LOG = LogFactory.getLog(AdminRoleImpl.class);
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(generator = "AdminRoleId")
@GenericGenerator(
name="AdminRoleId",
strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name="segment_value", value="AdminRoleImpl"),
@Parameter(name="entity_name", value="org.broadleafcommerce.openadmin.server.security.domain.AdminRoleImpl")
}
)
@Column(name = "ADMIN_ROLE_ID")
@AdminPresentation(friendlyName = "AdminRoleImpl_Admin_Role_ID", group = "AdminRoleImpl_Primary_Key", visibility = VisibilityEnum.HIDDEN_ALL)
protected Long id;
@Column(name = "NAME", nullable=false)
@AdminPresentation(friendlyName = "AdminRoleImpl_Name", order = 1, group = "AdminRoleImpl_Role",
validationConfigurations = { @ValidationConfiguration(
validationImplementation = "blRegexPropertyValidator",
configurationItems = { @ConfigurationItem(itemName = "regularExpression", itemValue = "ROLE_.+"),
@ConfigurationItem(itemName = ConfigurationItem.ERROR_MESSAGE, itemValue = "roleNameError") }
) })
protected String name;
@Column(name = "DESCRIPTION", nullable=false)
@AdminPresentation(friendlyName = "AdminRoleImpl_Description", order=2, group = "AdminRoleImpl_Role", prominent=true)
protected String description;
/** All users that have this role */
@ManyToMany(fetch = FetchType.LAZY, targetEntity = AdminUserImpl.class)
@JoinTable(name = "BLC_ADMIN_USER_ROLE_XREF", joinColumns = @JoinColumn(name = "ADMIN_ROLE_ID", referencedColumnName = "ADMIN_ROLE_ID"), inverseJoinColumns = @JoinColumn(name = "ADMIN_USER_ID", referencedColumnName = "ADMIN_USER_ID"))
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blStandardElements")
@BatchSize(size = 50)
protected Set<AdminUser> allUsers = new HashSet<AdminUser>();
@ManyToMany(fetch = FetchType.LAZY, targetEntity = AdminPermissionImpl.class)
@JoinTable(name = "BLC_ADMIN_ROLE_PERMISSION_XREF", joinColumns = @JoinColumn(name = "ADMIN_ROLE_ID", referencedColumnName = "ADMIN_ROLE_ID"), inverseJoinColumns = @JoinColumn(name = "ADMIN_PERMISSION_ID", referencedColumnName = "ADMIN_PERMISSION_ID"))
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blStandardElements")
@BatchSize(size = 50)
@AdminPresentationCollection(addType = AddMethodType.LOOKUP, friendlyName = "permissionListTitle", manyToField = "allRoles",
operationTypes = @AdminPresentationOperationTypes(removeType = OperationType.NONDESTRUCTIVEREMOVE))
protected Set<AdminPermission> allPermissions= new HashSet<AdminPermission>();
@Override
public Set<AdminPermission> getAllPermissions() {
return allPermissions;
}
@Override
public Long getId() {
return id;
}
@Override
public void setId(Long id) {
this.id = id;
}
@Override
public String getName() {
return name;
}
public Set<AdminUser> getAllUsers() {
return allUsers;
}
@Override
public void setName(String name) {
this.name = name;
}
@Override
public String getDescription() {
return description;
}
@Override
public void setDescription(String description) {
this.description = description;
}
public void setAllPermissions(Set<AdminPermission> allPermissions) {
this.allPermissions = allPermissions;
}
public void checkCloneable(AdminRole adminRole) throws CloneNotSupportedException, SecurityException, NoSuchMethodException {
Method cloneMethod = adminRole.getClass().getMethod("clone", new Class[]{});
if (cloneMethod.getDeclaringClass().getName().startsWith("org.broadleafcommerce") && !adminRole.getClass().getName().startsWith("org.broadleafcommerce")) {
//subclass is not implementing the clone method
throw new CloneNotSupportedException("Custom extensions and implementations should implement clone.");
}
}
@Override
public AdminRole clone() {
AdminRole clone;
try {
clone = (AdminRole) Class.forName(this.getClass().getName()).newInstance();
try {
checkCloneable(clone);
} catch (CloneNotSupportedException e) {
LOG.warn("Clone implementation missing in inheritance hierarchy outside of Broadleaf: " + clone.getClass().getName(), e);
}
clone.setId(id);
clone.setName(name);
clone.setDescription(description);
//don't clone the allUsers collection, as it would cause a recursion
if (allPermissions != null) {
for (AdminPermission permission : allPermissions) {
AdminPermission permissionClone = permission.clone();
clone.getAllPermissions().add(permissionClone);
}
}
} catch (Exception e) {
throw new RuntimeException(e);
}
return clone;
}
@Override
public String getMainEntityName() {
return getName();
}
} | 1no label
| admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_security_domain_AdminRoleImpl.java |
57 | public class OLockException extends OException {
private static final long serialVersionUID = 2215169397325875189L;
public OLockException(String iMessage) {
super(iMessage);
// OProfiler.getInstance().updateCounter("system.concurrency.OLockException", +1);
}
public OLockException(String iMessage, Exception iException) {
super(iMessage, iException);
// OProfiler.getInstance().updateCounter("system.concurrency.OLockException", +1);
}
} | 0true
| commons_src_main_java_com_orientechnologies_common_concur_lock_OLockException.java |
1,574 | public class AdornedTargetCollectionMetadata extends CollectionMetadata {
private boolean ignoreAdornedProperties;
private String parentObjectClass;
private String[] maintainedAdornedTargetFields = {};
private String[] gridVisibleFields = {};
public boolean isIgnoreAdornedProperties() {
return ignoreAdornedProperties;
}
public void setIgnoreAdornedProperties(boolean ignoreAdornedProperties) {
this.ignoreAdornedProperties = ignoreAdornedProperties;
}
public String getParentObjectClass() {
return parentObjectClass;
}
public void setParentObjectClass(String parentObjectClass) {
this.parentObjectClass = parentObjectClass;
}
public String[] getGridVisibleFields() {
return gridVisibleFields;
}
public void setGridVisibleFields(String[] gridVisibleFields) {
this.gridVisibleFields = gridVisibleFields;
}
public String[] getMaintainedAdornedTargetFields() {
return maintainedAdornedTargetFields;
}
public void setMaintainedAdornedTargetFields(String[] maintainedAdornedTargetFields) {
this.maintainedAdornedTargetFields = maintainedAdornedTargetFields;
}
@Override
public void accept(MetadataVisitor visitor) {
visitor.visit(this);
}
@Override
protected FieldMetadata populate(FieldMetadata metadata) {
((AdornedTargetCollectionMetadata) metadata).ignoreAdornedProperties = ignoreAdornedProperties;
((AdornedTargetCollectionMetadata) metadata).parentObjectClass = parentObjectClass;
((AdornedTargetCollectionMetadata) metadata).maintainedAdornedTargetFields = maintainedAdornedTargetFields;
((AdornedTargetCollectionMetadata) metadata).gridVisibleFields = gridVisibleFields;
return super.populate(metadata);
}
@Override
public FieldMetadata cloneFieldMetadata() {
AdornedTargetCollectionMetadata metadata = new AdornedTargetCollectionMetadata();
return populate(metadata);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof AdornedTargetCollectionMetadata)) return false;
if (!super.equals(o)) return false;
AdornedTargetCollectionMetadata metadata = (AdornedTargetCollectionMetadata) o;
if (ignoreAdornedProperties != metadata.ignoreAdornedProperties) return false;
if (!Arrays.equals(gridVisibleFields, metadata.gridVisibleFields)) return false;
if (!Arrays.equals(maintainedAdornedTargetFields, metadata.maintainedAdornedTargetFields)) return false;
if (parentObjectClass != null ? !parentObjectClass.equals(metadata.parentObjectClass) : metadata.parentObjectClass != null)
return false;
return true;
}
@Override
public int hashCode() {
int result = super.hashCode();
result = 31 * result + (ignoreAdornedProperties ? 1 : 0);
result = 31 * result + (parentObjectClass != null ? parentObjectClass.hashCode() : 0);
result = 31 * result + (maintainedAdornedTargetFields != null ? Arrays.hashCode(maintainedAdornedTargetFields) : 0);
result = 31 * result + (gridVisibleFields != null ? Arrays.hashCode(gridVisibleFields) : 0);
return result;
}
} | 1no label
| admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_dto_AdornedTargetCollectionMetadata.java |
552 | public class ClientTxnMultiMapProxy<K, V> extends ClientTxnProxy implements TransactionalMultiMap<K, V> {
public ClientTxnMultiMapProxy(String name, TransactionContextProxy proxy) {
super(name, proxy);
}
public boolean put(K key, V value) throws TransactionException {
TxnMultiMapPutRequest request = new TxnMultiMapPutRequest(getName(), toData(key), toData(value));
final Boolean result = invoke(request);
return result;
}
public Collection<V> get(K key) {
TxnMultiMapGetRequest request = new TxnMultiMapGetRequest(getName(), toData(key));
final PortableCollection portableCollection = invoke(request);
final Collection<Data> collection = portableCollection.getCollection();
Collection<V> coll;
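// mirror the collection type sent back by the server: a List preserves duplicate values, a Set keeps them unique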
if (collection instanceof List) {
coll = new ArrayList<V>(collection.size());
} else {
coll = new HashSet<V>(collection.size());
}
for (Data data : collection) {
coll.add((V) toObject(data));
}
return coll;
}
public boolean remove(Object key, Object value) {
TxnMultiMapRemoveRequest request = new TxnMultiMapRemoveRequest(getName(), toData(key), toData(value));
Boolean result = invoke(request);
return result;
}
public Collection<V> remove(Object key) {
TxnMultiMapRemoveAllRequest request = new TxnMultiMapRemoveAllRequest(getName(), toData(key));
PortableCollection portableCollection = invoke(request);
final Collection<Data> collection = portableCollection.getCollection();
Collection<V> coll;
if (collection instanceof List) {
coll = new ArrayList<V>(collection.size());
} else {
coll = new HashSet<V>(collection.size());
}
for (Data data : collection) {
coll.add((V) toObject(data));
}
return coll;
}
public int valueCount(K key) {
TxnMultiMapValueCountRequest request = new TxnMultiMapValueCountRequest(getName(), toData(key));
Integer result = invoke(request);
return result;
}
public int size() {
TxnMultiMapSizeRequest request = new TxnMultiMapSizeRequest(getName());
Integer result = invoke(request);
return result;
}
public String getName() {
return (String) getId();
}
@Override
public String getServiceName() {
return MultiMapService.SERVICE_NAME;
}
void onDestroy() {
}
} | 0true
| hazelcast-client_src_main_java_com_hazelcast_client_txn_proxy_ClientTxnMultiMapProxy.java |
935 | threadPool.executor(executor).execute(new Runnable() {
@Override
public void run() {
int shardIndex = -1;
for (final ShardIterator shardIt : shardsIts) {
shardIndex++;
final ShardRouting shard = shardIt.firstOrNull();
if (shard != null) {
if (shard.currentNodeId().equals(nodes.localNodeId())) {
performOperation(shardIt, shardIndex, false);
}
}
}
}
}); | 0true
| src_main_java_org_elasticsearch_action_support_broadcast_TransportBroadcastOperationAction.java |
143 | private static class LoaderConfiguration {
private static final Logger log =
LoggerFactory.getLogger(LoaderConfiguration.class);
private final boolean enabled;
private final List<ClassLoader> preferredLoaders;
private final boolean useCallerLoader;
private final boolean useThreadContextLoader;
private volatile boolean allInit = false;
private volatile boolean standardInit = false;
private LoaderConfiguration(boolean enabled, List<ClassLoader> preferredLoaders,
boolean useCallerLoader, boolean useThreadContextLoader) {
this.enabled = enabled;
this.preferredLoaders = preferredLoaders;
this.useCallerLoader = useCallerLoader;
this.useThreadContextLoader = useThreadContextLoader;
}
private LoaderConfiguration() {
enabled = getEnabledByDefault();
preferredLoaders = ImmutableList.of(ReflectiveConfigOptionLoader.class.getClassLoader());
useCallerLoader = true;
useThreadContextLoader = true;
}
private boolean getEnabledByDefault() {
List<String> sources =
Arrays.asList(System.getProperty(SYS_PROP_NAME), System.getenv(ENV_VAR_NAME));
for (String setting : sources) {
if (null != setting) {
boolean enabled = setting.equalsIgnoreCase("true");
log.debug("Option loading enabled={}", enabled);
return enabled;
}
}
log.debug("Option loading enabled by default");
return true;
}
LoaderConfiguration setEnabled(boolean b) {
return new LoaderConfiguration(b, preferredLoaders, useCallerLoader, useThreadContextLoader);
}
LoaderConfiguration setPreferredClassLoaders(List<ClassLoader> cl) {
return new LoaderConfiguration(enabled, cl, useCallerLoader, useThreadContextLoader);
}
LoaderConfiguration setUseCallerLoader(boolean b) {
return new LoaderConfiguration(enabled, preferredLoaders, b, useThreadContextLoader);
}
LoaderConfiguration setUseThreadContextLoader(boolean b) {
return new LoaderConfiguration(enabled, preferredLoaders, useCallerLoader, b);
}
} | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_core_util_ReflectiveConfigOptionLoader.java |
1,766 | public class GeoUtils {
/** Earth ellipsoid major axis defined by WGS 84 in meters */
public static final double EARTH_SEMI_MAJOR_AXIS = 6378137.0; // meters (WGS 84)
/** Earth ellipsoid minor axis defined by WGS 84 in meters */
public static final double EARTH_SEMI_MINOR_AXIS = 6356752.314245; // meters (WGS 84)
/** Earth mean radius defined by WGS 84 in meters */
public static final double EARTH_MEAN_RADIUS = 6371008.7714D; // meters (WGS 84)
/** Earth axis ratio defined by WGS 84 (0.996647189335) */
public static final double EARTH_AXIS_RATIO = EARTH_SEMI_MINOR_AXIS / EARTH_SEMI_MAJOR_AXIS;
/** Earth ellipsoid equator length in meters */
public static final double EARTH_EQUATOR = 2*Math.PI * EARTH_SEMI_MAJOR_AXIS;
/** Earth ellipsoid polar distance in meters */
public static final double EARTH_POLAR_DISTANCE = Math.PI * EARTH_SEMI_MINOR_AXIS;
/**
* Calculate the width (in meters) of geohash cells at a specific level
* @param level geohash level must be greater or equal to zero
* @return the width of cells at level in meters
*/
public static double geoHashCellWidth(int level) {
assert level>=0;
// Geohash cells are split into 32 cells at each level. The grid
// alternates at each level between an 8x4 and a 4x8 grid
return EARTH_EQUATOR / (1L<<((((level+1)/2)*3) + ((level/2)*2)));
}
/**
* Calculate the width (in meters) of quadtree cells at a specific level
* @param level quadtree level must be greater or equal to zero
* @return the width of cells at level in meters
*/
public static double quadTreeCellWidth(int level) {
assert level >=0;
return EARTH_EQUATOR / (1L<<level);
}
/**
* Calculate the height (in meters) of geohash cells at a specific level
* @param level geohash level must be greater or equal to zero
* @return the height of cells at level in meters
*/
public static double geoHashCellHeight(int level) {
assert level>=0;
// Geohash cells are split into 32 cells at each level. The grid
// alternates at each level between an 8x4 and a 4x8 grid
return EARTH_POLAR_DISTANCE / (1L<<((((level+1)/2)*2) + ((level/2)*3)));
}
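// e.g. geoHashCellWidth(1) == EARTH_EQUATOR / 8 and geoHashCellHeight(1) == EARTH_POLAR_DISTANCE / 4,
// matching the 8x4 grid of the first geohash level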
/**
* Calculate the height (in meters) of quadtree cells at a specific level
* @param level quadtree level must be greater or equal to zero
* @return the height of cells at level in meters
*/
public static double quadTreeCellHeight(int level) {
assert level>=0;
return EARTH_POLAR_DISTANCE / (1L<<level);
}
/**
* Calculate the size (in meters) of geohash cells at a specific level
* @param level geohash level must be greater or equal to zero
* @return the size of cells at level in meters
*/
public static double geoHashCellSize(int level) {
assert level>=0;
final double w = geoHashCellWidth(level);
final double h = geoHashCellHeight(level);
return Math.sqrt(w*w + h*h);
}
/**
* Calculate the size (in meters) of quadtree cells at a specific level
* @param level quadtree level must be greater or equal to zero
* @return the size of cells at level in meters
*/
public static double quadTreeCellSize(int level) {
assert level>=0;
return Math.sqrt(EARTH_POLAR_DISTANCE*EARTH_POLAR_DISTANCE + EARTH_EQUATOR*EARTH_EQUATOR) / (1L<<level);
}
/**
* Calculate the number of levels needed for a specific precision. Quadtree
* cells will not exceed the specified size (diagonal) of the precision.
* @param meters Maximum size of cells in meters (must be greater than zero)
* @return levels need to achieve precision
*/
public static int quadTreeLevelsForPrecision(double meters) {
assert meters >= 0;
if(meters == 0) {
return QuadPrefixTree.MAX_LEVELS_POSSIBLE;
} else {
final double ratio = 1+(EARTH_POLAR_DISTANCE / EARTH_EQUATOR); // cell ratio
final double width = Math.sqrt((meters*meters)/(ratio*ratio)); // convert to cell width
final long part = Math.round(Math.ceil(EARTH_EQUATOR / width));
final int level = Long.SIZE - Long.numberOfLeadingZeros(part)-1; // (log_2)
return (part <= (1L << level)) ? level : (level + 1); // adjust level
}
}
/**
* Calculate the number of levels needed for a specific precision. QuadTree
* cells will not exceed the specified size (diagonal) of the precision.
* @param distance Maximum size of cells as unit string (must be greater than or equal to zero)
* @return levels need to achieve precision
*/
public static int quadTreeLevelsForPrecision(String distance) {
return quadTreeLevelsForPrecision(DistanceUnit.METERS.parse(distance, DistanceUnit.DEFAULT));
}
/**
* Calculate the number of levels needed for a specific precision. GeoHash
* cells will not exceed the specified size (diagonal) of the precision.
* @param meters Maximum size of cells in meters (must be greater than or equal to zero)
* @return levels need to achieve precision
*/
public static int geoHashLevelsForPrecision(double meters) {
assert meters >= 0;
if(meters == 0) {
return GeohashPrefixTree.getMaxLevelsPossible();
} else {
final double ratio = 1+(EARTH_POLAR_DISTANCE / EARTH_EQUATOR); // cell ratio
final double width = Math.sqrt((meters*meters)/(ratio*ratio)); // convert to cell width
final double part = Math.ceil(EARTH_EQUATOR / width);
if(part == 1)
return 1;
final int bits = (int)Math.round(Math.ceil(Math.log(part) / Math.log(2)));
final int full = bits / 5; // number of 5 bit subdivisions
final int left = bits - full*5; // bit representing the last level
final int even = full + (left>0?1:0); // number of even levels
final int odd = full + (left>3?1:0); // number of odd levels
return even+odd;
}
}
/**
* Calculate the number of levels needed for a specific precision. GeoHash
* cells will not exceed the specified size (diagonal) of the precision.
* @param distance Maximum size of cells as unit string (must be greater than or equal to zero)
* @return levels need to achieve precision
*/
public static int geoHashLevelsForPrecision(String distance) {
return geoHashLevelsForPrecision(DistanceUnit.METERS.parse(distance, DistanceUnit.DEFAULT));
}
/**
* Normalize longitude to lie within the -180 (exclusive) to 180 (inclusive) range.
*
* @param lon Longitude to normalize
* @return The normalized longitude.
*/
public static double normalizeLon(double lon) {
return centeredModulus(lon, 360);
}
/**
* Normalize latitude to lie within the -90 to 90 (both inclusive) range.
* <p/>
* Note: You should not normalize longitude and latitude separately,
* because when normalizing latitude it may be necessary to
* add a shift of 180° in the longitude.
* For this purpose, you should call the
* {@link #normalizePoint(GeoPoint)} function.
*
* @param lat Latitude to normalize
* @return The normalized latitude.
* @see #normalizePoint(GeoPoint)
*/
public static double normalizeLat(double lat) {
lat = centeredModulus(lat, 360);
if (lat < -90) {
lat = -180 - lat;
} else if (lat > 90) {
lat = 180 - lat;
}
return lat;
}
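// e.g. normalizeLon(190) == -170 and normalizeLat(91) == 89; normalizePoint(...) additionally
// shifts the longitude by 180° when normalizing a latitude that crosses a pole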
/**
* Normalize the geo {@code Point} for its coordinates to lie within their
* respective normalized ranges.
* <p/>
* Note: A shift of 180° is applied in the longitude if necessary,
* in order to normalize properly the latitude.
*
* @param point The point to normalize in-place.
*/
public static void normalizePoint(GeoPoint point) {
normalizePoint(point, true, true);
}
/**
* Normalize the geo {@code Point} for the given coordinates to lie within
* their respective normalized ranges.
* <p/>
* You can control which coordinate gets normalized with the two flags.
* <p/>
* Note: A shift of 180° is applied in the longitude if necessary,
* in order to normalize properly the latitude.
* If normalizing latitude but not longitude, it is assumed that
* the longitude is in the form x+k*360, with x in ]-180;180],
* and k is meaningful to the application.
* Therefore x will be adjusted while keeping k preserved.
*
* @param point The point to normalize in-place.
* @param normLat Whether to normalize latitude or leave it as is.
* @param normLon Whether to normalize longitude.
*/
public static void normalizePoint(GeoPoint point, boolean normLat, boolean normLon) {
double lat = point.lat();
double lon = point.lon();
normLat = normLat && (lat>90 || lat <= -90);
normLon = normLon && (lon>180 || lon <= -180);
if (normLat) {
lat = centeredModulus(lat, 360);
boolean shift = true;
if (lat < -90) {
lat = -180 - lat;
} else if (lat > 90) {
lat = 180 - lat;
} else {
// No need to shift the longitude, and the latitude is normalized
shift = false;
}
if (shift) {
if (normLon) {
lon += 180;
} else {
// Longitude won't be normalized,
// keep it in the form x+k*360 (with x in ]-180;180])
// by only changing x, assuming k is meaningful for the user application.
lon += normalizeLon(lon) > 0 ? -180 : 180;
}
}
}
if (normLon) {
lon = centeredModulus(lon, 360);
}
point.reset(lat, lon);
}
private static double centeredModulus(double dividend, double divisor) {
double rtn = dividend % divisor;
if (rtn <= 0) {
rtn += divisor;
}
if (rtn > divisor / 2) {
rtn -= divisor;
}
return rtn;
}
} | 1no label
| src_main_java_org_elasticsearch_common_geo_GeoUtils.java |
556 | public interface ORID extends OIdentifiable, OSerializableStream {
public static final char PREFIX = '#';
public static final char SEPARATOR = ':';
public static final int CLUSTER_MAX = 32767;
public static final int CLUSTER_ID_INVALID = -1;
public static final OClusterPosition CLUSTER_POS_INVALID = OClusterPosition.INVALID_POSITION;
public int getClusterId();
public OClusterPosition getClusterPosition();
public void reset();
public boolean isPersistent();
public boolean isValid();
public boolean isNew();
public boolean isTemporary();
public ORID copy();
public String next();
public ORID nextRid();
public int toStream(OutputStream iStream) throws IOException;
public StringBuilder toString(StringBuilder iBuffer);
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_id_ORID.java |
43 | proposals.add(new ICompletionProposal() {
@Override
public Point getSelection(IDocument document) {
return null;
}
@Override
public Image getImage() {
return CeylonResources.VERSION;
}
@Override
public String getDisplayString() {
return d.getVersion();
}
@Override
public IContextInformation getContextInformation() {
return null;
}
@Override
public String getAdditionalProposalInfo() {
return "Repository: " + d.getOrigin();
}
@Override
public void apply(IDocument document) {
try {
document.replace(selection.x, selection.y,
d.getVersion());
}
catch (BadLocationException e) {
e.printStackTrace();
}
linkedModeModel.exit(ILinkedModeListener.UPDATE_CARET);
}
}); | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_ModuleCompletions.java |
112 | @RunWith(HazelcastParallelClassRunner.class)
@Category(QuickTest.class)
public class ClientNearCacheTest {
private static final int MAX_CACHE_SIZE = 100;
private static final int MAX_TTL_SECONDS = 3;
private static final int MAX_IDLE_SECONDS = 1;
private static final String NEAR_CACHE_WITH_NO_INVALIDATION = "NEAR_CACHE_WITH_NO_INVALIDATION";
private static final String NEAR_CACHE_WITH_MAX_SIZE = "NEAR_CACHE_WITH_MAX_SIZE";
private static final String NEAR_CACHE_WITH_TTL = "NEAR_CACHE_WITH_TTL";
private static final String NEAR_CACHE_WITH_IDLE = "NEAR_CACHE_WITH_IDLE";
private static final String NEAR_CACHE_WITH_INVALIDATION = "NEAR_CACHE_WITH_INVALIDATION";
private static HazelcastInstance h1;
private static HazelcastInstance h2;
private static HazelcastInstance client;
@BeforeClass
public static void setup() throws Exception {
h1 = Hazelcast.newHazelcastInstance();
h2 = Hazelcast.newHazelcastInstance();
ClientConfig clientConfig = new ClientConfig();
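// each near-cache config name ends with '*' so it applies to every map whose name starts with that prefix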
NearCacheConfig basicConfigNoInvalidation = new NearCacheConfig();
basicConfigNoInvalidation.setInMemoryFormat(InMemoryFormat.OBJECT);
basicConfigNoInvalidation.setName(NEAR_CACHE_WITH_NO_INVALIDATION + "*");
basicConfigNoInvalidation.setInvalidateOnChange(false);
clientConfig.addNearCacheConfig(basicConfigNoInvalidation);
NearCacheConfig maxSizeConfig = new NearCacheConfig();
maxSizeConfig.setMaxSize(MAX_CACHE_SIZE);
maxSizeConfig.setInvalidateOnChange(false);
maxSizeConfig.setName(NEAR_CACHE_WITH_MAX_SIZE + "*");
clientConfig.addNearCacheConfig(maxSizeConfig);
NearCacheConfig ttlConfig = new NearCacheConfig();
ttlConfig.setName(NEAR_CACHE_WITH_TTL + "*");
ttlConfig.setInvalidateOnChange(false);
ttlConfig.setTimeToLiveSeconds(MAX_TTL_SECONDS);
clientConfig.addNearCacheConfig(ttlConfig);
NearCacheConfig idleConfig = new NearCacheConfig();
idleConfig.setName(NEAR_CACHE_WITH_IDLE + "*");
idleConfig.setInvalidateOnChange(false);
idleConfig.setMaxIdleSeconds(MAX_IDLE_SECONDS);
clientConfig.addNearCacheConfig(idleConfig);
NearCacheConfig invalidateConfig = new NearCacheConfig();
invalidateConfig.setName(NEAR_CACHE_WITH_INVALIDATION + "*");
invalidateConfig.setInvalidateOnChange(true);
clientConfig.addNearCacheConfig(invalidateConfig);
client = HazelcastClient.newHazelcastClient(clientConfig);
}
@AfterClass
public static void cleanup() throws Exception {
HazelcastClient.shutdownAll();
Hazelcast.shutdownAll();
}
@Test
public void testNearCacheFasterThanGoingToTheCluster() {
final IMap map = client.getMap(randomMapName(NEAR_CACHE_WITH_INVALIDATION));
final int size = 2007;
for (int i = 0; i < size; i++) {
map.put(i, i);
}
long begin = System.currentTimeMillis();
for (int i = 0; i < size; i++) {
map.get(i);
}
long readFromClusterTime = System.currentTimeMillis() - begin;
begin = System.currentTimeMillis();
for (int i = 0; i < size; i++) {
map.get(i);
}
long readFromCacheTime = System.currentTimeMillis() - begin;
assertTrue("readFromCacheTime > readFromClusterTime", readFromCacheTime < readFromClusterTime);
}
@Test
public void testGetAllChecksNearCacheFirst() throws Exception {
final IMap map = client.getMap(randomMapName(NEAR_CACHE_WITH_NO_INVALIDATION));
final HashSet keys = new HashSet();
final int size = 1003;
for (int i = 0; i < size; i++) {
map.put(i, i);
keys.add(i);
}
//populate near cache
for (int i = 0; i < size; i++) {
map.get(i);
}
//getAll generates the near cache hits
map.getAll(keys);
NearCacheStats stats = map.getLocalMapStats().getNearCacheStats();
assertEquals(size, stats.getOwnedEntryCount());
assertEquals(size, stats.getHits());
}
@Test
public void testGetAllPopulatesNearCache() throws Exception {
final IMap map = client.getMap(randomMapName(NEAR_CACHE_WITH_NO_INVALIDATION));
final HashSet keys = new HashSet();
final int size = 1214;
for (int i = 0; i < size; i++) {
map.put(i, i);
keys.add(i);
}
//getAll populates near cache
map.getAll(keys);
NearCacheStats stats = map.getLocalMapStats().getNearCacheStats();
assertEquals(size, stats.getOwnedEntryCount());
}
@Test
public void testGetAsync() throws Exception {
final IMap map = client.getMap(randomMapName(NEAR_CACHE_WITH_NO_INVALIDATION));
int size = 1009;
for (int i = 0; i < size; i++) {
map.put(i, i);
}
//populate near cache
for (int i = 0; i < size; i++) {
map.get(i);
}
//generate near cache hits with async call
for (int i = 0; i < size; i++) {
Future async = map.getAsync(i);
async.get();
}
NearCacheStats stats = map.getLocalMapStats().getNearCacheStats();
assertEquals(size, stats.getOwnedEntryCount());
assertEquals(size, stats.getHits());
}
@Test
public void testGetAsyncPopulatesNearCache() throws Exception {
final IMap map = client.getMap(randomMapName(NEAR_CACHE_WITH_NO_INVALIDATION));
int size = 1239;
for (int i = 0; i < size; i++) {
map.put(i, i);
}
//populate near cache
for (int i = 0; i < size; i++) {
Future async = map.getAsync(i);
async.get();
}
//generate near cache hits with async call
for (int i = 0; i < size; i++) {
map.get(i);
}
NearCacheStats stats = map.getLocalMapStats().getNearCacheStats();
assertEquals(size, stats.getOwnedEntryCount());
assertEquals(size, stats.getHits());
}
@Test
public void testRemovedKeyValueNotInNearCache() throws Exception {
final IMap map = client.getMap(randomMapName(NEAR_CACHE_WITH_INVALIDATION));
int size = 1247;
for (int i = 0; i < size; i++) {
map.put(i, i);
}
//populate near cache
for (int i = 0; i < size; i++) {
map.get(i);
}
for (int i = 0; i < size; i++) {
map.remove(i);
assertNull(map.get(i));
}
}
@Test
public void testNearCachePopulatedAndHitsGenerated() throws Exception {
final IMap map = client.getMap(randomMapName(NEAR_CACHE_WITH_NO_INVALIDATION));
final int size = 1278;
for (int i = 0; i < size; i++) {
map.put(i, i);
map.get(i); //populate near cache
map.get(i); //generate near cache hits
}
NearCacheStats stats = map.getLocalMapStats().getNearCacheStats();
System.out.println("stats = " + stats);
assertEquals(size, stats.getOwnedEntryCount());
assertEquals(size, stats.getHits());
}
@Test
public void testNearCachePopulatedAndHitsGenerated2() throws Exception {
final IMap map = client.getMap(randomMapName(NEAR_CACHE_WITH_NO_INVALIDATION));
final int size = 1278;
for (int i = 0; i < size; i++) {
map.put(i, i);
}
for (int i = 0; i < size; i++) {
map.get(i); //populate near cache
}
for (int i = 0; i < size; i++) {
map.get(i); //generate near cache hits
}
NearCacheStats stats = map.getLocalMapStats().getNearCacheStats();
System.out.println("stats = " + stats);
assertEquals(size, stats.getOwnedEntryCount());
assertEquals(size, stats.getHits());
}
@Test
public void testIssue2009() throws Exception {
final IMap map = client.getMap(randomMapName(NEAR_CACHE_WITH_INVALIDATION));
NearCacheStats stats = map.getLocalMapStats().getNearCacheStats();
assertNotNull(stats);
}
@Test
public void testGetNearCacheStatsBeforePopulation() {
final IMap map = client.getMap(randomMapName(NEAR_CACHE_WITH_NO_INVALIDATION));
final int size = 101;
for (int i = 0; i < size; i++) {
map.put(i, i);
}
final NearCacheStats stats = map.getLocalMapStats().getNearCacheStats();
assertNotNull(stats);
}
@Test
public void testNearCacheMisses() {
final IMap map = client.getMap(randomMapName(NEAR_CACHE_WITH_NO_INVALIDATION));
final int size = 1321;
for (int i = 0; i < size; i++) {
map.get("NotThere" + i);
}
NearCacheStats stats = map.getLocalMapStats().getNearCacheStats();
assertEquals(size, stats.getMisses());
assertEquals(size, stats.getOwnedEntryCount());
}
@Test
public void testNearCacheMisses_whenRepeatedOnSameKey() {
final IMap map = client.getMap(randomMapName(NEAR_CACHE_WITH_INVALIDATION));
final int size = 17;
for (int i = 0; i < size; i++) {
map.get("NOT_THERE");
}
NearCacheStats stats = map.getLocalMapStats().getNearCacheStats();
assertEquals(1, stats.getOwnedEntryCount());
assertEquals(size, stats.getMisses());
}
@Test
public void testMapRemove_WithNearCache() {
final IMap map = client.getMap(randomMapName(NEAR_CACHE_WITH_INVALIDATION));
final int size = 1113;
for (int i = 0; i < size; i++) {
map.put(i, i);
}
for (int i = 0; i < size; i++) {
map.get(i);
}
for (int i = 0; i < size; i++) {
map.remove(i);
}
NearCacheStats stats = map.getLocalMapStats().getNearCacheStats();
assertEquals(size, stats.getMisses());
assertEquals(0, stats.getOwnedEntryCount());
}
@Test
public void testNearCacheMaxSize() {
final IMap map = client.getMap(randomMapName(NEAR_CACHE_WITH_MAX_SIZE));
for (int i = 0; i < MAX_CACHE_SIZE + 1; i++) {
map.put(i, i);
}
//populate near cache
for (int i = 0; i < MAX_CACHE_SIZE + 1; i++) {
map.get(i);
}
final int evictionSize = (int) (MAX_CACHE_SIZE * (ClientNearCache.EVICTION_PERCENTAGE / 100.0));
final int remainingSize = MAX_CACHE_SIZE - evictionSize;
HazelcastTestSupport.assertTrueEventually(new AssertTask() {
@Override
public void run() throws Exception {
final NearCacheStats stats = map.getLocalMapStats().getNearCacheStats();
assertEquals(remainingSize, stats.getOwnedEntryCount());
}
});
}
@Test
public void testNearCacheTTLCleanup() {
final IMap map = client.getMap(randomMapName(NEAR_CACHE_WITH_TTL));
final int size = 133;
for (int i = 0; i < size; i++) {
map.put(i, i);
}
//populate near cache
for (int i = 0; i < size; i++) {
map.get(i);
}
sleepSeconds(ClientNearCache.TTL_CLEANUP_INTERVAL_MILLS / 1000);
map.get(0);
final int expectedSize = 1;
HazelcastTestSupport.assertTrueEventually(new AssertTask() {
public void run() throws Exception {
final NearCacheStats stats = map.getLocalMapStats().getNearCacheStats();
assertEquals(expectedSize, stats.getOwnedEntryCount());
}
});
}
@Test
public void testNearCacheIdleRecordsEvicted() {
final IMap map = client.getMap(randomMapName(NEAR_CACHE_WITH_IDLE));
final int size = 147;
for (int i = 0; i < size; i++) {
map.put(i, i);
}
//populate near cache
for (int i = 0; i < size; i++) {
map.get(i);
}
//generate near cache hits
for (int i = 0; i < size; i++) {
map.get(i);
}
NearCacheStats stats = map.getLocalMapStats().getNearCacheStats();
long hitsBeforeIdleExpire = stats.getHits();
sleepSeconds(MAX_IDLE_SECONDS + 1);
for (int i = 0; i < size; i++) {
map.get(i);
}
stats = map.getLocalMapStats().getNearCacheStats();
assertEquals("as the hits are not equal, the entries were not cleared from near cash after MaxIdleSeconds", hitsBeforeIdleExpire, stats.getHits(), size);
}
@Test
public void testNearCacheInvalidateOnChange() {
final String mapName = randomMapName(NEAR_CACHE_WITH_INVALIDATION);
final IMap nodeMap = h1.getMap(mapName);
final IMap clientMap = client.getMap(mapName);
final int size = 118;
for (int i = 0; i < size; i++) {
nodeMap.put(i, i);
}
//populate near cache
for (int i = 0; i < size; i++) {
clientMap.get(i);
}
NearCacheStats stats = clientMap.getLocalMapStats().getNearCacheStats();
long OwnedEntryCountBeforeInvalidate = stats.getOwnedEntryCount();
//invalidate near cache from cluster
for (int i = 0; i < size; i++) {
nodeMap.put(i, i);
}
assertEquals(size, OwnedEntryCountBeforeInvalidate);
assertTrueEventually(new AssertTask() {
public void run() throws Exception {
NearCacheStats stats = clientMap.getLocalMapStats().getNearCacheStats();
assertEquals(0, stats.getOwnedEntryCount());
}
});
}
@Test(expected = NullPointerException.class)
public void testNearCacheContainsNullKey() {
final IMap map = client.getMap(randomMapName(NEAR_CACHE_WITH_INVALIDATION));
map.containsKey(null);
}
@Test
public void testNearCacheContainsKey() {
final IMap map = client.getMap(randomMapName(NEAR_CACHE_WITH_INVALIDATION));
final Object key = "key";
map.put(key, "value");
map.get(key);
assertTrue(map.containsKey(key));
}
@Test
public void testNearCacheContainsKey_whenKeyAbsent() {
final IMap map = client.getMap(randomMapName(NEAR_CACHE_WITH_INVALIDATION));
assertFalse(map.containsKey("NOT_THERE"));
}
@Test
public void testNearCacheContainsKey_afterRemove() {
final IMap map = client.getMap(randomMapName(NEAR_CACHE_WITH_INVALIDATION));
final Object key = "key";
map.put(key, "value");
map.get(key);
map.remove(key);
assertFalse(map.containsKey(key));
}
} | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_ClientNearCacheTest.java |
6,420 | public class LocalTransportChannel implements TransportChannel {
private final LocalTransport sourceTransport;
// the transport we will *send to*
private final LocalTransport targetTransport;
private final String action;
private final long requestId;
private final Version version;
public LocalTransportChannel(LocalTransport sourceTransport, LocalTransport targetTransport, String action, long requestId, Version version) {
this.sourceTransport = sourceTransport;
this.targetTransport = targetTransport;
this.action = action;
this.requestId = requestId;
this.version = version;
}
@Override
public String action() {
return action;
}
@Override
public void sendResponse(TransportResponse response) throws IOException {
sendResponse(response, TransportResponseOptions.EMPTY);
}
@Override
public void sendResponse(TransportResponse response, TransportResponseOptions options) throws IOException {
BytesStreamOutput bStream = new BytesStreamOutput();
StreamOutput stream = new HandlesStreamOutput(bStream);
stream.setVersion(version);
stream.writeLong(requestId);
byte status = 0;
status = TransportStatus.setResponse(status);
stream.writeByte(status); // 0 for request, 1 for response.
response.writeTo(stream);
stream.close();
final byte[] data = bStream.bytes().toBytes();
targetTransport.threadPool().generic().execute(new Runnable() {
@Override
public void run() {
targetTransport.messageReceived(data, action, sourceTransport, version, null);
}
});
}
@Override
public void sendResponse(Throwable error) throws IOException {
BytesStreamOutput stream = new BytesStreamOutput();
try {
writeResponseExceptionHeader(stream);
RemoteTransportException tx = new RemoteTransportException(targetTransport.nodeName(), targetTransport.boundAddress().boundAddress(), action, error);
ThrowableObjectOutputStream too = new ThrowableObjectOutputStream(stream);
too.writeObject(tx);
too.close();
} catch (NotSerializableException e) {
stream.reset();
writeResponseExceptionHeader(stream);
RemoteTransportException tx = new RemoteTransportException(targetTransport.nodeName(), targetTransport.boundAddress().boundAddress(), action, new NotSerializableTransportException(error));
ThrowableObjectOutputStream too = new ThrowableObjectOutputStream(stream);
too.writeObject(tx);
too.close();
}
final byte[] data = stream.bytes().toBytes();
targetTransport.threadPool().generic().execute(new Runnable() {
@Override
public void run() {
targetTransport.messageReceived(data, action, sourceTransport, version, null);
}
});
}
private void writeResponseExceptionHeader(BytesStreamOutput stream) throws IOException {
stream.writeLong(requestId);
byte status = 0;
status = TransportStatus.setResponse(status);
status = TransportStatus.setError(status);
stream.writeByte(status);
}
} | 1no label
| src_main_java_org_elasticsearch_transport_local_LocalTransportChannel.java |
17 | private class ResponseThreadRunnable implements Runnable {
private final BlockingQueue<TextCommand> blockingQueue = new ArrayBlockingQueue<TextCommand>(200);
private final Object stopObject = new Object();
@edu.umd.cs.findbugs.annotations.SuppressWarnings("RV_RETURN_VALUE_IGNORED_BAD_PRACTICE")
public void sendResponse(TextCommand textCommand) {
blockingQueue.offer(textCommand);
}
@Override
public void run() {
while (running) {
try {
TextCommand textCommand = blockingQueue.take();
if (TextCommandConstants.TextCommandType.STOP == textCommand.getType()) {
synchronized (stopObject) {
stopObject.notify();
}
} else {
SocketTextWriter socketTextWriter = textCommand.getSocketTextWriter();
socketTextWriter.enqueue(textCommand);
}
} catch (InterruptedException e) {
return;
} catch (OutOfMemoryError e) {
OutOfMemoryErrorDispatcher.onOutOfMemory(e);
throw e;
}
}
}
@edu.umd.cs.findbugs.annotations.SuppressWarnings("RV_RETURN_VALUE_IGNORED_BAD_PRACTICE")
void stop() {
running = false;
synchronized (stopObject) {
try {
blockingQueue.offer(new AbstractTextCommand(TextCommandConstants.TextCommandType.STOP) {
@Override
public boolean readFrom(ByteBuffer cb) {
return true;
}
@Override
public boolean writeTo(ByteBuffer bb) {
return true;
}
});
//noinspection WaitNotInLoop
stopObject.wait(1000);
} catch (Exception ignored) {
}
}
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_ascii_TextCommandServiceImpl.java |
2,109 | public class DateMathParser {
private final FormatDateTimeFormatter dateTimeFormatter;
private final TimeUnit timeUnit;
public DateMathParser(FormatDateTimeFormatter dateTimeFormatter, TimeUnit timeUnit) {
this.dateTimeFormatter = dateTimeFormatter;
this.timeUnit = timeUnit;
}
public long parse(String text, long now) {
return parse(text, now, false);
}
public long parseRoundCeil(String text, long now) {
return parse(text, now, true);
}
public long parse(String text, long now, boolean roundCeil) {
long time;
String mathString;
if (text.startsWith("now")) {
time = now;
mathString = text.substring("now".length());
} else {
int index = text.indexOf("||");
String parseString;
if (index == -1) {
parseString = text;
mathString = ""; // nothing else
} else {
parseString = text.substring(0, index);
mathString = text.substring(index + 2);
}
if (roundCeil) {
time = parseRoundCeilStringValue(parseString);
} else {
time = parseStringValue(parseString);
}
}
if (mathString.isEmpty()) {
return time;
}
return parseMath(mathString, time, roundCeil);
}
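// illustrative inputs to parse(...): "now+1h" adds one hour to the supplied 'now'; "2014-11-18||+1M/d"
// (assuming the configured date format accepts that date string) parses the date, adds one month,
// then rounds down to the start of the day (or up to its end when roundCeil is true)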
private long parseMath(String mathString, long time, boolean roundUp) throws ElasticsearchParseException {
MutableDateTime dateTime = new MutableDateTime(time, DateTimeZone.UTC);
try {
for (int i = 0; i < mathString.length(); ) {
char c = mathString.charAt(i++);
int type;
if (c == '/') {
type = 0;
} else if (c == '+') {
type = 1;
} else if (c == '-') {
type = 2;
} else {
throw new ElasticsearchParseException("operator not supported for date math [" + mathString + "]");
}
int num;
if (!Character.isDigit(mathString.charAt(i))) {
num = 1;
} else {
int numFrom = i;
while (Character.isDigit(mathString.charAt(i))) {
i++;
}
num = Integer.parseInt(mathString.substring(numFrom, i));
}
if (type == 0) {
// rounding is only allowed on whole numbers
if (num != 1) {
throw new ElasticsearchParseException("rounding `/` can only be used on single unit types [" + mathString + "]");
}
}
char unit = mathString.charAt(i++);
switch (unit) {
case 'y':
if (type == 0) {
if (roundUp) {
dateTime.yearOfCentury().roundCeiling();
} else {
dateTime.yearOfCentury().roundFloor();
}
} else if (type == 1) {
dateTime.addYears(num);
} else if (type == 2) {
dateTime.addYears(-num);
}
break;
case 'M':
if (type == 0) {
if (roundUp) {
dateTime.monthOfYear().roundCeiling();
} else {
dateTime.monthOfYear().roundFloor();
}
} else if (type == 1) {
dateTime.addMonths(num);
} else if (type == 2) {
dateTime.addMonths(-num);
}
break;
case 'w':
if (type == 0) {
if (roundUp) {
dateTime.weekOfWeekyear().roundCeiling();
} else {
dateTime.weekOfWeekyear().roundFloor();
}
} else if (type == 1) {
dateTime.addWeeks(num);
} else if (type == 2) {
dateTime.addWeeks(-num);
}
break;
case 'd':
if (type == 0) {
if (roundUp) {
dateTime.dayOfMonth().roundCeiling();
} else {
dateTime.dayOfMonth().roundFloor();
}
} else if (type == 1) {
dateTime.addDays(num);
} else if (type == 2) {
dateTime.addDays(-num);
}
break;
case 'h':
case 'H':
if (type == 0) {
if (roundUp) {
dateTime.hourOfDay().roundCeiling();
} else {
dateTime.hourOfDay().roundFloor();
}
} else if (type == 1) {
dateTime.addHours(num);
} else if (type == 2) {
dateTime.addHours(-num);
}
break;
case 'm':
if (type == 0) {
if (roundUp) {
dateTime.minuteOfHour().roundCeiling();
} else {
dateTime.minuteOfHour().roundFloor();
}
} else if (type == 1) {
dateTime.addMinutes(num);
} else if (type == 2) {
dateTime.addMinutes(-num);
}
break;
case 's':
if (type == 0) {
if (roundUp) {
dateTime.secondOfMinute().roundCeiling();
} else {
dateTime.secondOfMinute().roundFloor();
}
} else if (type == 1) {
dateTime.addSeconds(num);
} else if (type == 2) {
dateTime.addSeconds(-num);
}
break;
default:
throw new ElasticsearchParseException("unit [" + unit + "] not supported for date math [" + mathString + "]");
}
}
} catch (Exception e) {
if (e instanceof ElasticsearchParseException) {
throw (ElasticsearchParseException) e;
}
throw new ElasticsearchParseException("failed to parse date math [" + mathString + "]", e);
}
return dateTime.getMillis();
}
private long parseStringValue(String value) {
try {
return dateTimeFormatter.parser().parseMillis(value);
} catch (RuntimeException e) {
try {
long time = Long.parseLong(value);
return timeUnit.toMillis(time);
} catch (NumberFormatException e1) {
throw new ElasticsearchParseException("failed to parse date field [" + value + "], tried both date format [" + dateTimeFormatter.format() + "], and timestamp number", e);
}
}
}
private long parseRoundCeilStringValue(String value) {
try {
// we create a date time for inclusive upper range, we "include" by default the day level data
// so something like 2011-01-01 will include the full first day of 2011.
// we also use 1970-01-01 as the base for it so we can handle searches like 10:12:55 (just time)
// since when we index those, the base is 1970-01-01
MutableDateTime dateTime = new MutableDateTime(1970, 1, 1, 23, 59, 59, 999, DateTimeZone.UTC);
int location = dateTimeFormatter.parser().parseInto(dateTime, value, 0);
// if we parsed all the string value, we are good
if (location == value.length()) {
return dateTime.getMillis();
}
// if we did not manage to parse, or the parsed year is unreasonably high,
// see if it's a number
if (location <= 0 || dateTime.getYear() > 5000) {
try {
long time = Long.parseLong(value);
return timeUnit.toMillis(time);
} catch (NumberFormatException e1) {
throw new ElasticsearchParseException("failed to parse date field [" + value + "], tried both date format [" + dateTimeFormatter.format() + "], and timestamp number", e1);
}
}
return dateTime.getMillis();
} catch (RuntimeException e) {
try {
long time = Long.parseLong(value);
return timeUnit.toMillis(time);
} catch (NumberFormatException e1) {
throw new ElasticsearchParseException("failed to parse date field [" + value + "], tried both date format [" + dateTimeFormatter.format() + "], and timestamp number", e);
}
}
}
} | 1no label
| src_main_java_org_elasticsearch_common_joda_DateMathParser.java |
3,145 | public class TxnPollBackupOperation extends QueueOperation {
long itemId;
public TxnPollBackupOperation() {
}
public TxnPollBackupOperation(String name, long itemId) {
super(name);
this.itemId = itemId;
}
@Override
public void run() throws Exception {
response = getOrCreateContainer().txnCommitPollBackup(itemId);
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
super.writeInternal(out);
out.writeLong(itemId);
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
super.readInternal(in);
itemId = in.readLong();
}
@Override
public int getId() {
return QueueDataSerializerHook.TXN_POLL_BACKUP;
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_queue_tx_TxnPollBackupOperation.java |
288 | @Repository("blDataDrivenEnumerationDao")
public class DataDrivenEnumerationDaoImpl implements DataDrivenEnumerationDao {
@PersistenceContext(unitName = "blPU")
protected EntityManager em;
@Resource(name = "blEntityConfiguration")
protected EntityConfiguration entityConfiguration;
@Override
public DataDrivenEnumeration readEnumByKey(String enumKey) {
TypedQuery<DataDrivenEnumeration> query = new TypedQueryBuilder<DataDrivenEnumeration>(DataDrivenEnumeration.class, "dde")
.addRestriction("dde.key", "=", enumKey)
.toQuery(em);
return query.getSingleResult();
}
@Override
public DataDrivenEnumerationValue readEnumValueByKey(String enumKey, String enumValueKey) {
TypedQuery<DataDrivenEnumerationValue> query =
new TypedQueryBuilder<DataDrivenEnumerationValue>(DataDrivenEnumerationValue.class, "ddev")
.addRestriction("ddev.type.key", "=", enumKey)
.addRestriction("ddev.key", "=", enumValueKey)
.toQuery(em);
return query.getSingleResult();
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_enumeration_dao_DataDrivenEnumerationDaoImpl.java |
708 | constructors[COLLECTION_REMOVE_LISTENER] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new CollectionRemoveListenerRequest();
}
}; | 0true
| hazelcast_src_main_java_com_hazelcast_collection_CollectionPortableHook.java |
1,057 | public static enum Flag {
// Do not change the order of these flags; we use
// the ordinal for encoding! Only append to the end!
Positions, Offsets, Payloads, FieldStatistics, TermStatistics;
} | 0true
| src_main_java_org_elasticsearch_action_termvector_TermVectorRequest.java |
592 | public class IndicesSegmentResponse extends BroadcastOperationResponse implements ToXContent {
private ShardSegments[] shards;
private Map<String, IndexSegments> indicesSegments;
IndicesSegmentResponse() {
}
IndicesSegmentResponse(ShardSegments[] shards, ClusterState clusterState, int totalShards, int successfulShards, int failedShards, List<ShardOperationFailedException> shardFailures) {
super(totalShards, successfulShards, failedShards, shardFailures);
this.shards = shards;
}
public Map<String, IndexSegments> getIndices() {
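// lazily group the per-shard segment stats by index name and cache the result for later calls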
if (indicesSegments != null) {
return indicesSegments;
}
Map<String, IndexSegments> indicesSegments = Maps.newHashMap();
Set<String> indices = Sets.newHashSet();
for (ShardSegments shard : shards) {
indices.add(shard.getIndex());
}
for (String index : indices) {
List<ShardSegments> shards = Lists.newArrayList();
for (ShardSegments shard : this.shards) {
if (shard.getShardRouting().index().equals(index)) {
shards.add(shard);
}
}
indicesSegments.put(index, new IndexSegments(index, shards.toArray(new ShardSegments[shards.size()])));
}
this.indicesSegments = indicesSegments;
return indicesSegments;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
shards = new ShardSegments[in.readVInt()];
for (int i = 0; i < shards.length; i++) {
shards[i] = ShardSegments.readShardSegments(in);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(shards.length);
for (ShardSegments shard : shards) {
shard.writeTo(out);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(Fields.INDICES);
for (IndexSegments indexSegments : getIndices().values()) {
builder.startObject(indexSegments.getIndex(), XContentBuilder.FieldCaseConversion.NONE);
builder.startObject(Fields.SHARDS);
for (IndexShardSegments indexSegment : indexSegments) {
builder.startArray(Integer.toString(indexSegment.getShardId().id()));
for (ShardSegments shardSegments : indexSegment) {
builder.startObject();
builder.startObject(Fields.ROUTING);
builder.field(Fields.STATE, shardSegments.getShardRouting().state());
builder.field(Fields.PRIMARY, shardSegments.getShardRouting().primary());
builder.field(Fields.NODE, shardSegments.getShardRouting().currentNodeId());
if (shardSegments.getShardRouting().relocatingNodeId() != null) {
builder.field(Fields.RELOCATING_NODE, shardSegments.getShardRouting().relocatingNodeId());
}
builder.endObject();
builder.field(Fields.NUM_COMMITTED_SEGMENTS, shardSegments.getNumberOfCommitted());
builder.field(Fields.NUM_SEARCH_SEGMENTS, shardSegments.getNumberOfSearch());
builder.startObject(Fields.SEGMENTS);
for (Segment segment : shardSegments) {
builder.startObject(segment.getName());
builder.field(Fields.GENERATION, segment.getGeneration());
builder.field(Fields.NUM_DOCS, segment.getNumDocs());
builder.field(Fields.DELETED_DOCS, segment.getDeletedDocs());
builder.byteSizeField(Fields.SIZE_IN_BYTES, Fields.SIZE, segment.getSizeInBytes());
builder.byteSizeField(Fields.MEMORY_IN_BYTES, Fields.MEMORY, segment.getMemoryInBytes());
builder.field(Fields.COMMITTED, segment.isCommitted());
builder.field(Fields.SEARCH, segment.isSearch());
if (segment.getVersion() != null) {
builder.field(Fields.VERSION, segment.getVersion());
}
if (segment.isCompound() != null) {
builder.field(Fields.COMPOUND, segment.isCompound());
}
if (segment.getMergeId() != null) {
builder.field(Fields.MERGE_ID, segment.getMergeId());
}
builder.endObject();
}
builder.endObject();
builder.endObject();
}
builder.endArray();
}
builder.endObject();
builder.endObject();
}
builder.endObject();
return builder;
}
static final class Fields {
static final XContentBuilderString INDICES = new XContentBuilderString("indices");
static final XContentBuilderString SHARDS = new XContentBuilderString("shards");
static final XContentBuilderString ROUTING = new XContentBuilderString("routing");
static final XContentBuilderString STATE = new XContentBuilderString("state");
static final XContentBuilderString PRIMARY = new XContentBuilderString("primary");
static final XContentBuilderString NODE = new XContentBuilderString("node");
static final XContentBuilderString RELOCATING_NODE = new XContentBuilderString("relocating_node");
static final XContentBuilderString SEGMENTS = new XContentBuilderString("segments");
static final XContentBuilderString GENERATION = new XContentBuilderString("generation");
static final XContentBuilderString NUM_COMMITTED_SEGMENTS = new XContentBuilderString("num_committed_segments");
static final XContentBuilderString NUM_SEARCH_SEGMENTS = new XContentBuilderString("num_search_segments");
static final XContentBuilderString NUM_DOCS = new XContentBuilderString("num_docs");
static final XContentBuilderString DELETED_DOCS = new XContentBuilderString("deleted_docs");
static final XContentBuilderString SIZE = new XContentBuilderString("size");
static final XContentBuilderString SIZE_IN_BYTES = new XContentBuilderString("size_in_bytes");
static final XContentBuilderString COMMITTED = new XContentBuilderString("committed");
static final XContentBuilderString SEARCH = new XContentBuilderString("search");
static final XContentBuilderString VERSION = new XContentBuilderString("version");
static final XContentBuilderString COMPOUND = new XContentBuilderString("compound");
static final XContentBuilderString MERGE_ID = new XContentBuilderString("merge_id");
static final XContentBuilderString MEMORY = new XContentBuilderString("memory");
static final XContentBuilderString MEMORY_IN_BYTES = new XContentBuilderString("memory_in_bytes");
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_indices_segments_IndicesSegmentResponse.java |
75 | private static class ResourceElement
{
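// bookkeeping for one XAResource enlisted in this transaction: its Xid, the resource itself and its enlistment status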
private Xid xid = null;
private XAResource resource = null;
private int status;
ResourceElement( Xid xid, XAResource resource )
{
this.xid = xid;
this.resource = resource;
status = RS_ENLISTED;
}
Xid getXid()
{
return xid;
}
XAResource getResource()
{
return resource;
}
int getStatus()
{
return status;
}
void setStatus( int status )
{
this.status = status;
}
@Override
public String toString()
{
String statusString;
switch ( status )
{
case RS_ENLISTED:
statusString = "ENLISTED";
break;
case RS_DELISTED:
statusString = "DELISTED";
break;
case RS_SUSPENDED:
statusString = "SUSPENDED";
break;
case RS_READONLY:
statusString = "READONLY";
break;
default:
statusString = "UNKNOWN";
}
return "Xid[" + xid + "] XAResource[" + resource + "] Status["
+ statusString + "]";
}
} | 0true
| community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_TransactionImpl.java |
841 | public class ReferenceWrapper {
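// simple holder for a Data value with CAS-style helpers; performs no synchronization of its own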
private Data value;
public Data get() {
return value;
}
public void set(Data value) {
this.value = value;
}
public boolean compareAndSet(Data expect, Data value) {
if (!contains(expect)) {
return false;
}
this.value = value;
return true;
}
public boolean contains(Data expected) {
if (value == null) {
return expected == null;
}
return value.equals(expected);
}
public Data getAndSet(Data value) {
Data tempValue = this.value;
this.value = value;
return tempValue;
}
public boolean isNull() {
return value == null;
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_concurrent_atomicreference_ReferenceWrapper.java |
653 | public final class OMVRBTreeIndexEngine<V> extends OSharedResourceAdaptiveExternal implements OIndexEngine<V> {
private int maxUpdatesBeforeSave;
private OMemoryWatchDog.Listener watchDog;
private OMVRBTreeDatabaseLazySave<Object, V> map;
public OMVRBTreeIndexEngine() {
super(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean(), OGlobalConfiguration.MVRBTREE_TIMEOUT
.getValueAsInteger(), true);
}
@Override
public void init() {
acquireExclusiveLock();
try {
watchDog = new OMemoryWatchDog.Listener() {
public void memoryUsageLow(final long iFreeMemory, final long iFreeMemoryPercentage) {
map.setOptimization(iFreeMemoryPercentage < 10 ? 2 : 1);
}
};
} finally {
releaseExclusiveLock();
}
}
@Override
public void flush() {
acquireExclusiveLock();
try {
map.lazySave();
} finally {
releaseExclusiveLock();
}
}
@Override
public void create(String indexName, OIndexDefinition indexDefinition, String clusterIndexName,
OStreamSerializer valueSerializer, boolean isAutomatic) {
acquireExclusiveLock();
try {
maxUpdatesBeforeSave = lazyUpdates(isAutomatic);
if (indexDefinition != null) {
if (indexDefinition instanceof ORuntimeKeyIndexDefinition) {
map = new OMVRBTreeDatabaseLazySave<Object, V>(clusterIndexName,
((ORuntimeKeyIndexDefinition) indexDefinition).getSerializer(), valueSerializer, 1, maxUpdatesBeforeSave);
} else {
final OBinarySerializer<?> keySerializer;
if (indexDefinition.getTypes().length > 1) {
keySerializer = OCompositeKeySerializer.INSTANCE;
} else {
keySerializer = OBinarySerializerFactory.INSTANCE.getObjectSerializer(indexDefinition.getTypes()[0]);
}
map = new OMVRBTreeDatabaseLazySave<Object, V>(clusterIndexName, (OBinarySerializer<Object>) keySerializer,
valueSerializer, indexDefinition.getTypes().length, maxUpdatesBeforeSave);
}
} else
map = new OMVRBTreeDatabaseLazySave<Object, V>(clusterIndexName, new OSimpleKeySerializer(), valueSerializer, 1,
maxUpdatesBeforeSave);
installHooks(indexName);
} finally {
releaseExclusiveLock();
}
}
private void installHooks(String indexName) {
final OProfilerMBean profiler = Orient.instance().getProfiler();
final String profilerPrefix = profiler.getDatabaseMetric(getDatabase().getName(), "index." + indexName + '.');
final String profilerMetadataPrefix = "db.*.index.*.";
profiler.registerHookValue(profilerPrefix + "items", "Index size", OProfiler.METRIC_TYPE.SIZE,
new OProfiler.OProfilerHookValue() {
public Object getValue() {
acquireSharedLock();
try {
return map != null ? map.size() : "-";
} finally {
releaseSharedLock();
}
}
}, profilerMetadataPrefix + "items");
profiler.registerHookValue(profilerPrefix + "entryPointSize", "Number of entrypoints in an index", OProfiler.METRIC_TYPE.SIZE,
new OProfiler.OProfilerHookValue() {
public Object getValue() {
return map != null ? map.getEntryPointSize() : "-";
}
}, profilerMetadataPrefix + "entryPointSize");
profiler.registerHookValue(profilerPrefix + "maxUpdateBeforeSave", "Maximum number of updates in a index before force saving",
OProfiler.METRIC_TYPE.SIZE, new OProfiler.OProfilerHookValue() {
public Object getValue() {
return map != null ? map.getMaxUpdatesBeforeSave() : "-";
}
}, profilerMetadataPrefix + "maxUpdateBeforeSave");
Orient.instance().getMemoryWatchDog().addListener(watchDog);
}
@Override
public void delete() {
acquireExclusiveLock();
try {
if (map != null)
map.delete();
} finally {
releaseExclusiveLock();
}
}
@Override
public void deleteWithoutLoad(String indexName) {
throw new UnsupportedOperationException("deleteWithoutLoad");
}
public void load(ORID indexRid, String indexName, OIndexDefinition indexDefinition, boolean isAutomatic) {
acquireExclusiveLock();
try {
maxUpdatesBeforeSave = lazyUpdates(isAutomatic);
map = new OMVRBTreeDatabaseLazySave<Object, V>(getDatabase(), indexRid, maxUpdatesBeforeSave);
map.load();
installHooks(indexName);
} finally {
releaseExclusiveLock();
}
}
@Override
public boolean contains(Object key) {
acquireExclusiveLock();
try {
return map.containsKey(key);
} finally {
releaseExclusiveLock();
}
}
@Override
public ORID getIdentity() {
acquireSharedLock();
try {
return ((OMVRBTreeProviderAbstract<Object, ?>) map.getProvider()).getRecord().getIdentity();
} finally {
releaseSharedLock();
}
}
@Override
public void clear() {
acquireExclusiveLock();
try {
map.clear();
} finally {
releaseExclusiveLock();
}
}
@Override
public boolean remove(Object key) {
acquireExclusiveLock();
try {
return map.remove(key) != null;
} finally {
releaseExclusiveLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> iterator() {
acquireExclusiveLock();
try {
return map.entrySet().iterator();
} finally {
releaseExclusiveLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> inverseIterator() {
acquireExclusiveLock();
try {
return ((OMVRBTree.EntrySet) map.entrySet()).inverseIterator();
} finally {
releaseExclusiveLock();
}
}
@Override
public Iterable<Object> keys() {
acquireExclusiveLock();
try {
return map.keySet();
} finally {
releaseExclusiveLock();
}
}
@Override
public void unload() {
acquireExclusiveLock();
try {
map.unload();
} finally {
releaseExclusiveLock();
}
}
@Override
public void startTransaction() {
acquireExclusiveLock();
try {
map.setRunningTransaction(true);
} finally {
releaseExclusiveLock();
}
}
@Override
public void stopTransaction() {
acquireExclusiveLock();
try {
map.setRunningTransaction(false);
} finally {
releaseExclusiveLock();
}
}
@Override
public void afterTxRollback() {
acquireExclusiveLock();
try {
map.unload();
} finally {
releaseExclusiveLock();
}
}
@Override
public void afterTxCommit() {
acquireExclusiveLock();
try {
map.onAfterTxCommit();
} finally {
releaseExclusiveLock();
}
}
@Override
public void closeDb() {
acquireExclusiveLock();
try {
map.commitChanges(true);
// TODO: investigate why unloading the map here causes loss of index entries
// map.unload();
} finally {
releaseExclusiveLock();
}
}
@Override
public void close() {
}
@Override
public void beforeTxBegin() {
acquireExclusiveLock();
try {
map.commitChanges(true);
} finally {
releaseExclusiveLock();
}
}
@Override
public V get(Object key) {
acquireExclusiveLock();
try {
return map.get(key);
} finally {
releaseExclusiveLock();
}
}
@Override
public void put(Object key, V value) {
acquireExclusiveLock();
try {
map.put(key, value);
} finally {
releaseExclusiveLock();
}
}
@Override
public void getValuesBetween(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive,
ValuesTransformer<V> transformer, ValuesResultListener valuesResultListener) {
acquireExclusiveLock();
try {
final OMVRBTreeEntry<Object, V> firstEntry;
if (fromInclusive)
firstEntry = map.getCeilingEntry(rangeFrom, OMVRBTree.PartialSearchMode.LOWEST_BOUNDARY);
else
firstEntry = map.getHigherEntry(rangeFrom);
if (firstEntry == null)
return;
final int firstEntryIndex = map.getPageIndex();
final OMVRBTreeEntry<Object, V> lastEntry;
if (toInclusive)
lastEntry = map.getHigherEntry(rangeTo);
else
lastEntry = map.getCeilingEntry(rangeTo, OMVRBTree.PartialSearchMode.LOWEST_BOUNDARY);
final int lastEntryIndex;
if (lastEntry != null)
lastEntryIndex = map.getPageIndex();
else
lastEntryIndex = -1;
OMVRBTreeEntry<Object, V> entry = firstEntry;
map.setPageIndex(firstEntryIndex);
while (entry != null && !(entry == lastEntry && map.getPageIndex() == lastEntryIndex)) {
final V value = entry.getValue();
boolean cont = addToResult(transformer, valuesResultListener, value);
if (!cont)
return;
entry = OMVRBTree.next(entry);
}
} finally {
releaseExclusiveLock();
}
}
@Override
public void getValuesMajor(Object fromKey, boolean isInclusive, ValuesTransformer<V> transformer,
ValuesResultListener valuesResultListener) {
acquireExclusiveLock();
try {
final OMVRBTreeEntry<Object, V> firstEntry;
if (isInclusive)
firstEntry = map.getCeilingEntry(fromKey, OMVRBTree.PartialSearchMode.LOWEST_BOUNDARY);
else
firstEntry = map.getHigherEntry(fromKey);
if (firstEntry == null)
return;
OMVRBTreeEntry<Object, V> entry = firstEntry;
while (entry != null) {
final V value = entry.getValue();
boolean cont = addToResult(transformer, valuesResultListener, value);
if (!cont)
return;
entry = OMVRBTree.next(entry);
}
} finally {
releaseExclusiveLock();
}
}
@Override
public void getValuesMinor(Object toKey, boolean isInclusive, ValuesTransformer<V> transformer,
ValuesResultListener valuesResultListener) {
acquireExclusiveLock();
try {
final OMVRBTreeEntry<Object, V> lastEntry;
if (isInclusive)
lastEntry = map.getFloorEntry(toKey, OMVRBTree.PartialSearchMode.HIGHEST_BOUNDARY);
else
lastEntry = map.getLowerEntry(toKey);
if (lastEntry == null)
return;
OMVRBTreeEntry<Object, V> entry = lastEntry;
while (entry != null) {
V value = entry.getValue();
boolean cont = addToResult(transformer, valuesResultListener, value);
if (!cont)
return;
entry = OMVRBTree.previous(entry);
}
} finally {
releaseExclusiveLock();
}
}
@Override
public void getEntriesMajor(Object fromKey, boolean isInclusive, ValuesTransformer<V> transformer,
EntriesResultListener entriesResultListener) {
acquireExclusiveLock();
try {
final OMVRBTreeEntry<Object, V> firstEntry;
if (isInclusive)
firstEntry = map.getCeilingEntry(fromKey, OMVRBTree.PartialSearchMode.LOWEST_BOUNDARY);
else
firstEntry = map.getHigherEntry(fromKey);
if (firstEntry == null)
return;
OMVRBTreeEntry<Object, V> entry = firstEntry;
while (entry != null) {
final Object key = entry.getKey();
final V value = entry.getValue();
boolean cont = addToEntriesResult(transformer, key, value, entriesResultListener);
if (!cont)
return;
entry = OMVRBTree.next(entry);
}
} finally {
releaseExclusiveLock();
}
}
@Override
public void getEntriesMinor(Object toKey, boolean isInclusive, ValuesTransformer<V> transformer,
EntriesResultListener entriesResultListener) {
acquireExclusiveLock();
try {
final OMVRBTreeEntry<Object, V> lastEntry;
if (isInclusive)
lastEntry = map.getFloorEntry(toKey, OMVRBTree.PartialSearchMode.HIGHEST_BOUNDARY);
else
lastEntry = map.getLowerEntry(toKey);
if (lastEntry == null)
return;
OMVRBTreeEntry<Object, V> entry = lastEntry;
while (entry != null) {
final Object key = entry.getKey();
final V value = entry.getValue();
boolean cont = addToEntriesResult(transformer, key, value, entriesResultListener);
if (!cont)
return;
entry = OMVRBTree.previous(entry);
}
} finally {
releaseExclusiveLock();
}
}
@Override
public void getEntriesBetween(Object iRangeFrom, Object iRangeTo, boolean iInclusive, ValuesTransformer<V> transformer,
EntriesResultListener entriesResultListener) {
acquireExclusiveLock();
try {
final OMVRBTreeEntry<Object, V> firstEntry;
if (iInclusive)
firstEntry = map.getCeilingEntry(iRangeFrom, OMVRBTree.PartialSearchMode.LOWEST_BOUNDARY);
else
firstEntry = map.getHigherEntry(iRangeFrom);
if (firstEntry == null)
return;
final int firstEntryIndex = map.getPageIndex();
final OMVRBTreeEntry<Object, V> lastEntry;
if (iInclusive)
lastEntry = map.getHigherEntry(iRangeTo);
else
lastEntry = map.getCeilingEntry(iRangeTo, OMVRBTree.PartialSearchMode.LOWEST_BOUNDARY);
final int lastEntryIndex;
if (lastEntry != null)
lastEntryIndex = map.getPageIndex();
else
lastEntryIndex = -1;
OMVRBTreeEntry<Object, V> entry = firstEntry;
map.setPageIndex(firstEntryIndex);
while (entry != null && !(entry == lastEntry && map.getPageIndex() == lastEntryIndex)) {
final Object key = entry.getKey();
final V value = entry.getValue();
boolean cont = addToEntriesResult(transformer, key, value, entriesResultListener);
if (!cont)
return;
entry = OMVRBTree.next(entry);
}
} finally {
releaseExclusiveLock();
}
}
@Override
public long size(ValuesTransformer<V> valuesTransformer) {
acquireExclusiveLock();
try {
if (valuesTransformer == null)
return map.size();
OMVRBTreeEntry<Object, V> rootEntry = map.getRoot();
long size = 0;
OMVRBTreeEntry<Object, V> currentEntry = rootEntry;
map.setPageIndex(0);
while (currentEntry != null) {
size += valuesTransformer.transformFromValue(currentEntry.getValue()).size();
currentEntry = OMVRBTree.next(currentEntry);
}
map.setPageIndex(0);
currentEntry = OMVRBTree.previous(rootEntry);
while (currentEntry != null) {
size += valuesTransformer.transformFromValue(currentEntry.getValue()).size();
currentEntry = OMVRBTree.previous(currentEntry);
}
return size;
} finally {
releaseExclusiveLock();
}
}
@Override
public long count(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive, int maxValuesToFetch,
ValuesTransformer<V> transformer) {
acquireExclusiveLock();
try {
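// walk the entries between the two bounds, counting transformed values; maxValuesToFetch of -1 means no limit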
final OMVRBTreeEntry<Object, V> firstEntry;
if (rangeFrom == null)
firstEntry = (OMVRBTreeEntry<Object, V>) map.firstEntry();
else if (fromInclusive)
firstEntry = map.getCeilingEntry(rangeFrom, OMVRBTree.PartialSearchMode.LOWEST_BOUNDARY);
else
firstEntry = map.getHigherEntry(rangeFrom);
if (firstEntry == null)
return 0;
long count = 0;
final int firstEntryIndex = map.getPageIndex();
final OMVRBTreeEntry<Object, V> lastEntry;
if (rangeTo == null)
lastEntry = (OMVRBTreeEntry<Object, V>) map.lastEntry();
else if (toInclusive)
lastEntry = map.getHigherEntry(rangeTo);
else
lastEntry = map.getCeilingEntry(rangeTo, OMVRBTree.PartialSearchMode.LOWEST_BOUNDARY);
final int lastEntryIndex;
if (lastEntry != null)
lastEntryIndex = map.getPageIndex();
else
lastEntryIndex = -1;
OMVRBTreeEntry<Object, V> entry = firstEntry;
map.setPageIndex(firstEntryIndex);
while (entry != null && !(entry == lastEntry && map.getPageIndex() == lastEntryIndex)) {
final V value = entry.getValue();
if (transformer != null)
count += transformer.transformFromValue(value).size();
else
count++;
if (maxValuesToFetch > -1 && maxValuesToFetch == count)
return maxValuesToFetch;
entry = OMVRBTree.next(entry);
}
return count;
} finally {
releaseExclusiveLock();
}
}
@Override
public Iterator<V> valuesIterator() {
acquireExclusiveLock();
try {
return map.values().iterator();
} finally {
releaseExclusiveLock();
}
}
@Override
public Iterator<V> inverseValuesIterator() {
acquireExclusiveLock();
try {
return ((OMVRBTree.Values) map.values()).inverseIterator();
} finally {
releaseExclusiveLock();
}
}
@Override
public boolean hasRangeQuerySupport() {
return true;
}
private boolean addToResult(ValuesTransformer<V> transformer, ValuesResultListener valuesResultListener, V value) {
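// fan the stored value out through the transformer (if any) and stop as soon as the listener rejects further results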
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
boolean cont = valuesResultListener.addResult(transformedValue);
if (!cont)
return false;
}
return true;
} else
return valuesResultListener.addResult((OIdentifiable) value);
}
private boolean addToEntriesResult(ValuesTransformer<V> transformer, Object key, V value,
EntriesResultListener entriesResultListener) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", transformedValue.getIdentity());
document.unsetDirty();
boolean cont = entriesResultListener.addResult(document);
if (!cont)
return false;
}
return true;
} else {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", ((OIdentifiable) value).getIdentity());
document.unsetDirty();
return entriesResultListener.addResult(document);
}
}
private ODatabaseRecord getDatabase() {
return ODatabaseRecordThreadLocal.INSTANCE.get();
}
private int lazyUpdates(boolean isAutomatic) {
return isAutomatic ? OGlobalConfiguration.INDEX_AUTO_LAZY_UPDATES.getValueAsInteger()
: OGlobalConfiguration.INDEX_MANUAL_LAZY_UPDATES.getValueAsInteger();
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_index_engine_OMVRBTreeIndexEngine.java |
424 | private class ClientJob<KeyIn, ValueIn> extends AbstractJob<KeyIn, ValueIn> {
public ClientJob(String name, KeyValueSource<KeyIn, ValueIn> keyValueSource) {
super(name, ClientMapReduceProxy.this, keyValueSource);
}
@Override
protected <T> JobCompletableFuture<T> invoke(final Collator collator) {
try {
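// submit the map-reduce request to a random member and bridge the invocation future into the job's completable future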
final String jobId = UuidUtil.buildRandomUuidString();
ClientContext context = getContext();
ClientInvocationService cis = context.getInvocationService();
ClientMapReduceRequest request = new ClientMapReduceRequest(name, jobId, keys,
predicate, mapper, combinerFactory, reducerFactory, keyValueSource,
chunkSize, topologyChangedStrategy);
final ClientCompletableFuture completableFuture = new ClientCompletableFuture(jobId);
ClientCallFuture future = (ClientCallFuture) cis.invokeOnRandomTarget(request, null);
future.andThen(new ExecutionCallback() {
@Override
public void onResponse(Object response) {
try {
if (collator != null) {
response = collator.collate(((Map) response).entrySet());
}
} finally {
completableFuture.setResult(response);
trackableJobs.remove(jobId);
}
}
@Override
public void onFailure(Throwable t) {
try {
if (t instanceof ExecutionException
&& t.getCause() instanceof CancellationException) {
t = t.getCause();
}
completableFuture.setResult(t);
} finally {
trackableJobs.remove(jobId);
}
}
});
Address runningMember = future.getConnection().getRemoteEndpoint();
trackableJobs.putIfAbsent(jobId, new ClientTrackableJob<T>(jobId, runningMember, completableFuture));
return completableFuture;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
} | 0true
| hazelcast-client_src_main_java_com_hazelcast_client_proxy_ClientMapReduceProxy.java |
536 | static class LicenseType {
private String name;
private String url;
public static LicenseType APACHE2 = new LicenseType("APACHE2", "http://www.apache.org/licenses/LICENSE-2.0.html");
public static LicenseType LGPL = new LicenseType("LGPL", "http://www.gnu.org/licenses/lgpl-3.0.html, http://www.gnu.org/licenses/lgpl-2.1.html,");
public static LicenseType MIT = new LicenseType("MIT", "http://opensource.org/licenses/MIT");
public static LicenseType JAVA_EXTENSION = new LicenseType("JAVA_EXTENSION", "n/a");
public static LicenseType OW2 = new LicenseType("OW2", "http://asm.ow2.org/license.html");
public static LicenseType XSTREAM_BSD = new LicenseType("XSTREAM_BSD", "http://xstream.codehaus.org/license.html");
public static LicenseType ANTLR_BSD = new LicenseType("ANTLR_BSD", "http://www.antlr.org/license.html");
public static LicenseType ANTISAMMY_BSD = new LicenseType("ANTISAMMY_BSD", "http://opensource.org/licenses/bsd-license.php");
public static LicenseType OTHER = new LicenseType("OTHER", "Unknown");
public static LicenseType ECLIPSE_PUBLIC = new LicenseType("ECLIPSE PUBLIC", "http://www.eclipse.org/legal/epl-v10.html");
public LicenseType(String name, String url) {
this.name = name;
this.url = url;
}
public String toString() {
return name + ":" + url;
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_util_PomEvaluator.java |
337 | public class ODatabaseFactory {
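// open database instances are tracked with weak references, mapped to the thread that registered them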
final WeakHashMap<ODatabaseComplex<?>, Thread> instances = new WeakHashMap<ODatabaseComplex<?>, Thread>();
public synchronized List<ODatabaseComplex<?>> getInstances(final String iDatabaseName) {
final List<ODatabaseComplex<?>> result = new ArrayList<ODatabaseComplex<?>>();
for (ODatabaseComplex<?> i : instances.keySet()) {
if (i != null && i.getName().equals(iDatabaseName))
result.add(i);
}
return result;
}
/**
* Registers a database.
*
* @param db
* @return
*/
public synchronized ODatabaseComplex<?> register(final ODatabaseComplex<?> db) {
instances.put(db, Thread.currentThread());
return db;
}
/**
* Unregisters a database.
*
* @param db
*/
public synchronized void unregister(final ODatabaseComplex<?> db) {
instances.remove(db);
}
/**
* Unregisters all the database instances that share the storage received as argument.
*
* @param iStorage
*/
public synchronized void unregister(final OStorage iStorage) {
for (ODatabaseComplex<?> db : new HashSet<ODatabaseComplex<?>>(instances.keySet())) {
if (db != null && db.getStorage() == iStorage) {
db.close();
instances.remove(db);
}
}
}
/**
* Closes all open databases.
*/
public synchronized void shutdown() {
if (instances.size() > 0) {
OLogManager.instance().debug(null,
"Found %d databases opened during OrientDB shutdown. Assure to always close database instances after usage",
instances.size());
for (ODatabaseComplex<?> db : new HashSet<ODatabaseComplex<?>>(instances.keySet())) {
if (db != null && !db.isClosed()) {
db.close();
}
}
}
}
public ODatabaseDocumentTx createDatabase(final String iType, final String url) {
if ("graph".equals(iType))
return new OGraphDatabase(url);
else
return new ODatabaseDocumentTx(url);
}
public ODatabaseDocumentTx createObjectDatabase(final String url) {
return new ODatabaseDocumentTx(url);
}
public OGraphDatabase createGraphDatabase(final String url) {
return new OGraphDatabase(url);
}
public ODatabaseDocumentTx createDocumentDatabase(final String url) {
return new ODatabaseDocumentTx(url);
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_db_ODatabaseFactory.java |
296 | public class OTraverseContext extends OBasicCommandContext {
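// holds the traversal state for a running TRAVERSE command: the process stack, the set of already visited RIDs and the current depth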
private Set<ORID> history = new HashSet<ORID>();
private List<OTraverseAbstractProcess<?>> stack = new ArrayList<OTraverseAbstractProcess<?>>();
private int depth = -1;
public void push(final OTraverseAbstractProcess<?> iProcess) {
stack.add(iProcess);
}
public Map<String, Object> getVariables() {
final HashMap<String, Object> map = new HashMap<String, Object>();
map.put("depth", depth);
map.put("path", getPath());
map.put("stack", stack);
// DELEGATE
map.putAll(super.getVariables());
return map;
}
public Object getVariable(final String iName) {
final String name = iName.trim().toUpperCase();
if ("DEPTH".startsWith(name))
return depth;
else if (name.startsWith("PATH"))
return ODocumentHelper.getFieldValue(getPath(), iName.substring("PATH".length()));
else if (name.startsWith("STACK"))
return ODocumentHelper.getFieldValue(stack, iName.substring("STACK".length()));
else if (name.startsWith("HISTORY"))
return ODocumentHelper.getFieldValue(history, iName.substring("HISTORY".length()));
else
// DELEGATE
return super.getVariable(iName);
}
public OTraverseAbstractProcess<?> pop() {
if (stack.isEmpty())
throw new IllegalStateException("Traverse stack is empty");
return stack.remove(stack.size() - 1);
}
public OTraverseAbstractProcess<?> peek() {
return stack.isEmpty() ? null : stack.get(stack.size() - 1);
}
public OTraverseAbstractProcess<?> peek(final int iFromLast) {
return stack.size() + iFromLast < 0 ? null : stack.get(stack.size() + iFromLast);
}
public void reset() {
stack.clear();
}
public boolean isAlreadyTraversed(final OIdentifiable identity) {
return history.contains(identity.getIdentity());
}
public void addTraversed(final OIdentifiable identity) {
history.add(identity.getIdentity());
}
public int incrementDepth() {
return ++depth;
}
public int decrementDepth() {
return --depth;
}
public String getPath() {
final StringBuilder buffer = new StringBuilder();
for (OTraverseAbstractProcess<?> process : stack) {
final String status = process.getStatus();
if (status != null) {
if (buffer.length() > 0 && !status.startsWith("["))
buffer.append('.');
buffer.append(status);
}
}
return buffer.toString();
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_command_traverse_OTraverseContext.java |
663 | @Repository("blProductOptionDao")
public class ProductOptionDaoImpl implements ProductOptionDao {
@PersistenceContext(unitName="blPU")
protected EntityManager em;
@Resource(name="blEntityConfiguration")
protected EntityConfiguration entityConfiguration;
@Override
public List<ProductOption> readAllProductOptions() {
TypedQuery<ProductOption> query = em.createNamedQuery("BC_READ_ALL_PRODUCT_OPTIONS", ProductOption.class);
return query.getResultList();
}
public ProductOption saveProductOption(ProductOption option) {
return em.merge(option);
}
@Override
public ProductOption readProductOptionById(Long id) {
return em.find(ProductOptionImpl.class, id);
}
@Override
public ProductOptionValue readProductOptionValueById(Long id) {
return em.find(ProductOptionValueImpl.class, id);
}
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_catalog_dao_ProductOptionDaoImpl.java |