Unnamed: 0
int64
0
6.45k
func
stringlengths
37
143k
target
class label
2 classes
project
stringlengths
33
157
740
public class ProductBundlePricingModelType implements Serializable, BroadleafEnumerationType { private static final long serialVersionUID = 1L; private static final Map<String, ProductBundlePricingModelType> TYPES = new LinkedHashMap<String, ProductBundlePricingModelType>(); public static final ProductBundlePricingModelType ITEM_SUM = new ProductBundlePricingModelType("ITEM_SUM","Item Sum"); public static final ProductBundlePricingModelType BUNDLE = new ProductBundlePricingModelType("BUNDLE","Bundle"); public static ProductBundlePricingModelType getInstance(final String type) { return TYPES.get(type); } private String type; private String friendlyType; public ProductBundlePricingModelType() { //do nothing } public ProductBundlePricingModelType(final String type, final String friendlyType) { this.friendlyType = friendlyType; setType(type); } public String getType() { return type; } public String getFriendlyType() { return friendlyType; } private void setType(final String type) { this.type = type; if (!TYPES.containsKey(type)) { TYPES.put(type, this); } } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((type == null) ? 0 : type.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; ProductBundlePricingModelType other = (ProductBundlePricingModelType) obj; if (type == null) { if (other.type != null) return false; } else if (!type.equals(other.type)) return false; return true; } }
1no label
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_catalog_service_type_ProductBundlePricingModelType.java
657
public class PutIndexTemplateResponse extends AcknowledgedResponse { PutIndexTemplateResponse() { } PutIndexTemplateResponse(boolean acknowledged) { super(acknowledged); } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); readAcknowledged(in); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); writeAcknowledged(out); } }
0true
src_main_java_org_elasticsearch_action_admin_indices_template_put_PutIndexTemplateResponse.java
74
public abstract class CallableClientRequest extends ClientRequest implements Callable { @Override final void process() throws Exception { ClientEndpoint endpoint = getEndpoint(); try { Object result = call(); endpoint.sendResponse(result, getCallId()); } catch (Exception e) { clientEngine.getLogger(getClass()).warning(e); endpoint.sendResponse(e, getCallId()); } } }
0true
hazelcast_src_main_java_com_hazelcast_client_CallableClientRequest.java
29
new Visitor() { @Override public void visit(Tree.InvocationExpression that) { Tree.ArgumentList al = that.getPositionalArgumentList(); if (al==null) { al = that.getNamedArgumentList(); } if (al!=null) { Integer startIndex = al.getStartIndex(); Integer startIndex2 = node.getStartIndex(); if (startIndex!=null && startIndex2!=null && startIndex.intValue()==startIndex2.intValue()) { Tree.Primary primary = that.getPrimary(); if (primary instanceof Tree.MemberOrTypeExpression) { Tree.MemberOrTypeExpression mte = (Tree.MemberOrTypeExpression) primary; if (mte.getDeclaration()!=null && mte.getTarget()!=null) { result.add(new ParameterInfo(al.getStartIndex(), mte.getDeclaration(), mte.getTarget(), node.getScope(), cpc, al instanceof Tree.NamedArgumentList)); } } } } super.visit(that); } }.visit(cpc.getRootNode());
0true
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_InvocationCompletionProposal.java
1,102
public class OSQLFunctionLast extends OSQLFunctionConfigurableAbstract { public static final String NAME = "last"; private Object last; public OSQLFunctionLast() { super(NAME, 1, 1); } public Object execute(final OIdentifiable iCurrentRecord, Object iCurrentResult, final Object[] iParameters, final OCommandContext iContext) { Object value = iParameters[0]; if (value instanceof OSQLFilterItem) value = ((OSQLFilterItem) value).getValue(iCurrentRecord, iContext); if (OMultiValue.isMultiValue(value)) value = OMultiValue.getLastValue(value); last = value; return value; } public boolean aggregateResults() { return configuredParameters.length == 1; } @Override public Object getResult() { return last; } @Override public boolean filterResult() { return true; } public String getSyntax() { return "Syntax error: last(<field>)"; } }
1no label
core_src_main_java_com_orientechnologies_orient_core_sql_functions_coll_OSQLFunctionLast.java
1,136
public class OSQLMethodAsFloat extends OAbstractSQLMethod { public static final String NAME = "asfloat"; public OSQLMethodAsFloat() { super(NAME); } @Override public Object execute(OIdentifiable iCurrentRecord, OCommandContext iContext, Object ioResult, Object[] iMethodParams) { if (ioResult instanceof Number) { ioResult = ((Number) ioResult).floatValue(); } else { ioResult = ioResult != null ? new Float(ioResult.toString().trim()) : null; } return ioResult; } }
1no label
core_src_main_java_com_orientechnologies_orient_core_sql_method_misc_OSQLMethodAsFloat.java
141
@Test public class DecimalSerializerTest { private final static int FIELD_SIZE = 9; private static final BigDecimal OBJECT = new BigDecimal(new BigInteger("20"), 2); private ODecimalSerializer decimalSerializer; private static final byte[] stream = new byte[FIELD_SIZE]; @BeforeClass public void beforeClass() { decimalSerializer = new ODecimalSerializer(); } public void testFieldSize() { Assert.assertEquals(decimalSerializer.getObjectSize(OBJECT), FIELD_SIZE); } public void testSerialize() { decimalSerializer.serialize(OBJECT, stream, 0); Assert.assertEquals(decimalSerializer.deserialize(stream, 0), OBJECT); } public void testSerializeNative() { decimalSerializer.serializeNative(OBJECT, stream, 0); Assert.assertEquals(decimalSerializer.deserializeNative(stream, 0), OBJECT); } public void testNativeDirectMemoryCompatibility() { decimalSerializer.serializeNative(OBJECT, stream, 0); ODirectMemoryPointer pointer = new ODirectMemoryPointer(stream); try { Assert.assertEquals(decimalSerializer.deserializeFromDirectMemory(pointer, 0), OBJECT); } finally { pointer.free(); } } }
0true
commons_src_test_java_com_orientechnologies_common_serialization_types_DecimalSerializerTest.java
3,984
public static abstract class AbstractDistanceScoreFunction extends ScoreFunction { private final double scale; protected final double offset; private final DecayFunction func; public AbstractDistanceScoreFunction(double userSuppiedScale, double decay, double offset, DecayFunction func) { super(CombineFunction.MULT); if (userSuppiedScale <= 0.0) { throw new ElasticsearchIllegalArgumentException(FunctionScoreQueryParser.NAME + " : scale must be > 0.0."); } if (decay <= 0.0 || decay >= 1.0) { throw new ElasticsearchIllegalArgumentException(FunctionScoreQueryParser.NAME + " : decay must be in the range [0..1]."); } this.scale = func.processScale(userSuppiedScale, decay); this.func = func; if (offset < 0.0d) { throw new ElasticsearchIllegalArgumentException(FunctionScoreQueryParser.NAME + " : offset must be > 0.0"); } this.offset = offset; } @Override public double score(int docId, float subQueryScore) { double value = distance(docId); return func.evaluate(value, scale); } /** * This function computes the distance from a defined origin. Since * the value of the document is read from the index, it cannot be * guaranteed that the value actually exists. If it does not, we assume * the user handles this case in the query and return 0. * */ protected abstract double distance(int docId); protected abstract String getDistanceString(int docId); protected abstract String getFieldName(); @Override public Explanation explainScore(int docId, Explanation subQueryExpl) { ComplexExplanation ce = new ComplexExplanation(); ce.setValue(CombineFunction.toFloat(score(docId, subQueryExpl.getValue()))); ce.setMatch(true); ce.setDescription("Function for field " + getFieldName() + ":"); ce.addDetail(func.explainFunction(getDistanceString(docId), distance(docId), scale)); return ce; } }
1no label
src_main_java_org_elasticsearch_index_query_functionscore_DecayFunctionParser.java
224
@LuceneTestCase.SuppressCodecs({"MockFixedIntBlock", "MockVariableIntBlock", "MockSep", "MockRandom", "Lucene3x"}) public class XPostingsHighlighterTests extends ElasticsearchLuceneTestCase { /* Tests changes needed to make possible to perform discrete highlighting. We want to highlight every field value separately in case of multiple values, at least when needing to return the whole field content This is needed to be able to get back a single snippet per value when number_of_fragments=0 */ @Test public void testDiscreteHighlightingPerValue() throws Exception { Directory dir = newDirectory(); IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); FieldType offsetsType = new FieldType(TextField.TYPE_STORED); offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); Field body = new Field("body", "", offsetsType); final String firstValue = "This is a test. 
Just a test highlighting from postings highlighter."; Document doc = new Document(); doc.add(body); body.setStringValue(firstValue); final String secondValue = "This is the second value to perform highlighting on."; Field body2 = new Field("body", "", offsetsType); doc.add(body2); body2.setStringValue(secondValue); final String thirdValue = "This is the third value to test highlighting with postings."; Field body3 = new Field("body", "", offsetsType); doc.add(body3); body3.setStringValue(thirdValue); iw.addDocument(doc); IndexReader ir = iw.getReader(); iw.close(); IndexSearcher searcher = newSearcher(ir); XPostingsHighlighter highlighter = new XPostingsHighlighter() { @Override protected BreakIterator getBreakIterator(String field) { return new WholeBreakIterator(); } @Override protected char getMultiValuedSeparator(String field) { //U+2029 PARAGRAPH SEPARATOR (PS): each value holds a discrete passage for highlighting return 8233; } }; Query query = new TermQuery(new Term("body", "highlighting")); TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); assertThat(topDocs.totalHits, equalTo(1)); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertThat(snippets.length, equalTo(1)); String firstHlValue = "This is a test. 
Just a test <b>highlighting</b> from postings highlighter."; String secondHlValue = "This is the second value to perform <b>highlighting</b> on."; String thirdHlValue = "This is the third value to test <b>highlighting</b> with postings."; //default behaviour: using the WholeBreakIterator, despite the multi valued paragraph separator we get back a single snippet for multiple values assertThat(snippets[0], equalTo(firstHlValue + (char)8233 + secondHlValue + (char)8233 + thirdHlValue)); highlighter = new XPostingsHighlighter() { Iterator<String> valuesIterator = Arrays.asList(firstValue, secondValue, thirdValue).iterator(); Iterator<Integer> offsetsIterator = Arrays.asList(0, firstValue.length() + 1, firstValue.length() + secondValue.length() + 2).iterator(); @Override protected String[][] loadFieldValues(IndexSearcher searcher, String[] fields, int[] docids, int maxLength) throws IOException { return new String[][]{new String[]{valuesIterator.next()}}; } @Override protected int getOffsetForCurrentValue(String field, int docId) { return offsetsIterator.next(); } @Override protected BreakIterator getBreakIterator(String field) { return new WholeBreakIterator(); } }; //first call using the WholeBreakIterator, we get now only the first value properly highlighted as we wish snippets = highlighter.highlight("body", query, searcher, topDocs); assertThat(snippets.length, equalTo(1)); assertThat(snippets[0], equalTo(firstHlValue)); //second call using the WholeBreakIterator, we get now only the second value properly highlighted as we wish snippets = highlighter.highlight("body", query, searcher, topDocs); assertThat(snippets.length, equalTo(1)); assertThat(snippets[0], equalTo(secondHlValue)); //third call using the WholeBreakIterator, we get now only the third value properly highlighted as we wish snippets = highlighter.highlight("body", query, searcher, topDocs); assertThat(snippets.length, equalTo(1)); assertThat(snippets[0], equalTo(thirdHlValue)); ir.close(); 
dir.close(); } @Test public void testDiscreteHighlightingPerValue_secondValueWithoutMatches() throws Exception { Directory dir = newDirectory(); IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); FieldType offsetsType = new FieldType(TextField.TYPE_STORED); offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); Field body = new Field("body", "", offsetsType); final String firstValue = "This is a test. Just a test highlighting from postings highlighter."; Document doc = new Document(); doc.add(body); body.setStringValue(firstValue); final String secondValue = "This is the second value without matches."; Field body2 = new Field("body", "", offsetsType); doc.add(body2); body2.setStringValue(secondValue); final String thirdValue = "This is the third value to test highlighting with postings."; Field body3 = new Field("body", "", offsetsType); doc.add(body3); body3.setStringValue(thirdValue); iw.addDocument(doc); IndexReader ir = iw.getReader(); iw.close(); IndexSearcher searcher = newSearcher(ir); Query query = new TermQuery(new Term("body", "highlighting")); TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); assertThat(topDocs.totalHits, equalTo(1)); XPostingsHighlighter highlighter = new XPostingsHighlighter() { @Override protected BreakIterator getBreakIterator(String field) { return new WholeBreakIterator(); } @Override protected char getMultiValuedSeparator(String field) { //U+2029 PARAGRAPH SEPARATOR (PS): each value holds a discrete passage for highlighting return 8233; } @Override protected Passage[] getEmptyHighlight(String fieldName, BreakIterator bi, int maxPassages) { return new Passage[0]; } }; String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertThat(snippets.length, equalTo(1)); String firstHlValue = "This is a test. 
Just a test <b>highlighting</b> from postings highlighter."; String thirdHlValue = "This is the third value to test <b>highlighting</b> with postings."; //default behaviour: using the WholeBreakIterator, despite the multi valued paragraph separator we get back a single snippet for multiple values //but only the first and the third value are returned since there are no matches in the second one. assertThat(snippets[0], equalTo(firstHlValue + (char)8233 + secondValue + (char)8233 + thirdHlValue)); highlighter = new XPostingsHighlighter() { Iterator<String> valuesIterator = Arrays.asList(firstValue, secondValue, thirdValue).iterator(); Iterator<Integer> offsetsIterator = Arrays.asList(0, firstValue.length() + 1, firstValue.length() + secondValue.length() + 2).iterator(); @Override protected String[][] loadFieldValues(IndexSearcher searcher, String[] fields, int[] docids, int maxLength) throws IOException { return new String[][]{new String[]{valuesIterator.next()}}; } @Override protected int getOffsetForCurrentValue(String field, int docId) { return offsetsIterator.next(); } @Override protected BreakIterator getBreakIterator(String field) { return new WholeBreakIterator(); } @Override protected Passage[] getEmptyHighlight(String fieldName, BreakIterator bi, int maxPassages) { return new Passage[0]; } }; //first call using the WholeBreakIterator, we get now only the first value properly highlighted as we wish snippets = highlighter.highlight("body", query, searcher, topDocs); assertThat(snippets.length, equalTo(1)); assertThat(snippets[0], equalTo(firstHlValue)); //second call using the WholeBreakIterator, we get now nothing back because there's nothing to highlight in the second value snippets = highlighter.highlight("body", query, searcher, topDocs); assertThat(snippets.length, equalTo(1)); assertThat(snippets[0], nullValue()); //third call using the WholeBreakIterator, we get now only the third value properly highlighted as we wish snippets = 
highlighter.highlight("body", query, searcher, topDocs); assertThat(snippets.length, equalTo(1)); assertThat(snippets[0], equalTo(thirdHlValue)); ir.close(); dir.close(); } @Test public void testDiscreteHighlightingPerValue_MultipleMatches() throws Exception { Directory dir = newDirectory(); IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); FieldType offsetsType = new FieldType(TextField.TYPE_STORED); offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); Field body = new Field("body", "", offsetsType); final String firstValue = "This is a highlighting test. Just a test highlighting from postings highlighter."; Document doc = new Document(); doc.add(body); body.setStringValue(firstValue); final String secondValue = "This is the second highlighting value to test highlighting with postings."; Field body2 = new Field("body", "", offsetsType); doc.add(body2); body2.setStringValue(secondValue); iw.addDocument(doc); IndexReader ir = iw.getReader(); iw.close(); IndexSearcher searcher = newSearcher(ir); Query query = new TermQuery(new Term("body", "highlighting")); TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); assertThat(topDocs.totalHits, equalTo(1)); String firstHlValue = "This is a <b>highlighting</b> test. 
Just a test <b>highlighting</b> from postings highlighter."; String secondHlValue = "This is the second <b>highlighting</b> value to test <b>highlighting</b> with postings."; XPostingsHighlighter highlighter = new XPostingsHighlighter() { Iterator<String> valuesIterator = Arrays.asList(firstValue, secondValue).iterator(); Iterator<Integer> offsetsIterator = Arrays.asList(0, firstValue.length() + 1).iterator(); @Override protected String[][] loadFieldValues(IndexSearcher searcher, String[] fields, int[] docids, int maxLength) throws IOException { return new String[][]{new String[]{valuesIterator.next()}}; } @Override protected int getOffsetForCurrentValue(String field, int docId) { return offsetsIterator.next(); } @Override protected BreakIterator getBreakIterator(String field) { return new WholeBreakIterator(); } @Override protected Passage[] getEmptyHighlight(String fieldName, BreakIterator bi, int maxPassages) { return new Passage[0]; } }; //first call using the WholeBreakIterator, we get now only the first value properly highlighted as we wish String[] snippets = highlighter.highlight("body", query, searcher, topDocs); assertThat(snippets.length, equalTo(1)); assertThat(snippets[0], equalTo(firstHlValue)); //second call using the WholeBreakIterator, we get now only the second value properly highlighted as we wish snippets = highlighter.highlight("body", query, searcher, topDocs); assertThat(snippets.length, equalTo(1)); assertThat(snippets[0], equalTo(secondHlValue)); ir.close(); dir.close(); } @Test public void testDiscreteHighlightingPerValue_MultipleQueryTerms() throws Exception { Directory dir = newDirectory(); IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); FieldType offsetsType = new FieldType(TextField.TYPE_STORED); 
offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); Field body = new Field("body", "", offsetsType); final String firstValue = "This is the first sentence. This is the second sentence."; Document doc = new Document(); doc.add(body); body.setStringValue(firstValue); final String secondValue = "This is the third sentence. This is the fourth sentence."; Field body2 = new Field("body", "", offsetsType); doc.add(body2); body2.setStringValue(secondValue); final String thirdValue = "This is the fifth sentence"; Field body3 = new Field("body", "", offsetsType); doc.add(body3); body3.setStringValue(thirdValue); iw.addDocument(doc); IndexReader ir = iw.getReader(); iw.close(); IndexSearcher searcher = newSearcher(ir); BooleanQuery query = new BooleanQuery(); query.add(new BooleanClause(new TermQuery(new Term("body", "third")), BooleanClause.Occur.SHOULD)); query.add(new BooleanClause(new TermQuery(new Term("body", "seventh")), BooleanClause.Occur.SHOULD)); query.add(new BooleanClause(new TermQuery(new Term("body", "fifth")), BooleanClause.Occur.SHOULD)); query.setMinimumNumberShouldMatch(1); TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); assertThat(topDocs.totalHits, equalTo(1)); String secondHlValue = "This is the <b>third</b> sentence. 
This is the fourth sentence."; String thirdHlValue = "This is the <b>fifth</b> sentence"; XPostingsHighlighter highlighter = new XPostingsHighlighter() { Iterator<String> valuesIterator = Arrays.asList(firstValue, secondValue, thirdValue).iterator(); Iterator<Integer> offsetsIterator = Arrays.asList(0, firstValue.length() + 1, secondValue.length() + 1).iterator(); @Override protected String[][] loadFieldValues(IndexSearcher searcher, String[] fields, int[] docids, int maxLength) throws IOException { return new String[][]{new String[]{valuesIterator.next()}}; } @Override protected int getOffsetForCurrentValue(String field, int docId) { return offsetsIterator.next(); } @Override protected BreakIterator getBreakIterator(String field) { return new WholeBreakIterator(); } @Override protected Passage[] getEmptyHighlight(String fieldName, BreakIterator bi, int maxPassages) { return new Passage[0]; } }; //first call using the WholeBreakIterator, we get now null as the first value doesn't hold any match String[] snippets = highlighter.highlight("body", query, searcher, topDocs); assertThat(snippets.length, equalTo(1)); assertThat(snippets[0], nullValue()); //second call using the WholeBreakIterator, we get now only the second value properly highlighted as we wish snippets = highlighter.highlight("body", query, searcher, topDocs); assertThat(snippets.length, equalTo(1)); assertThat(snippets[0], equalTo(secondHlValue)); //second call using the WholeBreakIterator, we get now only the third value properly highlighted as we wish snippets = highlighter.highlight("body", query, searcher, topDocs); assertThat(snippets.length, equalTo(1)); assertThat(snippets[0], equalTo(thirdHlValue)); ir.close(); dir.close(); } /* The following are tests that we added to make sure that certain behaviours are possible using the postings highlighter They don't require our forked version, but only custom versions of methods that can be overridden and are already exposed to subclasses */ /* Tests that 
it's possible to obtain different fragments per document instead of a big string of concatenated fragments. We use our own PassageFormatter for that and override the getFormatter method. */ @Test public void testCustomPassageFormatterMultipleFragments() throws Exception { Directory dir = newDirectory(); IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); FieldType offsetsType = new FieldType(TextField.TYPE_STORED); offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); Field body = new Field("body", "", offsetsType); Document doc = new Document(); doc.add(body); body.setStringValue("This test is another test. Not a good sentence. Test test test test."); iw.addDocument(doc); IndexReader ir = iw.getReader(); iw.close(); XPostingsHighlighter highlighter = new XPostingsHighlighter(); IndexSearcher searcher = newSearcher(ir); Query query = new TermQuery(new Term("body", "test")); TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); assertThat(topDocs.totalHits, equalTo(1)); String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 5); assertThat(snippets.length, equalTo(1)); //default behaviour that we want to change assertThat(snippets[0], equalTo("This <b>test</b> is another test. ... 
<b>Test</b> <b>test</b> <b>test</b> test.")); final CustomPassageFormatter passageFormatter = new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder()); highlighter = new XPostingsHighlighter() { @Override protected PassageFormatter getFormatter(String field) { return passageFormatter; } }; final ScoreDoc scoreDocs[] = topDocs.scoreDocs; int docids[] = new int[scoreDocs.length]; int maxPassages[] = new int[scoreDocs.length]; for (int i = 0; i < docids.length; i++) { docids[i] = scoreDocs[i].doc; maxPassages[i] = 5; } Map<String, Object[]> highlights = highlighter.highlightFieldsAsObjects(new String[]{"body"}, query, searcher, docids, maxPassages); assertThat(highlights, notNullValue()); assertThat(highlights.size(), equalTo(1)); Object[] objectSnippets = highlights.get("body"); assertThat(objectSnippets, notNullValue()); assertThat(objectSnippets.length, equalTo(1)); assertThat(objectSnippets[0], instanceOf(Snippet[].class)); Snippet[] snippetsSnippet = (Snippet[]) objectSnippets[0]; assertThat(snippetsSnippet.length, equalTo(2)); //multiple fragments as we wish assertThat(snippetsSnippet[0].getText(), equalTo("This <b>test</b> is another test.")); assertThat(snippetsSnippet[1].getText(), equalTo("<b>Test</b> <b>test</b> <b>test</b> test.")); ir.close(); dir.close(); } /* Tests that it's possible to return no fragments when there's nothing to highlight We do that by overriding the getEmptyHighlight method */ @Test public void testHighlightWithNoMatches() throws Exception { Directory dir = newDirectory(); IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); FieldType offsetsType = new FieldType(TextField.TYPE_STORED); offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); Field body = new Field("body", "", offsetsType); Field none = new Field("none", "", offsetsType); 
Document doc = new Document(); doc.add(body); doc.add(none); body.setStringValue("This is a test. Just a test highlighting from postings. Feel free to ignore."); none.setStringValue(body.stringValue()); iw.addDocument(doc); body.setStringValue("Highlighting the first term. Hope it works."); none.setStringValue(body.stringValue()); iw.addDocument(doc); IndexReader ir = iw.getReader(); iw.close(); IndexSearcher searcher = newSearcher(ir); XPostingsHighlighter highlighter = new XPostingsHighlighter(); Query query = new TermQuery(new Term("none", "highlighting")); TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); assertThat(topDocs.totalHits, equalTo(2)); String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 1); //Two null snippets if there are no matches (thanks to our own custom passage formatter) assertThat(snippets.length, equalTo(2)); //default behaviour: returns the first sentence with num passages = 1 assertThat(snippets[0], equalTo("This is a test. ")); assertThat(snippets[1], equalTo("Highlighting the first term. 
")); highlighter = new XPostingsHighlighter() { @Override protected Passage[] getEmptyHighlight(String fieldName, BreakIterator bi, int maxPassages) { return new Passage[0]; } }; snippets = highlighter.highlight("body", query, searcher, topDocs); //Two null snippets if there are no matches, as we wish assertThat(snippets.length, equalTo(2)); assertThat(snippets[0], nullValue()); assertThat(snippets[1], nullValue()); ir.close(); dir.close(); } /* Tests that it's possible to avoid having fragments that span across different values We do that by overriding the getMultiValuedSeparator and using a proper separator between values */ @Test public void testCustomMultiValuedSeparator() throws Exception { Directory dir = newDirectory(); IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); FieldType offsetsType = new FieldType(TextField.TYPE_STORED); offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); Field body = new Field("body", "", offsetsType); Document doc = new Document(); doc.add(body); body.setStringValue("This is a test. 
Just a test highlighting from postings"); Field body2 = new Field("body", "", offsetsType); doc.add(body2); body2.setStringValue("highlighter."); iw.addDocument(doc); IndexReader ir = iw.getReader(); iw.close(); IndexSearcher searcher = newSearcher(ir); XPostingsHighlighter highlighter = new XPostingsHighlighter(); Query query = new TermQuery(new Term("body", "highlighting")); TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); assertThat(topDocs.totalHits, equalTo(1)); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertThat(snippets.length, equalTo(1)); //default behaviour: getting a fragment that spans across different values assertThat(snippets[0], equalTo("Just a test <b>highlighting</b> from postings highlighter.")); highlighter = new XPostingsHighlighter() { @Override protected char getMultiValuedSeparator(String field) { //U+2029 PARAGRAPH SEPARATOR (PS): each value holds a discrete passage for highlighting return 8233; } }; snippets = highlighter.highlight("body", query, searcher, topDocs); assertThat(snippets.length, equalTo(1)); //getting a fragment that doesn't span across different values since we used the paragraph separator between the different values assertThat(snippets[0], equalTo("Just a test <b>highlighting</b> from postings" + (char)8233)); ir.close(); dir.close(); } /* The following are all the existing postings highlighter tests, to make sure we don't have regression in our own fork */ @Test public void testBasics() throws Exception { Directory dir = newDirectory(); IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); FieldType offsetsType = new FieldType(TextField.TYPE_STORED); offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); Field body = new Field("body", "", offsetsType); Document doc = new 
Document(); doc.add(body); body.setStringValue("This is a test. Just a test highlighting from postings. Feel free to ignore."); iw.addDocument(doc); body.setStringValue("Highlighting the first term. Hope it works."); iw.addDocument(doc); IndexReader ir = iw.getReader(); iw.close(); IndexSearcher searcher = newSearcher(ir); XPostingsHighlighter highlighter = new XPostingsHighlighter(); Query query = new TermQuery(new Term("body", "highlighting")); TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); assertEquals("Just a test <b>highlighting</b> from postings. ", snippets[0]); assertEquals("<b>Highlighting</b> the first term. ", snippets[1]); ir.close(); dir.close(); } public void testFormatWithMatchExceedingContentLength2() throws Exception { String bodyText = "123 TEST 01234 TEST"; String[] snippets = formatWithMatchExceedingContentLength(bodyText); assertEquals(1, snippets.length); assertEquals("123 <b>TEST</b> 01234 TE", snippets[0]); } public void testFormatWithMatchExceedingContentLength3() throws Exception { String bodyText = "123 5678 01234 TEST TEST"; String[] snippets = formatWithMatchExceedingContentLength(bodyText); assertEquals(1, snippets.length); assertEquals("123 5678 01234 TE", snippets[0]); } public void testFormatWithMatchExceedingContentLength() throws Exception { String bodyText = "123 5678 01234 TEST"; String[] snippets = formatWithMatchExceedingContentLength(bodyText); assertEquals(1, snippets.length); // LUCENE-5166: no snippet assertEquals("123 5678 01234 TE", snippets[0]); } private String[] formatWithMatchExceedingContentLength(String bodyText) throws IOException { int maxLength = 17; final Analyzer analyzer = new MockAnalyzer(random()); Directory dir = newDirectory(); IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); 
iwc.setMergePolicy(newLogMergePolicy());
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

        final FieldType fieldType = new FieldType(TextField.TYPE_STORED);
        fieldType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
        final Field body = new Field("body", bodyText, fieldType);
        Document doc = new Document();
        doc.add(body);
        iw.addDocument(doc);

        IndexReader ir = iw.getReader();
        iw.close();

        IndexSearcher searcher = newSearcher(ir);
        Query query = new TermQuery(new Term("body", "test"));
        TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
        assertEquals(1, topDocs.totalHits);

        XPostingsHighlighter highlighter = new XPostingsHighlighter(maxLength);
        String snippets[] = highlighter.highlight("body", query, searcher, topDocs);

        ir.close();
        dir.close();
        return snippets;
    }

    // simple test highlighting last word.
    public void testHighlightLastWord() throws Exception {
        Directory dir = newDirectory();
        IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
        iwc.setMergePolicy(newLogMergePolicy());
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

        FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
        offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
        Field body = new Field("body", "", offsetsType);
        Document doc = new Document();
        doc.add(body);

        body.setStringValue("This is a test");
        iw.addDocument(doc);

        IndexReader ir = iw.getReader();
        iw.close();

        IndexSearcher searcher = newSearcher(ir);
        XPostingsHighlighter highlighter = new XPostingsHighlighter();
        Query query = new TermQuery(new Term("body", "test"));
        TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
        assertEquals(1, topDocs.totalHits);
        String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
        assertEquals(1, snippets.length);
        // no trailing sentence boundary: snippet ends exactly at the match
        assertEquals("This is a <b>test</b>", snippets[0]);

        ir.close();
        dir.close();
    }

    // simple test with one sentence documents.
    @Test
    public void testOneSentence() throws Exception {
        Directory dir = newDirectory();
        // use simpleanalyzer for more natural tokenization (else "test." is a token)
        IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true));
        iwc.setMergePolicy(newLogMergePolicy());
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

        FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
        offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
        Field body = new Field("body", "", offsetsType);
        Document doc = new Document();
        doc.add(body);

        body.setStringValue("This is a test.");
        iw.addDocument(doc);
        body.setStringValue("Test a one sentence document.");
        iw.addDocument(doc);

        IndexReader ir = iw.getReader();
        iw.close();

        IndexSearcher searcher = newSearcher(ir);
        XPostingsHighlighter highlighter = new XPostingsHighlighter();
        Query query = new TermQuery(new Term("body", "test"));
        TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
        assertEquals(2, topDocs.totalHits);
        String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
        assertEquals(2, snippets.length);
        assertEquals("This is a <b>test</b>.", snippets[0]);
        assertEquals("<b>Test</b> a one sentence document.", snippets[1]);

        ir.close();
        dir.close();
    }

    // simple test with multiple values that make a result longer than maxLength.
    @Test
    public void testMaxLengthWithMultivalue() throws Exception {
        Directory dir = newDirectory();
        // use simpleanalyzer for more natural tokenization (else "test." is a token)
        IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true));
        iwc.setMergePolicy(newLogMergePolicy());
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

        FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
        offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
        Document doc = new Document();

        // three values of the same field; combined they exceed the 40-char budget below
        for(int i = 0; i < 3 ; i++) {
            Field body = new Field("body", "", offsetsType);
            body.setStringValue("This is a multivalued field");
            doc.add(body);
        }
        iw.addDocument(doc);

        IndexReader ir = iw.getReader();
        iw.close();

        IndexSearcher searcher = newSearcher(ir);
        XPostingsHighlighter highlighter = new XPostingsHighlighter(40);
        Query query = new TermQuery(new Term("body", "field"));
        TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
        assertEquals(1, topDocs.totalHits);
        String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
        assertEquals(1, snippets.length);
        assertTrue("Snippet should have maximum 40 characters plus the pre and post tags", snippets[0].length() == (40 + "<b></b>".length()));

        ir.close();
        dir.close();
    }

    // Highlight two different fields from the same query in one highlightFields call.
    @Test
    public void testMultipleFields() throws Exception {
        Directory dir = newDirectory();
        IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true));
        iwc.setMergePolicy(newLogMergePolicy());
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

        FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
        offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
        Field body = new Field("body", "", offsetsType);
        Field title = new Field("title", "", offsetsType);
        Document doc = new Document();
        doc.add(body);
        doc.add(title);

        body.setStringValue("This is a test. Just a test highlighting from postings. Feel free to ignore.");
        title.setStringValue("I am hoping for the best.");
        iw.addDocument(doc);
        body.setStringValue("Highlighting the first term. Hope it works.");
        title.setStringValue("But best may not be good enough.");
        iw.addDocument(doc);

        IndexReader ir = iw.getReader();
        iw.close();

        IndexSearcher searcher = newSearcher(ir);
        XPostingsHighlighter highlighter = new XPostingsHighlighter();
        BooleanQuery query = new BooleanQuery();
        query.add(new TermQuery(new Term("body", "highlighting")), BooleanClause.Occur.SHOULD);
        query.add(new TermQuery(new Term("title", "best")), BooleanClause.Occur.SHOULD);
        TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
        assertEquals(2, topDocs.totalHits);
        // result map is keyed by field name, values parallel the hit order
        Map<String,String[]> snippets = highlighter.highlightFields(new String [] { "body", "title" }, query, searcher, topDocs);
        assertEquals(2, snippets.size());
        assertEquals("Just a test <b>highlighting</b> from postings. ", snippets.get("body")[0]);
        assertEquals("<b>Highlighting</b> the first term. ", snippets.get("body")[1]);
        assertEquals("I am hoping for the <b>best</b>.", snippets.get("title")[0]);
        assertEquals("But <b>best</b> may not be good enough.", snippets.get("title")[1]);

        ir.close();
        dir.close();
    }

    // Several SHOULD terms on one field: all matching terms are tagged in the snippet.
    @Test
    public void testMultipleTerms() throws Exception {
        Directory dir = newDirectory();
        IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
        iwc.setMergePolicy(newLogMergePolicy());
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

        FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
        offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
        Field body = new Field("body", "", offsetsType);
        Document doc = new Document();
        doc.add(body);

        body.setStringValue("This is a test. Just a test highlighting from postings. Feel free to ignore.");
        iw.addDocument(doc);
        body.setStringValue("Highlighting the first term. Hope it works.");
        iw.addDocument(doc);

        IndexReader ir = iw.getReader();
        iw.close();

        IndexSearcher searcher = newSearcher(ir);
        XPostingsHighlighter highlighter = new XPostingsHighlighter();
        BooleanQuery query = new BooleanQuery();
        query.add(new TermQuery(new Term("body", "highlighting")), BooleanClause.Occur.SHOULD);
        query.add(new TermQuery(new Term("body", "just")), BooleanClause.Occur.SHOULD);
        query.add(new TermQuery(new Term("body", "first")), BooleanClause.Occur.SHOULD);
        TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
        assertEquals(2, topDocs.totalHits);
        String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
        assertEquals(2, snippets.length);
        assertEquals("<b>Just</b> a test <b>highlighting</b> from postings. ", snippets[0]);
        assertEquals("<b>Highlighting</b> the <b>first</b> term. ", snippets[1]);

        ir.close();
        dir.close();
    }

    // maxPassages=2: non-adjacent passages are joined with "... " in one snippet.
    @Test
    public void testMultiplePassages() throws Exception {
        Directory dir = newDirectory();
        IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true));
        iwc.setMergePolicy(newLogMergePolicy());
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

        FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
        offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
        Field body = new Field("body", "", offsetsType);
        Document doc = new Document();
        doc.add(body);

        body.setStringValue("This is a test. Just a test highlighting from postings. Feel free to ignore.");
        iw.addDocument(doc);
        body.setStringValue("This test is another test. Not a good sentence. Test test test test.");
        iw.addDocument(doc);

        IndexReader ir = iw.getReader();
        iw.close();

        IndexSearcher searcher = newSearcher(ir);
        XPostingsHighlighter highlighter = new XPostingsHighlighter();
        Query query = new TermQuery(new Term("body", "test"));
        TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
        assertEquals(2, topDocs.totalHits);
        String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2);
        assertEquals(2, snippets.length);
        assertEquals("This is a <b>test</b>. Just a <b>test</b> highlighting from postings. ", snippets[0]);
        assertEquals("This <b>test</b> is another <b>test</b>. ... <b>Test</b> <b>test</b> <b>test</b> <b>test</b>.", snippets[1]);

        ir.close();
        dir.close();
    }

    // Fields indexed without offsets (positions only / StringField) must be rejected
    // with IllegalArgumentException rather than silently returning wrong highlights.
    @Test
    public void testUserFailedToIndexOffsets() throws Exception {
        Directory dir = newDirectory();
        IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true));
        iwc.setMergePolicy(newLogMergePolicy());
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

        // deliberately positions-only: no offsets available for the highlighter
        FieldType positionsType = new FieldType(TextField.TYPE_STORED);
        positionsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
        Field body = new Field("body", "", positionsType);
        Field title = new StringField("title", "", Field.Store.YES);
        Document doc = new Document();
        doc.add(body);
        doc.add(title);

        body.setStringValue("This is a test. Just a test highlighting from postings. Feel free to ignore.");
        title.setStringValue("test");
        iw.addDocument(doc);
        body.setStringValue("This test is another test. Not a good sentence. Test test test test.");
        title.setStringValue("test");
        iw.addDocument(doc);

        IndexReader ir = iw.getReader();
        iw.close();

        IndexSearcher searcher = newSearcher(ir);
        XPostingsHighlighter highlighter = new XPostingsHighlighter();
        Query query = new TermQuery(new Term("body", "test"));
        TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
        assertEquals(2, topDocs.totalHits);
        try {
            highlighter.highlight("body", query, searcher, topDocs, 2);
            fail("did not hit expected exception");
        } catch (IllegalArgumentException iae) {
            // expected
        }
        try {
            highlighter.highlight("title", new TermQuery(new Term("title", "test")), searcher, topDocs, 2);
            fail("did not hit expected exception");
        } catch (IllegalArgumentException iae) {
            // expected
        }
        ir.close();
        dir.close();
    }

    // Phrase query over realistic prose: both phrase terms must be tagged together.
    @Test
    public void testBuddhism() throws Exception {
        String text = "This eight-volume set brings together seminal papers in Buddhist studies from a vast " +
                "range of academic disciplines published over the last forty years. With a new introduction " +
                "by the editor, this collection is a unique and unrivalled research resource for both " +
                "student and scholar. Coverage includes: - Buddhist origins; early history of Buddhism in " +
                "South and Southeast Asia - early Buddhist Schools and Doctrinal History; Theravada Doctrine " +
                "- the Origins and nature of Mahayana Buddhism; some Mahayana religious topics - Abhidharma " +
                "and Madhyamaka - Yogacara, the Epistemological tradition, and Tathagatagarbha - Tantric " +
                "Buddhism (Including China and Japan); Buddhism in Nepal and Tibet - Buddhism in South and " +
                "Southeast Asia, and - Buddhism in China, East Asia, and Japan.";
        Directory dir = newDirectory();
        Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true);
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, analyzer);

        FieldType positionsType = new FieldType(TextField.TYPE_STORED);
        positionsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
        Field body = new Field("body", text, positionsType);
        Document document = new Document();
        document.add(body);
        iw.addDocument(document);
        IndexReader ir = iw.getReader();
        iw.close();
        IndexSearcher searcher = newSearcher(ir);
        PhraseQuery query = new PhraseQuery();
        query.add(new Term("body", "buddhist"));
        query.add(new Term("body", "origins"));
        TopDocs topDocs = searcher.search(query, 10);
        assertEquals(1, topDocs.totalHits);
        XPostingsHighlighter highlighter = new XPostingsHighlighter();
        String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2);
        assertEquals(1, snippets.length);
        assertTrue(snippets[0].contains("<b>Buddhist</b> <b>origins</b>"));
        ir.close();
        dir.close();
    }

    // Repeated phrase terms: tags must not be emitted back-to-back for one occurrence.
    @Test
    public void testCuriousGeorge() throws Exception {
        String text = "It’s the formula for success for preschoolers—Curious George and fire trucks! " +
                "Curious George and the Firefighters is a story based on H. A. and Margret Rey’s " +
                "popular primate and painted in the original watercolor and charcoal style. " +
                "Firefighters are a famously brave lot, but can they withstand a visit from one curious monkey?";
        Directory dir = newDirectory();
        Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true);
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, analyzer);

        FieldType positionsType = new FieldType(TextField.TYPE_STORED);
        positionsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
        Field body = new Field("body", text, positionsType);
        Document document = new Document();
        document.add(body);
        iw.addDocument(document);
        IndexReader ir = iw.getReader();
        iw.close();
        IndexSearcher searcher = newSearcher(ir);
        PhraseQuery query = new PhraseQuery();
        query.add(new Term("body", "curious"));
        query.add(new Term("body", "george"));
        TopDocs topDocs = searcher.search(query, 10);
        assertEquals(1, topDocs.totalHits);
        XPostingsHighlighter highlighter = new XPostingsHighlighter();
        String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2);
        assertEquals(1, snippets.length);
        assertFalse(snippets[0].contains("<b>Curious</b>Curious"));
        ir.close();
        dir.close();
    }

    // Large real-world document loaded from a classpath resource; effectively
    // unbounded maxLength so the whole text is considered for passages.
    @Test
    public void testCambridgeMA() throws Exception {
        BufferedReader r = new BufferedReader(new InputStreamReader(
                this.getClass().getResourceAsStream("CambridgeMA.utf8"), "UTF-8"));
        String text = r.readLine();
        r.close();
        Directory dir = newDirectory();
        Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true);
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, analyzer);

        FieldType positionsType = new FieldType(TextField.TYPE_STORED);
        positionsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
        Field body = new Field("body", text, positionsType);
        Document document = new Document();
        document.add(body);
        iw.addDocument(document);
        IndexReader ir = iw.getReader();
        iw.close();
        IndexSearcher searcher = newSearcher(ir);
        BooleanQuery query = new BooleanQuery();
        query.add(new TermQuery(new Term("body", "porter")), BooleanClause.Occur.SHOULD);
        query.add(new TermQuery(new Term("body", "square")), BooleanClause.Occur.SHOULD);
        query.add(new TermQuery(new Term("body", "massachusetts")), BooleanClause.Occur.SHOULD);
        TopDocs topDocs = searcher.search(query, 10);
        assertEquals(1, topDocs.totalHits);
        XPostingsHighlighter highlighter = new XPostingsHighlighter(Integer.MAX_VALUE-1);
        String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2);
        assertEquals(1, snippets.length);
        assertTrue(snippets[0].contains("<b>Square</b>"));
        assertTrue(snippets[0].contains("<b>Porter</b>"));
        ir.close();
        dir.close();
    }

    // Passage ranking: with maxPassages=2 the densest sentences win, not the first two.
    @Test
    public void testPassageRanking() throws Exception {
        Directory dir = newDirectory();
        IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true));
        iwc.setMergePolicy(newLogMergePolicy());
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

        FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
        offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
        Field body = new Field("body", "", offsetsType);
        Document doc = new Document();
        doc.add(body);

        body.setStringValue("This is a test.  Just highlighting from postings. This is also a much sillier test.  Feel free to test test test test test test test.");
        iw.addDocument(doc);

        IndexReader ir = iw.getReader();
        iw.close();

        IndexSearcher searcher = newSearcher(ir);
        XPostingsHighlighter highlighter = new XPostingsHighlighter();
        Query query = new TermQuery(new Term("body", "test"));
        TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
        assertEquals(1, topDocs.totalHits);
        String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2);
        assertEquals(1, snippets.length);
        assertEquals("This is a <b>test</b>. ... Feel free to <b>test</b> <b>test</b> <b>test</b> <b>test</b> <b>test</b> <b>test</b> <b>test</b>.", snippets[0]);

        ir.close();
        dir.close();
    }

    // A MUST_NOT clause must not contribute highlight terms.
    @Test
    public void testBooleanMustNot() throws Exception {
        Directory dir = newDirectory();
        Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true);
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, analyzer);

        FieldType positionsType = new FieldType(TextField.TYPE_STORED);
        positionsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
        Field body = new Field("body", "This sentence has both terms.  This sentence has only terms.", positionsType);
        Document document = new Document();
        document.add(body);
        iw.addDocument(document);
        IndexReader ir = iw.getReader();
        iw.close();
        IndexSearcher searcher = newSearcher(ir);
        BooleanQuery query = new BooleanQuery();
        query.add(new TermQuery(new Term("body", "terms")), BooleanClause.Occur.SHOULD);
        // nested clause holding the prohibited term
        BooleanQuery query2 = new BooleanQuery();
        query.add(query2, BooleanClause.Occur.SHOULD);
        query2.add(new TermQuery(new Term("body", "both")), BooleanClause.Occur.MUST_NOT);
        TopDocs topDocs = searcher.search(query, 10);
        assertEquals(1, topDocs.totalHits);
        XPostingsHighlighter highlighter = new XPostingsHighlighter(Integer.MAX_VALUE-1);
        String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2);
        assertEquals(1, snippets.length);
        assertFalse(snippets[0].contains("<b>both</b>"));
        ir.close();
        dir.close();
    }

    // WholeBreakIterator turns the whole field value into a single passage.
    @Test
    public void testHighlightAllText() throws Exception {
        Directory dir = newDirectory();
        IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true));
        iwc.setMergePolicy(newLogMergePolicy());
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

        FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
        offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
        Field body = new Field("body", "", offsetsType);
Document doc = new Document();
        doc.add(body);

        body.setStringValue("This is a test.  Just highlighting from postings. This is also a much sillier test.  Feel free to test test test test test test test.");
        iw.addDocument(doc);

        IndexReader ir = iw.getReader();
        iw.close();

        IndexSearcher searcher = newSearcher(ir);
        // WholeBreakIterator: one passage covering the entire (<=10000 chars) value
        XPostingsHighlighter highlighter = new XPostingsHighlighter(10000) {
            @Override
            protected BreakIterator getBreakIterator(String field) {
                return new WholeBreakIterator();
            }
        };
        Query query = new TermQuery(new Term("body", "test"));
        TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
        assertEquals(1, topDocs.totalHits);
        String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2);
        assertEquals(1, snippets.length);
        assertEquals("This is a <b>test</b>.  Just highlighting from postings. This is also a much sillier <b>test</b>.  Feel free to <b>test</b> <b>test</b> <b>test</b> <b>test</b> <b>test</b> <b>test</b> <b>test</b>.", snippets[0]);

        ir.close();
        dir.close();
    }

    // Highlight by explicit docIDs (and per-field maxPassages) instead of TopDocs.
    @Test
    public void testSpecificDocIDs() throws Exception {
        Directory dir = newDirectory();
        IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
        iwc.setMergePolicy(newLogMergePolicy());
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

        FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
        offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
        Field body = new Field("body", "", offsetsType);
        Document doc = new Document();
        doc.add(body);

        body.setStringValue("This is a test. Just a test highlighting from postings. Feel free to ignore.");
        iw.addDocument(doc);
        body.setStringValue("Highlighting the first term. Hope it works.");
        iw.addDocument(doc);

        IndexReader ir = iw.getReader();
        iw.close();

        IndexSearcher searcher = newSearcher(ir);
        XPostingsHighlighter highlighter = new XPostingsHighlighter();
        Query query = new TermQuery(new Term("body", "highlighting"));
        TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
        assertEquals(2, topDocs.totalHits);
        ScoreDoc[] hits = topDocs.scoreDocs;
        int[] docIDs = new int[2];
        docIDs[0] = hits[0].doc;
        docIDs[1] = hits[1].doc;
        String snippets[] = highlighter.highlightFields(new String[] {"body"}, query, searcher, docIDs, new int[] { 1 }).get("body");
        assertEquals(2, snippets.length);
        assertEquals("Just a test <b>highlighting</b> from postings. ", snippets[0]);
        assertEquals("<b>Highlighting</b> the first term. ", snippets[1]);

        ir.close();
        dir.close();
    }

    // Field is NOT stored: loadFieldValues is overridden to supply the content,
    // proving the highlighter can run against an external value source.
    @Test
    public void testCustomFieldValueSource() throws Exception {
        Directory dir = newDirectory();
        IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true));
        iwc.setMergePolicy(newLogMergePolicy());
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

        Document doc = new Document();

        FieldType offsetsType = new FieldType(TextField.TYPE_NOT_STORED);
        offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
        final String text = "This is a test.  Just highlighting from postings. This is also a much sillier test.  Feel free to test test test test test test test.";
        Field body = new Field("body", text, offsetsType);
        doc.add(body);
        iw.addDocument(doc);

        IndexReader ir = iw.getReader();
        iw.close();

        IndexSearcher searcher = newSearcher(ir);

        XPostingsHighlighter highlighter = new XPostingsHighlighter(10000) {
            @Override
            protected String[][] loadFieldValues(IndexSearcher searcher, String[] fields, int[] docids, int maxLength) throws IOException {
                assertThat(fields.length, equalTo(1));
                assertThat(docids.length, equalTo(1));
                String[][] contents = new String[1][1];
                contents[0][0] = text;
                return contents;
            }

            @Override
            protected BreakIterator getBreakIterator(String field) {
                return new WholeBreakIterator();
            }
        };

        Query query = new TermQuery(new Term("body", "test"));
        TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
        assertEquals(1, topDocs.totalHits);
        String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2);
        assertEquals(1, snippets.length);
        assertEquals("This is a <b>test</b>.  Just highlighting from postings. This is also a much sillier <b>test</b>.  Feel free to <b>test</b> <b>test</b> <b>test</b> <b>test</b> <b>test</b> <b>test</b> <b>test</b>.", snippets[0]);

        ir.close();
        dir.close();
    }

    /** Make sure highlighter returns first N sentences if
     *  there were no hits. */
    @Test
    public void testEmptyHighlights() throws Exception {
        Directory dir = newDirectory();
        IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
        iwc.setMergePolicy(newLogMergePolicy());
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

        FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
        offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
        Document doc = new Document();

        Field body = new Field("body", "test this is.  another sentence this test has.  far away is that planet.", offsetsType);
        doc.add(body);
        iw.addDocument(doc);

        IndexReader ir = iw.getReader();
        iw.close();

        IndexSearcher searcher = newSearcher(ir);
        XPostingsHighlighter highlighter = new XPostingsHighlighter();
        // query term does not occur in the document: expect the first 2 sentences back
        Query query = new TermQuery(new Term("body", "highlighting"));
        int[] docIDs = new int[] {0};
        String snippets[] = highlighter.highlightFields(new String[] {"body"}, query, searcher, docIDs, new int[] { 2 }).get("body");
        assertEquals(1, snippets.length);
        assertEquals("test this is.  another sentence this test has.  ", snippets[0]);

        ir.close();
        dir.close();
    }

    /** Make sure we can customize how an empty
     *  highlight is returned. */
    @Test
    public void testCustomEmptyHighlights() throws Exception {
        Directory dir = newDirectory();
        IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
        iwc.setMergePolicy(newLogMergePolicy());
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

        FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
        offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
        Document doc = new Document();

        Field body = new Field("body", "test this is.  another sentence this test has.  far away is that planet.", offsetsType);
        doc.add(body);
        iw.addDocument(doc);

        IndexReader ir = iw.getReader();
        iw.close();

        IndexSearcher searcher = newSearcher(ir);
        XPostingsHighlighter highlighter = new XPostingsHighlighter() {
            @Override
            public Passage[] getEmptyHighlight(String fieldName, BreakIterator bi, int maxPassages) {
                // returning no passages yields a null snippet for the doc
                return new Passage[0];
            }
        };
        Query query = new TermQuery(new Term("body", "highlighting"));
        int[] docIDs = new int[] {0};
        String snippets[] = highlighter.highlightFields(new String[] {"body"}, query, searcher, docIDs, new int[] { 2 }).get("body");
        assertEquals(1, snippets.length);
        assertNull(snippets[0]);

        ir.close();
        dir.close();
    }

    /** Make sure highlighter returns whole text when there
     *  are no hits and BreakIterator is null. */
    @Test
    public void testEmptyHighlightsWhole() throws Exception {
        Directory dir = newDirectory();
        IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
        iwc.setMergePolicy(newLogMergePolicy());
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

        FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
        offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
        Document doc = new Document();

        Field body = new Field("body", "test this is.  another sentence this test has.  far away is that planet.", offsetsType);
        doc.add(body);
        iw.addDocument(doc);

        IndexReader ir = iw.getReader();
        iw.close();

        IndexSearcher searcher = newSearcher(ir);
        XPostingsHighlighter highlighter = new XPostingsHighlighter(10000) {
            @Override
            protected BreakIterator getBreakIterator(String field) {
                return new WholeBreakIterator();
            }
        };
        Query query = new TermQuery(new Term("body", "highlighting"));
        int[] docIDs = new int[] {0};
        String snippets[] = highlighter.highlightFields(new String[] {"body"}, query, searcher, docIDs, new int[] { 2 }).get("body");
        assertEquals(1, snippets.length);
        assertEquals("test this is.  another sentence this test has.  far away is that planet.", snippets[0]);

        ir.close();
        dir.close();
    }

    /** Make sure highlighter is OK with entirely missing
     *  field. */
    @Test
    public void testFieldIsMissing() throws Exception {
        Directory dir = newDirectory();
        IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
        iwc.setMergePolicy(newLogMergePolicy());
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

        FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
        offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
        Document doc = new Document();

        Field body = new Field("body", "test this is.  another sentence this test has.  far away is that planet.", offsetsType);
        doc.add(body);
        iw.addDocument(doc);

        IndexReader ir = iw.getReader();
        iw.close();

        IndexSearcher searcher = newSearcher(ir);
        XPostingsHighlighter highlighter = new XPostingsHighlighter();
        // "bogus" was never indexed: expect a null snippet, not an exception
        Query query = new TermQuery(new Term("bogus", "highlighting"));
        int[] docIDs = new int[] {0};
        String snippets[] = highlighter.highlightFields(new String[] {"bogus"}, query, searcher, docIDs, new int[] { 2 }).get("bogus");
        assertEquals(1, snippets.length);
        assertNull(snippets[0]);

        ir.close();
        dir.close();
    }

    // Whitespace-only field value must round-trip as-is when there is no match.
    @Test
    public void testFieldIsJustSpace() throws Exception {
        Directory dir = newDirectory();
        IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
        iwc.setMergePolicy(newLogMergePolicy());
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

        FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
        offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);

        Document doc = new Document();
        doc.add(new Field("body", " ", offsetsType));
        doc.add(new Field("id", "id", offsetsType));
        iw.addDocument(doc);

        doc = new Document();
        doc.add(new Field("body", "something", offsetsType));
        iw.addDocument(doc);

        IndexReader ir = iw.getReader();
        iw.close();

        IndexSearcher searcher = newSearcher(ir);
        XPostingsHighlighter highlighter = new XPostingsHighlighter();
        // locate the whitespace-only doc by its id field (doc order is not guaranteed)
        int docID = searcher.search(new TermQuery(new Term("id", "id")), 1).scoreDocs[0].doc;

        Query query = new TermQuery(new Term("body", "highlighting"));
        int[] docIDs = new int[1];
        docIDs[0] = docID;
        String snippets[] = highlighter.highlightFields(new String[] {"body"}, query, searcher, docIDs, new int[] { 2 }).get("body");
        assertEquals(1, snippets.length);
        assertEquals(" ", snippets[0]);

        ir.close();
        dir.close();
    }

    // Empty-string field value: highlighter should return null rather than "".
    @Test
    public void testFieldIsEmptyString() throws Exception {
        Directory dir = newDirectory();
        IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
iwc.setMergePolicy(newLogMergePolicy());
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

        FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
        offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);

        Document doc = new Document();
        doc.add(new Field("body", "", offsetsType));
        doc.add(new Field("id", "id", offsetsType));
        iw.addDocument(doc);

        doc = new Document();
        doc.add(new Field("body", "something", offsetsType));
        iw.addDocument(doc);

        IndexReader ir = iw.getReader();
        iw.close();

        IndexSearcher searcher = newSearcher(ir);
        XPostingsHighlighter highlighter = new XPostingsHighlighter();
        // locate the empty-body doc by its id field (doc order is not guaranteed)
        int docID = searcher.search(new TermQuery(new Term("id", "id")), 1).scoreDocs[0].doc;

        Query query = new TermQuery(new Term("body", "highlighting"));
        int[] docIDs = new int[1];
        docIDs[0] = docID;
        String snippets[] = highlighter.highlightFields(new String[] {"body"}, query, searcher, docIDs, new int[] { 2 }).get("body");
        assertEquals(1, snippets.length);
        assertNull(snippets[0]);

        ir.close();
        dir.close();
    }

    // Many docs (with random commits, so multiple segments): every hit must get
    // a snippet that reflects its own stored content.
    @Test
    public void testMultipleDocs() throws Exception {
        Directory dir = newDirectory();
        IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
        iwc.setMergePolicy(newLogMergePolicy());
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

        FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
        offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);

        int numDocs = atLeast(100);
        for(int i=0;i<numDocs;i++) {
            Document doc = new Document();
            String content = "the answer is " + i;
            if ((i & 1) == 0) {
                content += " some more terms";
            }
            doc.add(new Field("body", content, offsetsType));
            doc.add(newStringField("id", ""+i, Field.Store.YES));
            iw.addDocument(doc);

            if (random().nextInt(10) == 2) {
                // random commit creates additional segments
                iw.commit();
            }
        }

        IndexReader ir = iw.getReader();
        iw.close();

        IndexSearcher searcher = newSearcher(ir);
        XPostingsHighlighter highlighter = new XPostingsHighlighter();
        Query query = new TermQuery(new Term("body", "answer"));
        TopDocs hits = searcher.search(query, numDocs);
        assertEquals(numDocs, hits.totalHits);

        String snippets[] = highlighter.highlight("body", query, searcher, hits);
        assertEquals(numDocs, snippets.length);
        for(int hit=0;hit<numDocs;hit++) {
            Document doc = searcher.doc(hits.scoreDocs[hit].doc);
            int id = Integer.parseInt(doc.get("id"));
            String expected = "the <b>answer</b> is " + id;
            if ((id & 1) == 0) {
                expected += " some more terms";
            }
            assertEquals(expected, snippets[hit]);
        }

        ir.close();
        dir.close();
    }

    // Per-field maxPassages: title gets 1 passage, body gets 2, in one call.
    @Test
    public void testMultipleSnippetSizes() throws Exception {
        Directory dir = newDirectory();
        IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true));
        iwc.setMergePolicy(newLogMergePolicy());
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

        FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
        offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
        Field body = new Field("body", "", offsetsType);
        Field title = new Field("title", "", offsetsType);
        Document doc = new Document();
        doc.add(body);
        doc.add(title);

        body.setStringValue("This is a test. Just a test highlighting from postings. Feel free to ignore.");
        title.setStringValue("This is a test. Just a test highlighting from postings. Feel free to ignore.");
        iw.addDocument(doc);

        IndexReader ir = iw.getReader();
        iw.close();

        IndexSearcher searcher = newSearcher(ir);
        XPostingsHighlighter highlighter = new XPostingsHighlighter();
        BooleanQuery query = new BooleanQuery();
        query.add(new TermQuery(new Term("body", "test")), BooleanClause.Occur.SHOULD);
        query.add(new TermQuery(new Term("title", "test")), BooleanClause.Occur.SHOULD);
        Map<String,String[]> snippets = highlighter.highlightFields(new String[] { "title", "body" }, query, searcher, new int[] { 0 }, new int[] { 1, 2 });
        String titleHighlight = snippets.get("title")[0];
        String bodyHighlight = snippets.get("body")[0];
        assertEquals("This is a <b>test</b>. ", titleHighlight);
        assertEquals("This is a <b>test</b>. Just a <b>test</b> highlighting from postings. ", bodyHighlight);
        ir.close();
        dir.close();
    }

    // Custom PassageFormatter that HTML-escapes the stored content.
    // NOTE: runs against the stock PostingsHighlighter (not the X fork).
    public void testEncode() throws Exception {
        Directory dir = newDirectory();
        IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
        iwc.setMergePolicy(newLogMergePolicy());
        RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);

        FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
        offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
        Field body = new Field("body", "", offsetsType);
        Document doc = new Document();
        doc.add(body);

        body.setStringValue("This is a test. Just a test highlighting from <i>postings</i>. Feel free to ignore.");
        iw.addDocument(doc);

        IndexReader ir = iw.getReader();
        iw.close();

        IndexSearcher searcher = newSearcher(ir);
        PostingsHighlighter highlighter = new PostingsHighlighter() {
            @Override
            protected PassageFormatter getFormatter(String field) {
                return new DefaultPassageFormatter("<b>", "</b>", "... 
", true); } }; Query query = new TermQuery(new Term("body", "highlighting")); TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); assertEquals(1, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(1, snippets.length); assertEquals("Just&#32;a&#32;test&#32;<b>highlighting</b>&#32;from&#32;&lt;i&gt;postings&lt;&#x2F;i&gt;&#46;&#32;", snippets[0]); ir.close(); dir.close(); } /** customizing the gap separator to force a sentence break */ public void testGapSeparator() throws Exception { Directory dir = newDirectory(); // use simpleanalyzer for more natural tokenization (else "test." is a token) IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); FieldType offsetsType = new FieldType(TextField.TYPE_STORED); offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); Document doc = new Document(); Field body1 = new Field("body", "", offsetsType); body1.setStringValue("This is a multivalued field"); doc.add(body1); Field body2 = new Field("body", "", offsetsType); body2.setStringValue("This is something different"); doc.add(body2); iw.addDocument(doc); IndexReader ir = iw.getReader(); iw.close(); IndexSearcher searcher = newSearcher(ir); PostingsHighlighter highlighter = new PostingsHighlighter() { @Override protected char getMultiValuedSeparator(String field) { assert field.equals("body"); return '\u2029'; } }; Query query = new TermQuery(new Term("body", "field")); TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); assertEquals(1, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(1, snippets.length); assertEquals("This is a multivalued <b>field</b>\u2029", snippets[0]); ir.close(); dir.close(); } // 
LUCENE-4906 public void testObjectFormatter() throws Exception { Directory dir = newDirectory(); IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); FieldType offsetsType = new FieldType(TextField.TYPE_STORED); offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); Field body = new Field("body", "", offsetsType); Document doc = new Document(); doc.add(body); body.setStringValue("This is a test. Just a test highlighting from postings. Feel free to ignore."); iw.addDocument(doc); IndexReader ir = iw.getReader(); iw.close(); IndexSearcher searcher = newSearcher(ir); XPostingsHighlighter highlighter = new XPostingsHighlighter() { @Override protected PassageFormatter getFormatter(String field) { return new PassageFormatter() { PassageFormatter defaultFormatter = new DefaultPassageFormatter(); @Override public String[] format(Passage passages[], String content) { // Just turns the String snippet into a length 2 // array of String return new String[] {"blah blah", defaultFormatter.format(passages, content).toString()}; } }; } }; Query query = new TermQuery(new Term("body", "highlighting")); TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); assertEquals(1, topDocs.totalHits); int[] docIDs = new int[1]; docIDs[0] = topDocs.scoreDocs[0].doc; Map<String,Object[]> snippets = highlighter.highlightFieldsAsObjects(new String[]{"body"}, query, searcher, docIDs, new int[] {1}); Object[] bodySnippets = snippets.get("body"); assertEquals(1, bodySnippets.length); assertTrue(Arrays.equals(new String[] {"blah blah", "Just a test <b>highlighting</b> from postings. "}, (String[]) bodySnippets[0])); ir.close(); dir.close(); } }
0true
src_test_java_org_apache_lucene_search_postingshighlight_XPostingsHighlighterTests.java
4,183
public class IndexShardSnapshotAndRestoreService extends AbstractIndexShardComponent { private final InternalIndexShard indexShard; private final RepositoriesService repositoriesService; private final RestoreService restoreService; @Inject public IndexShardSnapshotAndRestoreService(ShardId shardId, @IndexSettings Settings indexSettings, IndexShard indexShard, RepositoriesService repositoriesService, RestoreService restoreService) { super(shardId, indexSettings); this.indexShard = (InternalIndexShard) indexShard; this.repositoriesService = repositoriesService; this.restoreService = restoreService; } /** * Creates shard snapshot * * @param snapshotId snapshot id * @param snapshotStatus snapshot status */ public void snapshot(final SnapshotId snapshotId, final IndexShardSnapshotStatus snapshotStatus) { IndexShardRepository indexShardRepository = repositoriesService.indexShardRepository(snapshotId.getRepository()); if (!indexShard.routingEntry().primary()) { throw new IndexShardSnapshotFailedException(shardId, "snapshot should be performed only on primary"); } if (indexShard.routingEntry().relocating()) { // do not snapshot when in the process of relocation of primaries so we won't get conflicts throw new IndexShardSnapshotFailedException(shardId, "cannot snapshot while relocating"); } if (indexShard.state() == IndexShardState.CREATED || indexShard.state() == IndexShardState.RECOVERING) { // shard has just been created, or still recovering throw new IndexShardSnapshotFailedException(shardId, "shard didn't fully recover yet"); } try { SnapshotIndexCommit snapshotIndexCommit = indexShard.snapshotIndex(); try { indexShardRepository.snapshot(snapshotId, shardId, snapshotIndexCommit, snapshotStatus); if (logger.isDebugEnabled()) { StringBuilder sb = new StringBuilder(); sb.append("snapshot (").append(snapshotId.getSnapshot()).append(") completed to ").append(indexShardRepository).append(", took [").append(TimeValue.timeValueMillis(snapshotStatus.time())).append("]\n"); 
sb.append(" index : version [").append(snapshotStatus.indexVersion()).append("], number_of_files [").append(snapshotStatus.numberOfFiles()).append("] with total_size [").append(new ByteSizeValue(snapshotStatus.totalSize())).append("]\n"); logger.debug(sb.toString()); } } finally { snapshotIndexCommit.release(); } } catch (SnapshotFailedEngineException e) { throw e; } catch (IndexShardSnapshotFailedException e) { throw e; } catch (Throwable e) { throw new IndexShardSnapshotFailedException(shardId, "Failed to snapshot", e); } } /** * Restores shard from {@link RestoreSource} associated with this shard in routing table * * @param recoveryStatus recovery status */ public void restore(final RecoveryStatus recoveryStatus) { RestoreSource restoreSource = indexShard.routingEntry().restoreSource(); if (restoreSource == null) { throw new IndexShardRestoreFailedException(shardId, "empty restore source"); } if (logger.isTraceEnabled()) { logger.trace("[{}] restoring shard [{}]", restoreSource.snapshotId(), shardId); } try { IndexShardRepository indexShardRepository = repositoriesService.indexShardRepository(restoreSource.snapshotId().getRepository()); ShardId snapshotShardId = shardId; if (!shardId.getIndex().equals(restoreSource.index())) { snapshotShardId = new ShardId(restoreSource.index(), shardId.id()); } indexShardRepository.restore(restoreSource.snapshotId(), shardId, snapshotShardId, recoveryStatus); restoreService.indexShardRestoreCompleted(restoreSource.snapshotId(), shardId); } catch (Throwable t) { throw new IndexShardRestoreFailedException(shardId, "restore failed", t); } } }
1no label
src_main_java_org_elasticsearch_index_snapshots_IndexShardSnapshotAndRestoreService.java
781
METRIC_TYPE.COUNTER, new OProfilerHookValue() { public Object getValue() { return alertTimes; } });
1no label
core_src_main_java_com_orientechnologies_orient_core_memory_OMemoryWatchDog.java
1,988
@Entity @Inheritance(strategy = InheritanceType.JOINED) @Table(name = "BLC_CUSTOMER_PHONE", uniqueConstraints = @UniqueConstraint(name="CSTMR_PHONE_UNIQUE_CNSTRNT", columnNames = { "CUSTOMER_ID", "PHONE_NAME" })) @AdminPresentationMergeOverrides( { @AdminPresentationMergeOverride(name = "phone.phoneNumber", mergeEntries = @AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.PROMINENT, booleanOverrideValue = true)) } ) @AdminPresentationClass(populateToOneFields = PopulateToOneFieldsEnum.TRUE) public class CustomerPhoneImpl implements CustomerPhone{ private static final long serialVersionUID = 1L; @Id @GeneratedValue(generator = "CustomerPhoneId") @GenericGenerator( name="CustomerPhoneId", strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator", parameters = { @Parameter(name="segment_value", value="CustomerPhoneImpl"), @Parameter(name="entity_name", value="org.broadleafcommerce.profile.core.domain.CustomerPhoneImpl") } ) @Column(name = "CUSTOMER_PHONE_ID") protected Long id; @Column(name = "PHONE_NAME") @AdminPresentation(friendlyName = "CustomerPhoneImpl_Phone_Name", order=1, group = "CustomerPhoneImpl_Identification", groupOrder = 1, prominent = true, gridOrder = 1) protected String phoneName; @ManyToOne(cascade = {CascadeType.PERSIST, CascadeType.MERGE}, targetEntity = CustomerImpl.class, optional=false) @JoinColumn(name = "CUSTOMER_ID") @AdminPresentation(excluded = true, visibility = VisibilityEnum.HIDDEN_ALL) protected Customer customer; @ManyToOne(cascade = CascadeType.ALL, targetEntity = PhoneImpl.class, optional=false) @JoinColumn(name = "PHONE_ID") @Index(name="CUSTPHONE_PHONE_INDEX", columnNames={"PHONE_ID"}) protected Phone phone; @Override public Long getId() { return id; } @Override public void setId(Long id) { this.id = id; } @Override public String getPhoneName() { return phoneName; } @Override public void setPhoneName(String phoneName) { this.phoneName = phoneName; } @Override public Customer getCustomer() 
{ return customer; } @Override public void setCustomer(Customer customer) { this.customer = customer; } @Override public Phone getPhone() { return phone; } @Override public void setPhone(Phone phone) { this.phone = phone; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((customer == null) ? 0 : customer.hashCode()); result = prime * result + ((phone == null) ? 0 : phone.hashCode()); result = prime * result + ((phoneName == null) ? 0 : phoneName.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; CustomerPhoneImpl other = (CustomerPhoneImpl) obj; if (id != null && other.id != null) { return id.equals(other.id); } if (customer == null) { if (other.customer != null) return false; } else if (!customer.equals(other.customer)) return false; if (phone == null) { if (other.phone != null) return false; } else if (!phone.equals(other.phone)) return false; if (phoneName == null) { if (other.phoneName != null) return false; } else if (!phoneName.equals(other.phoneName)) return false; return true; } }
1no label
core_broadleaf-profile_src_main_java_org_broadleafcommerce_profile_core_domain_CustomerPhoneImpl.java
656
public class PutIndexTemplateRequestBuilder extends MasterNodeOperationRequestBuilder<PutIndexTemplateRequest, PutIndexTemplateResponse, PutIndexTemplateRequestBuilder> { public PutIndexTemplateRequestBuilder(IndicesAdminClient indicesClient) { super((InternalIndicesAdminClient) indicesClient, new PutIndexTemplateRequest()); } public PutIndexTemplateRequestBuilder(IndicesAdminClient indicesClient, String name) { super((InternalIndicesAdminClient) indicesClient, new PutIndexTemplateRequest(name)); } /** * Sets the template match expression that will be used to match on indices created. */ public PutIndexTemplateRequestBuilder setTemplate(String template) { request.template(template); return this; } /** * Sets the order of this template if more than one template matches. */ public PutIndexTemplateRequestBuilder setOrder(int order) { request.order(order); return this; } /** * Set to <tt>true</tt> to force only creation, not an update of an index template. If it already * exists, it will fail with an {@link org.elasticsearch.indices.IndexTemplateAlreadyExistsException}. */ public PutIndexTemplateRequestBuilder setCreate(boolean create) { request.create(create); return this; } /** * The settings to created the index template with. */ public PutIndexTemplateRequestBuilder setSettings(Settings settings) { request.settings(settings); return this; } /** * The settings to created the index template with. 
*/ public PutIndexTemplateRequestBuilder setSettings(Settings.Builder settings) { request.settings(settings); return this; } /** * The settings to crete the index template with (either json/yaml/properties format) */ public PutIndexTemplateRequestBuilder setSettings(String source) { request.settings(source); return this; } /** * The settings to crete the index template with (either json/yaml/properties format) */ public PutIndexTemplateRequestBuilder setSettings(Map<String, Object> source) { request.settings(source); return this; } /** * Adds mapping that will be added when the index template gets created. * * @param type The mapping type * @param source The mapping source */ public PutIndexTemplateRequestBuilder addMapping(String type, String source) { request.mapping(type, source); return this; } /** * The cause for this index template creation. */ public PutIndexTemplateRequestBuilder cause(String cause) { request.cause(cause); return this; } /** * Adds mapping that will be added when the index template gets created. * * @param type The mapping type * @param source The mapping source */ public PutIndexTemplateRequestBuilder addMapping(String type, XContentBuilder source) { request.mapping(type, source); return this; } /** * Adds mapping that will be added when the index gets created. * * @param type The mapping type * @param source The mapping source */ public PutIndexTemplateRequestBuilder addMapping(String type, Map<String, Object> source) { request.mapping(type, source); return this; } /** * The template source definition. */ public PutIndexTemplateRequestBuilder setSource(XContentBuilder templateBuilder) { request.source(templateBuilder); return this; } /** * The template source definition. */ public PutIndexTemplateRequestBuilder setSource(Map templateSource) { request.source(templateSource); return this; } /** * The template source definition. 
*/ public PutIndexTemplateRequestBuilder setSource(String templateSource) { request.source(templateSource); return this; } /** * The template source definition. */ public PutIndexTemplateRequestBuilder setSource(BytesReference templateSource) { request.source(templateSource); return this; } /** * The template source definition. */ public PutIndexTemplateRequestBuilder setSource(byte[] templateSource) { request.source(templateSource); return this; } /** * The template source definition. */ public PutIndexTemplateRequestBuilder setSource(byte[] templateSource, int offset, int length) { request.source(templateSource, offset, length); return this; } @Override protected void doExecute(ActionListener<PutIndexTemplateResponse> listener) { ((IndicesAdminClient) client).putTemplate(request, listener); } }
0true
src_main_java_org_elasticsearch_action_admin_indices_template_put_PutIndexTemplateRequestBuilder.java
111
public class TestDeadlockDetection { @Test public void testDeadlockDetection() throws Exception { ResourceObject r1 = newResourceObject( "R1" ); ResourceObject r2 = newResourceObject( "R2" ); ResourceObject r3 = newResourceObject( "R3" ); ResourceObject r4 = newResourceObject( "R4" ); PlaceboTm tm = new PlaceboTm( null, null ); LockManager lm = new LockManagerImpl( new RagManager() ); tm.setLockManager( lm ); LockWorker t1 = new LockWorker( "T1", lm ); LockWorker t2 = new LockWorker( "T2", lm ); LockWorker t3 = new LockWorker( "T3", lm ); LockWorker t4 = new LockWorker( "T4", lm ); try { t1.getReadLock( r1, true ); t1.getReadLock( r4, true ); t2.getReadLock( r2, true ); t2.getReadLock( r3, true ); t3.getReadLock( r3, true ); t3.getWriteLock( r1, false ); // t3-r1-t1 // T3 t2.getWriteLock( r4, false ); // t2-r4-t1 t1.getWriteLock( r2, true ); assertTrue( t1.isLastGetLockDeadLock() ); // t1-r2-t2-r4-t1 // resolve and try one more time t1.releaseReadLock( r4 ); // will give r4 to t2 t1.getWriteLock( r2, false ); // t1-r2-t2 t2.releaseReadLock( r2 ); // will give r2 to t1 t1.getWriteLock( r4, false ); // t1-r4-t2 // T1 // dead lock t2.getWriteLock( r2, true ); // T2 assertTrue( t2.isLastGetLockDeadLock() ); // t2-r2-t3-r1-t1-r4-t2 or t2-r2-t1-r4-t2 t2.releaseWriteLock( r4 ); // give r4 to t1 t1.releaseWriteLock( r4 ); t2.getReadLock( r4, true ); t1.releaseWriteLock( r2 ); t1.getReadLock( r2, true ); t1.releaseReadLock( r1 ); // give r1 to t3 t3.getReadLock( r2, true ); t3.releaseWriteLock( r1 ); t1.getReadLock( r1, true ); // give r1->t1 t1.getWriteLock( r4, false ); t3.getWriteLock( r1, false ); t4.getReadLock( r2, true ); // deadlock t2.getWriteLock( r2, true ); assertTrue( t2.isLastGetLockDeadLock() ); // t2-r2-t3-r1-t1-r4-t2 // resolve t2.releaseReadLock( r4 ); t1.releaseWriteLock( r4 ); t1.releaseReadLock( r1 ); t2.getReadLock( r4, true ); // give r1 to t3 t3.releaseWriteLock( r1 ); t1.getReadLock( r1, true ); // give r1 to t1 t1.getWriteLock( r4, false ); 
t3.releaseReadLock( r2 ); t3.getWriteLock( r1, false ); // cleanup t2.releaseReadLock( r4 ); // give r4 to t1 t1.releaseWriteLock( r4 ); t1.releaseReadLock( r1 ); // give r1 to t3 t3.releaseWriteLock( r1 ); t1.releaseReadLock( r2 ); t4.releaseReadLock( r2 ); t2.releaseReadLock( r3 ); t3.releaseReadLock( r3 ); // -- special case... t1.getReadLock( r1, true ); t2.getReadLock( r1, true ); t1.getWriteLock( r1, false ); // t1->r1-t1&t2 t2.getWriteLock( r1, true ); assertTrue( t2.isLastGetLockDeadLock() ); // t2->r1->t1->r1->t2 t2.releaseReadLock( r1 ); t1.releaseReadLock( r1 ); t1.releaseWriteLock( r1 ); } catch ( Exception e ) { File file = new LockWorkFailureDump( getClass() ).dumpState( lm, new LockWorker[] { t1, t2, t3, t4 } ); throw new RuntimeException( "Failed, forensics information dumped to " + file.getAbsolutePath(), e ); } } public static class StressThread extends Thread { private static final Object READ = new Object(); private static final Object WRITE = new Object(); private static ResourceObject resources[] = new ResourceObject[10]; private final Random rand = new Random( currentTimeMillis() ); static { for ( int i = 0; i < resources.length; i++ ) resources[i] = new ResourceObject( "RX" + i ); } private final CountDownLatch startSignal; private final String name; private final int numberOfIterations; private final int depthCount; private final float readWriteRatio; private final LockManager lm; private volatile Exception error; private final Transaction tx = mock( Transaction.class ); public volatile Long startedWaiting = null; StressThread( String name, int numberOfIterations, int depthCount, float readWriteRatio, LockManager lm, CountDownLatch startSignal ) { super(); this.name = name; this.numberOfIterations = numberOfIterations; this.depthCount = depthCount; this.readWriteRatio = readWriteRatio; this.lm = lm; this.startSignal = startSignal; } @Override public void run() { try { startSignal.await(); java.util.Stack<Object> lockStack = new 
java.util.Stack<Object>(); java.util.Stack<ResourceObject> resourceStack = new java.util.Stack<ResourceObject>(); for ( int i = 0; i < numberOfIterations; i++ ) { try { int depth = depthCount; do { float f = rand.nextFloat(); int n = rand.nextInt( resources.length ); if ( f < readWriteRatio ) { startedWaiting = currentTimeMillis(); lm.getReadLock( resources[n], tx ); startedWaiting = null; lockStack.push( READ ); } else { startedWaiting = currentTimeMillis(); lm.getWriteLock( resources[n], tx ); startedWaiting = null; lockStack.push( WRITE ); } resourceStack.push( resources[n] ); } while ( --depth > 0 ); } catch ( DeadlockDetectedException e ) { // This is good } finally { releaseAllLocks( lockStack, resourceStack ); } } } catch ( Exception e ) { error = e; } } private void releaseAllLocks( Stack<Object> lockStack, Stack<ResourceObject> resourceStack ) { while ( !lockStack.isEmpty() ) { if ( lockStack.pop() == READ ) { lm.releaseReadLock( resourceStack.pop(), tx ); } else { lm.releaseWriteLock( resourceStack.pop(), tx ); } } } @Override public String toString() { return this.name; } } @Test public void testStressMultipleThreads() throws Exception { /* This test starts a bunch of threads, and randomly takes read or write locks on random resources. No thread should wait more than five seconds for a lock - if it does, we consider it a failure. Successful outcomes are when threads either finish with all their lock taking and releasing, or are terminated with a DeadlockDetectedException. 
*/ for ( int i = 0; i < StressThread.resources.length; i++ ) { StressThread.resources[i] = new ResourceObject( "RX" + i ); } StressThread stressThreads[] = new StressThread[50]; PlaceboTm tm = new PlaceboTm( null, null ); LockManager lm = new LockManagerImpl( new RagManager() ); tm.setLockManager( lm ); CountDownLatch startSignal = new CountDownLatch( 1 ); for ( int i = 0; i < stressThreads.length; i++ ) { int numberOfIterations = 100; int depthCount = 10; float readWriteRatio = 0.80f; stressThreads[i] = new StressThread( "T" + i, numberOfIterations, depthCount, readWriteRatio, lm, startSignal ); } for ( Thread thread : stressThreads ) { thread.start(); } startSignal.countDown(); while ( anyAliveAndAllWell( stressThreads ) ) { throwErrorsIfAny( stressThreads ); sleepALittle(); } } private String diagnostics( StressThread culprit, StressThread[] stressThreads, long waited ) { StringBuilder builder = new StringBuilder(); for ( StressThread stressThread : stressThreads ) { if ( stressThread.isAlive() ) { if ( stressThread == culprit ) { builder.append( "This is the thread that waited too long. 
It waited: " ).append( waited ).append( " milliseconds" ); } for ( StackTraceElement element : stressThread.getStackTrace() ) { builder.append( element.toString() ).append( "\n" ); } } builder.append( "\n" ); } return builder.toString(); } private void throwErrorsIfAny( StressThread[] stressThreads ) throws Exception { for ( StressThread stressThread : stressThreads ) { if ( stressThread.error != null ) { throw stressThread.error; } } } private void sleepALittle() { try { Thread.sleep( 1000 ); } catch ( InterruptedException e ) { Thread.interrupted(); } } private boolean anyAliveAndAllWell( StressThread[] stressThreads ) { for ( StressThread stressThread : stressThreads ) { if ( stressThread.isAlive() ) { Long startedWaiting = stressThread.startedWaiting; if ( startedWaiting != null ) { long waitingTime = currentTimeMillis() - startedWaiting; if ( waitingTime > 5000 ) { fail( "One of the threads waited far too long. Diagnostics: \n" + diagnostics( stressThread, stressThreads, waitingTime) ); } } return true; } } return false; } }
0true
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_TestDeadlockDetection.java
8
@Component("blProductCustomPersistenceHandler") public class ProductCustomPersistenceHandler extends CustomPersistenceHandlerAdapter { @Resource(name = "blCatalogService") protected CatalogService catalogService; private static final Log LOG = LogFactory.getLog(ProductCustomPersistenceHandler.class); @Override public Boolean canHandleAdd(PersistencePackage persistencePackage) { String ceilingEntityFullyQualifiedClassname = persistencePackage.getCeilingEntityFullyQualifiedClassname(); String[] customCriteria = persistencePackage.getCustomCriteria(); return !ArrayUtils.isEmpty(customCriteria) && "productDirectEdit".equals(customCriteria[0]) && Product.class.getName().equals(ceilingEntityFullyQualifiedClassname); } @Override public Boolean canHandleUpdate(PersistencePackage persistencePackage) { return canHandleAdd(persistencePackage); } @Override public Entity add(PersistencePackage persistencePackage, DynamicEntityDao dynamicEntityDao, RecordHelper helper) throws ServiceException { Entity entity = persistencePackage.getEntity(); try { PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective(); Product adminInstance = (Product) Class.forName(entity.getType()[0]).newInstance(); Map<String, FieldMetadata> adminProperties = helper.getSimpleMergedProperties(Product.class.getName(), persistencePerspective); if (adminInstance instanceof ProductBundle) { removeBundleFieldRestrictions((ProductBundle)adminInstance, adminProperties, entity); } adminInstance = (Product) helper.createPopulatedInstance(adminInstance, entity, adminProperties, false); adminInstance = (Product) dynamicEntityDao.merge(adminInstance); CategoryProductXref categoryXref = new CategoryProductXrefImpl(); categoryXref.setCategory(adminInstance.getDefaultCategory()); categoryXref.setProduct(adminInstance); if (adminInstance.getDefaultCategory() != null && !adminInstance.getAllParentCategoryXrefs().contains(categoryXref)) { categoryXref = (CategoryProductXref) 
dynamicEntityDao.merge(categoryXref); adminInstance.getAllParentCategoryXrefs().add(categoryXref); } //Since none of the Sku fields are required, it's possible that the user did not fill out //any Sku fields, and thus a Sku would not be created. Product still needs a default Sku so instantiate one if (adminInstance.getDefaultSku() == null) { Sku newSku = catalogService.createSku(); adminInstance.setDefaultSku(newSku); adminInstance = (Product) dynamicEntityDao.merge(adminInstance); } //also set the default product for the Sku adminInstance.getDefaultSku().setDefaultProduct(adminInstance); dynamicEntityDao.merge(adminInstance.getDefaultSku()); return helper.getRecord(adminProperties, adminInstance, null, null); } catch (Exception e) { throw new ServiceException("Unable to add entity for " + entity.getType()[0], e); } } @Override public Entity update(PersistencePackage persistencePackage, DynamicEntityDao dynamicEntityDao, RecordHelper helper) throws ServiceException { Entity entity = persistencePackage.getEntity(); try { PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective(); Map<String, FieldMetadata> adminProperties = helper.getSimpleMergedProperties(Product.class.getName(), persistencePerspective); Object primaryKey = helper.getPrimaryKey(entity, adminProperties); Product adminInstance = (Product) dynamicEntityDao.retrieve(Class.forName(entity.getType()[0]), primaryKey); if (adminInstance instanceof ProductBundle) { removeBundleFieldRestrictions((ProductBundle)adminInstance, adminProperties, entity); } adminInstance = (Product) helper.createPopulatedInstance(adminInstance, entity, adminProperties, false); adminInstance = (Product) dynamicEntityDao.merge(adminInstance); CategoryProductXref categoryXref = new CategoryProductXrefImpl(); categoryXref.setCategory(adminInstance.getDefaultCategory()); categoryXref.setProduct(adminInstance); if (adminInstance.getDefaultCategory() != null && 
!adminInstance.getAllParentCategoryXrefs().contains(categoryXref)) { adminInstance.getAllParentCategoryXrefs().add(categoryXref); } return helper.getRecord(adminProperties, adminInstance, null, null); } catch (Exception e) { throw new ServiceException("Unable to update entity for " + entity.getType()[0], e); } } /** * If the pricing model is of type item_sum, that property should not be required * @param adminInstance * @param adminProperties * @param entity */ protected void removeBundleFieldRestrictions(ProductBundle adminInstance, Map<String, FieldMetadata> adminProperties, Entity entity) { //no required validation for product bundles if (entity.getPMap().get("pricingModel") != null) { if (ProductBundlePricingModelType.ITEM_SUM.getType().equals(entity.getPMap().get("pricingModel").getValue())) { ((BasicFieldMetadata)adminProperties.get("defaultSku.retailPrice")).setRequiredOverride(false); } } } }
1no label
admin_broadleaf-admin-module_src_main_java_org_broadleafcommerce_admin_server_service_handler_ProductCustomPersistenceHandler.java
343
@RunWith(HazelcastParallelClassRunner.class) @Category(QuickTest.class) public class ClientMapTryLockConcurrentTests { static HazelcastInstance client; static HazelcastInstance server; @BeforeClass public static void init() { server = Hazelcast.newHazelcastInstance(); client = HazelcastClient.newHazelcastClient(); } @AfterClass public static void destroy() { HazelcastClient.shutdownAll(); Hazelcast.shutdownAll(); } @Test public void concurrent_MapTryLockTest() throws InterruptedException { concurrent_MapTryLock(false); } @Test public void concurrent_MapTryLockTimeOutTest() throws InterruptedException { concurrent_MapTryLock(true); } private void concurrent_MapTryLock(boolean withTimeOut) throws InterruptedException { final int maxThreads = 8; final IMap<String, Integer> map = client.getMap(randomString()); final String upKey = "upKey"; final String downKey = "downKey"; map.put(upKey, 0); map.put(downKey, 0); Thread threads[] = new Thread[maxThreads]; for ( int i=0; i< threads.length; i++ ) { Thread t; if(withTimeOut){ t = new MapTryLockTimeOutThread(map, upKey, downKey); }else{ t = new MapTryLockThread(map, upKey, downKey); } t.start(); threads[i] = t; } assertJoinable(threads); int upTotal = map.get(upKey); int downTotal = map.get(downKey); assertTrue("concurrent access to locked code caused wrong total", upTotal + downTotal == 0); } static class MapTryLockThread extends TestHelper { public MapTryLockThread(IMap map, String upKey, String downKey){ super(map, upKey, downKey); } public void doRun() throws Exception{ if(map.tryLock(upKey)){ try{ if(map.tryLock(downKey)){ try { work(); }finally { map.unlock(downKey); } } }finally { map.unlock(upKey); } } } } static class MapTryLockTimeOutThread extends TestHelper { public MapTryLockTimeOutThread(IMap map, String upKey, String downKey){ super(map, upKey, downKey); } public void doRun() throws Exception{ if(map.tryLock(upKey, 1, TimeUnit.MILLISECONDS)){ try{ if(map.tryLock(downKey, 1, TimeUnit.MILLISECONDS )){ try { 
work(); }finally { map.unlock(downKey); } } }finally { map.unlock(upKey); } } } } static abstract class TestHelper extends Thread { protected static final int ITERATIONS = 1000*10; protected final Random random = new Random(); protected final IMap<String, Integer> map; protected final String upKey; protected final String downKey; public TestHelper(IMap map, String upKey, String downKey){ this.map = map; this.upKey = upKey; this.downKey = downKey; } public void run() { try{ for ( int i=0; i < ITERATIONS; i++ ) { doRun(); } }catch(Exception e){ throw new RuntimeException("Test Thread crashed with ", e); } } abstract void doRun()throws Exception; public void work(){ int upTotal = map.get(upKey); int downTotal = map.get(downKey); int dif = random.nextInt(1000); upTotal += dif; downTotal -= dif; map.put(upKey, upTotal); map.put(downKey, downTotal); } } }
0true
hazelcast-client_src_test_java_com_hazelcast_client_map_ClientMapTryLockConcurrentTests.java
500
indexStateService.closeIndex(updateRequest, new ClusterStateUpdateListener() { @Override public void onResponse(ClusterStateUpdateResponse response) { listener.onResponse(new CloseIndexResponse(response.isAcknowledged())); } @Override public void onFailure(Throwable t) { logger.debug("failed to close indices [{}]", t, request.indices()); listener.onFailure(t); } });
1no label
src_main_java_org_elasticsearch_action_admin_indices_close_TransportCloseIndexAction.java
475
public class GetAliasesAction extends IndicesAction<GetAliasesRequest, GetAliasesResponse, GetAliasesRequestBuilder> { public static final GetAliasesAction INSTANCE = new GetAliasesAction(); public static final String NAME = "indices/get/aliases"; private GetAliasesAction() { super(NAME); } @Override public GetAliasesRequestBuilder newRequestBuilder(IndicesAdminClient client) { return new GetAliasesRequestBuilder(client); } @Override public GetAliasesResponse newResponse() { return new GetAliasesResponse(); } }
0true
src_main_java_org_elasticsearch_action_admin_indices_alias_get_GetAliasesAction.java
2,137
public class Lucene { public static final Version VERSION = Version.LUCENE_46; public static final Version ANALYZER_VERSION = VERSION; public static final Version QUERYPARSER_VERSION = VERSION; public static final NamedAnalyzer STANDARD_ANALYZER = new NamedAnalyzer("_standard", AnalyzerScope.GLOBAL, new StandardAnalyzer(ANALYZER_VERSION)); public static final NamedAnalyzer KEYWORD_ANALYZER = new NamedAnalyzer("_keyword", AnalyzerScope.GLOBAL, new KeywordAnalyzer()); public static final int NO_DOC = -1; public static ScoreDoc[] EMPTY_SCORE_DOCS = new ScoreDoc[0]; @SuppressWarnings("deprecation") public static Version parseVersion(@Nullable String version, Version defaultVersion, ESLogger logger) { if (version == null) { return defaultVersion; } if ("4.6".equals(version)) { return VERSION.LUCENE_46; } if ("4.5".equals(version)) { return VERSION.LUCENE_45; } if ("4.4".equals(version)) { return VERSION.LUCENE_44; } if ("4.3".equals(version)) { return Version.LUCENE_43; } if ("4.2".equals(version)) { return Version.LUCENE_42; } if ("4.1".equals(version)) { return Version.LUCENE_41; } if ("4.0".equals(version)) { return Version.LUCENE_40; } if ("3.6".equals(version)) { return Version.LUCENE_36; } if ("3.5".equals(version)) { return Version.LUCENE_35; } if ("3.4".equals(version)) { return Version.LUCENE_34; } if ("3.3".equals(version)) { return Version.LUCENE_33; } if ("3.2".equals(version)) { return Version.LUCENE_32; } if ("3.1".equals(version)) { return Version.LUCENE_31; } if ("3.0".equals(version)) { return Version.LUCENE_30; } logger.warn("no version match {}, default to {}", version, defaultVersion); return defaultVersion; } /** * Reads the segments infos, failing if it fails to load */ public static SegmentInfos readSegmentInfos(Directory directory) throws IOException { final SegmentInfos sis = new SegmentInfos(); sis.read(directory); return sis; } public static long count(IndexSearcher searcher, Query query) throws IOException { TotalHitCountCollector 
countCollector = new TotalHitCountCollector(); // we don't need scores, so wrap it in a constant score query if (!(query instanceof ConstantScoreQuery)) { query = new ConstantScoreQuery(query); } searcher.search(query, countCollector); return countCollector.getTotalHits(); } /** * Closes the index writer, returning <tt>false</tt> if it failed to close. */ public static boolean safeClose(IndexWriter writer) { if (writer == null) { return true; } try { writer.close(); return true; } catch (Throwable e) { return false; } } public static TopDocs readTopDocs(StreamInput in) throws IOException { if (!in.readBoolean()) { // no docs return null; } if (in.readBoolean()) { int totalHits = in.readVInt(); float maxScore = in.readFloat(); SortField[] fields = new SortField[in.readVInt()]; for (int i = 0; i < fields.length; i++) { String field = null; if (in.readBoolean()) { field = in.readString(); } fields[i] = new SortField(field, readSortType(in), in.readBoolean()); } FieldDoc[] fieldDocs = new FieldDoc[in.readVInt()]; for (int i = 0; i < fieldDocs.length; i++) { Comparable[] cFields = new Comparable[in.readVInt()]; for (int j = 0; j < cFields.length; j++) { byte type = in.readByte(); if (type == 0) { cFields[j] = null; } else if (type == 1) { cFields[j] = in.readString(); } else if (type == 2) { cFields[j] = in.readInt(); } else if (type == 3) { cFields[j] = in.readLong(); } else if (type == 4) { cFields[j] = in.readFloat(); } else if (type == 5) { cFields[j] = in.readDouble(); } else if (type == 6) { cFields[j] = in.readByte(); } else if (type == 7) { cFields[j] = in.readShort(); } else if (type == 8) { cFields[j] = in.readBoolean(); } else if (type == 9) { cFields[j] = in.readBytesRef(); } else { throw new IOException("Can't match type [" + type + "]"); } } fieldDocs[i] = new FieldDoc(in.readVInt(), in.readFloat(), cFields); } return new TopFieldDocs(totalHits, fieldDocs, fields, maxScore); } else { int totalHits = in.readVInt(); float maxScore = in.readFloat(); 
ScoreDoc[] scoreDocs = new ScoreDoc[in.readVInt()]; for (int i = 0; i < scoreDocs.length; i++) { scoreDocs[i] = new ScoreDoc(in.readVInt(), in.readFloat()); } return new TopDocs(totalHits, scoreDocs, maxScore); } } public static void writeTopDocs(StreamOutput out, TopDocs topDocs, int from) throws IOException { if (topDocs.scoreDocs.length - from < 0) { out.writeBoolean(false); return; } out.writeBoolean(true); if (topDocs instanceof TopFieldDocs) { out.writeBoolean(true); TopFieldDocs topFieldDocs = (TopFieldDocs) topDocs; out.writeVInt(topDocs.totalHits); out.writeFloat(topDocs.getMaxScore()); out.writeVInt(topFieldDocs.fields.length); for (SortField sortField : topFieldDocs.fields) { if (sortField.getField() == null) { out.writeBoolean(false); } else { out.writeBoolean(true); out.writeString(sortField.getField()); } if (sortField.getComparatorSource() != null) { writeSortType(out, ((IndexFieldData.XFieldComparatorSource) sortField.getComparatorSource()).reducedType()); } else { writeSortType(out, sortField.getType()); } out.writeBoolean(sortField.getReverse()); } out.writeVInt(topDocs.scoreDocs.length - from); int index = 0; for (ScoreDoc doc : topFieldDocs.scoreDocs) { if (index++ < from) { continue; } FieldDoc fieldDoc = (FieldDoc) doc; out.writeVInt(fieldDoc.fields.length); for (Object field : fieldDoc.fields) { if (field == null) { out.writeByte((byte) 0); } else { Class type = field.getClass(); if (type == String.class) { out.writeByte((byte) 1); out.writeString((String) field); } else if (type == Integer.class) { out.writeByte((byte) 2); out.writeInt((Integer) field); } else if (type == Long.class) { out.writeByte((byte) 3); out.writeLong((Long) field); } else if (type == Float.class) { out.writeByte((byte) 4); out.writeFloat((Float) field); } else if (type == Double.class) { out.writeByte((byte) 5); out.writeDouble((Double) field); } else if (type == Byte.class) { out.writeByte((byte) 6); out.writeByte((Byte) field); } else if (type == Short.class) { 
out.writeByte((byte) 7); out.writeShort((Short) field); } else if (type == Boolean.class) { out.writeByte((byte) 8); out.writeBoolean((Boolean) field); } else if (type == BytesRef.class) { out.writeByte((byte) 9); out.writeBytesRef((BytesRef) field); } else { throw new IOException("Can't handle sort field value of type [" + type + "]"); } } } out.writeVInt(doc.doc); out.writeFloat(doc.score); } } else { out.writeBoolean(false); out.writeVInt(topDocs.totalHits); out.writeFloat(topDocs.getMaxScore()); out.writeVInt(topDocs.scoreDocs.length - from); int index = 0; for (ScoreDoc doc : topDocs.scoreDocs) { if (index++ < from) { continue; } out.writeVInt(doc.doc); out.writeFloat(doc.score); } } } // LUCENE 4 UPGRADE: We might want to maintain our own ordinal, instead of Lucene's ordinal public static SortField.Type readSortType(StreamInput in) throws IOException { return SortField.Type.values()[in.readVInt()]; } public static void writeSortType(StreamOutput out, SortField.Type sortType) throws IOException { out.writeVInt(sortType.ordinal()); } public static Explanation readExplanation(StreamInput in) throws IOException { float value = in.readFloat(); String description = in.readString(); Explanation explanation = new Explanation(value, description); if (in.readBoolean()) { int size = in.readVInt(); for (int i = 0; i < size; i++) { explanation.addDetail(readExplanation(in)); } } return explanation; } public static void writeExplanation(StreamOutput out, Explanation explanation) throws IOException { out.writeFloat(explanation.getValue()); out.writeString(explanation.getDescription()); Explanation[] subExplanations = explanation.getDetails(); if (subExplanations == null) { out.writeBoolean(false); } else { out.writeBoolean(true); out.writeVInt(subExplanations.length); for (Explanation subExp : subExplanations) { writeExplanation(out, subExp); } } } public static class ExistsCollector extends Collector { private boolean exists; public void reset() { exists = false; } public 
boolean exists() { return exists; } @Override public void setScorer(Scorer scorer) throws IOException { this.exists = false; } @Override public void collect(int doc) throws IOException { exists = true; } @Override public void setNextReader(AtomicReaderContext context) throws IOException { } @Override public boolean acceptsDocsOutOfOrder() { return true; } } private Lucene() { } public static final boolean indexExists(final Directory directory) throws IOException { return DirectoryReader.indexExists(directory); } }
1no label
src_main_java_org_elasticsearch_common_lucene_Lucene.java
248
@Test public class ODefaultCacheTest { public void enabledAfterStartup() { // Given cache created // And not started // And not enabled OCache sut = newCache(); // When started sut.startup(); // Then it should be enabled assertTrue(sut.isEnabled()); } public void disabledAfterShutdown() { // Given running cache OCache sut = runningCache(); // When started sut.shutdown(); // Then it should be disabled assertFalse(sut.isEnabled()); } public void disablesOnlyIfWasEnabled() { // Given enabled cache OCache sut = enabledCache(); // When disabled more than once boolean disableConfirmed = sut.disable(); boolean disableNotConfirmed = sut.disable(); // Then should return confirmation of switching from enabled to disabled state for first time // And no confirmation on subsequent disables assertTrue(disableConfirmed); assertFalse(disableNotConfirmed); } public void enablesOnlyIfWasDisabled() { // Given disabled cache OCache sut = newCache(); // When enabled more than once boolean enableConfirmed = sut.enable(); boolean enableNotConfirmed = sut.enable(); // Then should return confirmation of switching from disabled to enabled state for first time // And no confirmation on subsequent enables assertTrue(enableConfirmed); assertFalse(enableNotConfirmed); } public void doesNothingWhileDisabled() { // Given cache created // And not started // And not enabled OCache sut = new ODefaultCache(null, 1); // When any operation called on it ODocument record = new ODocument(); ORID recordId = record.getIdentity(); sut.put(record); ORecordInternal<?> recordGot = sut.get(recordId); int cacheSizeAfterPut = sut.size(); ORecordInternal<?> recordRemoved = sut.remove(recordId); int cacheSizeAfterRemove = sut.size(); // Then it has no effect on cache's state assertEquals(sut.isEnabled(), false, "Cache should be disabled at creation"); assertEquals(recordGot, null, "Cache should return empty records while disabled"); assertEquals(recordRemoved, null, "Cache should return empty records while 
disabled"); assertEquals(cacheSizeAfterPut, 0, "Cache should ignore insert while disabled"); assertEquals(cacheSizeAfterRemove, cacheSizeAfterPut, "Cache should ignore remove while disabled"); } public void hasZeroSizeAfterClear() { // Given enabled non-empty cache OCache sut = enabledNonEmptyCache(); // When cleared sut.clear(); // Then size of cache should be zero assertEquals(sut.size(), 0, "Cache was not cleaned up"); } public void providesAccessToAllKeysInCache() { // Given enabled non-empty cache OCache sut = enabledNonEmptyCache(); // When asked for keys Collection<ORID> keys = sut.keys(); // Then keys count should be same as size of cache // And records available for keys assertEquals(keys.size(), sut.size(), "Cache provided not all keys?"); for (ORID key : keys) { assertNotNull(sut.get(key)); } } public void storesRecordsUsingTheirIdentity() { // Given an enabled cache OCache sut = enabledCache(); // When new record put into ORecordId id = new ORecordId(1, OClusterPositionFactory.INSTANCE.valueOf(1)); ODocument record = new ODocument(id); sut.put(record); // Then it can be retrieved later by it's id assertEquals(sut.get(id), record); } public void storesRecordsOnlyOnceForEveryIdentity() { // Given an enabled cache OCache sut = enabledCache(); final int initialSize = sut.size(); // When some records with same identity put in several times ODocument first = new ODocument(new ORecordId(1, OClusterPositionFactory.INSTANCE.valueOf(1))); ODocument last = new ODocument(new ORecordId(1, OClusterPositionFactory.INSTANCE.valueOf(1))); sut.put(first); sut.put(last); // Then cache ends up storing only one item assertEquals(sut.size(), initialSize + 1); } public void removesOnlyOnce() { // Given an enabled cache with records in it OCache sut = enabledCache(); ORecordId id = new ORecordId(1, OClusterPositionFactory.INSTANCE.valueOf(1)); ODocument record = new ODocument(id); sut.put(record); sut.remove(id); // When removing already removed record ORecordInternal<?> 
removedSecond = sut.remove(id); // Then empty result returned assertNull(removedSecond); } public void storesNoMoreElementsThanSpecifiedLimit() { // Given an enabled cache OCache sut = enabledCache(); // When stored more distinct elements than cache limit allows for (int i = sut.limit() + 2; i > 0; i--) sut.put(new ODocument(new ORecordId(i, OClusterPositionFactory.INSTANCE.valueOf(i)))); // Then size of cache should be exactly as it's limit assertEquals(sut.size(), sut.limit(), "Cache doesn't meet limit requirements"); } private ODefaultCache newCache() { return new ODefaultCache(null, 5); } private OCache enabledCache() { ODefaultCache cache = newCache(); cache.enable(); return cache; } private OCache enabledNonEmptyCache() { OCache cache = enabledCache(); cache.put(new ODocument(new ORecordId(1, OClusterPositionFactory.INSTANCE.valueOf(1)))); cache.put(new ODocument(new ORecordId(2, OClusterPositionFactory.INSTANCE.valueOf(2)))); return cache; } private OCache runningCache() { ODefaultCache cache = newCache(); cache.startup(); return cache; } }
0true
core_src_test_java_com_orientechnologies_orient_core_cache_ODefaultCacheTest.java
1,322
new SingleSourceUnitPackage(pkg, sourceUnitFullPath), moduleManager, CeylonBuilder.getProjectTypeChecker(project), tokens, originalProject) { @Override protected boolean reuseExistingDescriptorModels() { return true; } };
1no label
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_core_model_JDTModule.java
253
public class OUnboundedWeakCache extends OAbstractMapCache<WeakHashMap<ORID, ORecordInternal<?>>> implements OCache { public OUnboundedWeakCache() { super(new WeakHashMap<ORID, ORecordInternal<?>>()); } @Override public int limit() { return Integer.MAX_VALUE; } }
1no label
core_src_main_java_com_orientechnologies_orient_core_cache_OUnboundedWeakCache.java
2,368
public class ByteSizeValue implements Serializable, Streamable { private long size; private ByteSizeUnit sizeUnit; private ByteSizeValue() { } public ByteSizeValue(long bytes) { this(bytes, ByteSizeUnit.BYTES); } public ByteSizeValue(long size, ByteSizeUnit sizeUnit) { this.size = size; this.sizeUnit = sizeUnit; } public int bytesAsInt() throws ElasticsearchIllegalArgumentException { long bytes = bytes(); if (bytes > Integer.MAX_VALUE) { throw new ElasticsearchIllegalArgumentException("size [" + toString() + "] is bigger than max int"); } return (int) bytes; } public long bytes() { return sizeUnit.toBytes(size); } public long getBytes() { return bytes(); } public long kb() { return sizeUnit.toKB(size); } public long getKb() { return kb(); } public long mb() { return sizeUnit.toMB(size); } public long getMb() { return mb(); } public long gb() { return sizeUnit.toGB(size); } public long getGb() { return gb(); } public long tb() { return sizeUnit.toTB(size); } public long getTb() { return tb(); } public long pb() { return sizeUnit.toPB(size); } public long getPb() { return pb(); } public double kbFrac() { return ((double) bytes()) / ByteSizeUnit.C1; } public double getKbFrac() { return kbFrac(); } public double mbFrac() { return ((double) bytes()) / ByteSizeUnit.C2; } public double getMbFrac() { return mbFrac(); } public double gbFrac() { return ((double) bytes()) / ByteSizeUnit.C3; } public double getGbFrac() { return gbFrac(); } public double tbFrac() { return ((double) bytes()) / ByteSizeUnit.C4; } public double getTbFrac() { return tbFrac(); } public double pbFrac() { return ((double) bytes()) / ByteSizeUnit.C5; } public double getPbFrac() { return pbFrac(); } @Override public String toString() { long bytes = bytes(); double value = bytes; String suffix = "b"; if (bytes >= ByteSizeUnit.C5) { value = pbFrac(); suffix = "pb"; } else if (bytes >= ByteSizeUnit.C4) { value = tbFrac(); suffix = "tb"; } else if (bytes >= ByteSizeUnit.C3) { value = gbFrac(); suffix = 
"gb"; } else if (bytes >= ByteSizeUnit.C2) { value = mbFrac(); suffix = "mb"; } else if (bytes >= ByteSizeUnit.C1) { value = kbFrac(); suffix = "kb"; } return Strings.format1Decimals(value, suffix); } public static ByteSizeValue parseBytesSizeValue(String sValue) throws ElasticsearchParseException { return parseBytesSizeValue(sValue, null); } public static ByteSizeValue parseBytesSizeValue(String sValue, ByteSizeValue defaultValue) throws ElasticsearchParseException { if (sValue == null) { return defaultValue; } long bytes; try { String lastTwoChars = sValue.substring(sValue.length() - Math.min(2, sValue.length())).toLowerCase(Locale.ROOT); if (lastTwoChars.endsWith("k")) { bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * ByteSizeUnit.C1); } else if (lastTwoChars.endsWith("kb")) { bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 2)) * ByteSizeUnit.C1); } else if (lastTwoChars.endsWith("m")) { bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * ByteSizeUnit.C2); } else if (lastTwoChars.endsWith("mb")) { bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 2)) * ByteSizeUnit.C2); } else if (lastTwoChars.endsWith("g")) { bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * ByteSizeUnit.C3); } else if (lastTwoChars.endsWith("gb")) { bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 2)) * ByteSizeUnit.C3); } else if (lastTwoChars.endsWith("t")) { bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * ByteSizeUnit.C4); } else if (lastTwoChars.endsWith("tb")) { bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 2)) * ByteSizeUnit.C4); } else if (lastTwoChars.endsWith("p")) { bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * ByteSizeUnit.C5); } else if (lastTwoChars.endsWith("pb")) { bytes = (long) (Double.parseDouble(sValue.substring(0, 
sValue.length() - 2)) * ByteSizeUnit.C5); } else if (lastTwoChars.endsWith("b")) { bytes = Long.parseLong(sValue.substring(0, sValue.length() - 1)); } else { bytes = Long.parseLong(sValue); } } catch (NumberFormatException e) { throw new ElasticsearchParseException("Failed to parse [" + sValue + "]", e); } return new ByteSizeValue(bytes, ByteSizeUnit.BYTES); } public static ByteSizeValue readBytesSizeValue(StreamInput in) throws IOException { ByteSizeValue sizeValue = new ByteSizeValue(); sizeValue.readFrom(in); return sizeValue; } @Override public void readFrom(StreamInput in) throws IOException { size = in.readVLong(); sizeUnit = ByteSizeUnit.BYTES; } @Override public void writeTo(StreamOutput out) throws IOException { out.writeVLong(bytes()); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ByteSizeValue sizeValue = (ByteSizeValue) o; if (size != sizeValue.size) return false; if (sizeUnit != sizeValue.sizeUnit) return false; return true; } @Override public int hashCode() { int result = (int) (size ^ (size >>> 32)); result = 31 * result + (sizeUnit != null ? sizeUnit.hashCode() : 0); return result; } }
1no label
src_main_java_org_elasticsearch_common_unit_ByteSizeValue.java
317
public class NodesHotThreadsAction extends ClusterAction<NodesHotThreadsRequest, NodesHotThreadsResponse, NodesHotThreadsRequestBuilder> { public static final NodesHotThreadsAction INSTANCE = new NodesHotThreadsAction(); public static final String NAME = "cluster/nodes/hot_threads"; private NodesHotThreadsAction() { super(NAME); } @Override public NodesHotThreadsResponse newResponse() { return new NodesHotThreadsResponse(); } @Override public NodesHotThreadsRequestBuilder newRequestBuilder(ClusterAdminClient client) { return new NodesHotThreadsRequestBuilder(client); } }
0true
src_main_java_org_elasticsearch_action_admin_cluster_node_hotthreads_NodesHotThreadsAction.java
245
assertTrueEventually(new AssertTask() { public void run() throws Exception { assertTrue(map.containsKey(member.getUuid())); } });
0true
hazelcast-client_src_test_java_com_hazelcast_client_executor_ClientExecutorServiceSubmitTest.java
30
public class EmbeddedBlueprintsTest extends AbstractCassandraBlueprintsTest { @Override protected WriteConfiguration getGraphConfig() { return CassandraStorageSetup.getEmbeddedGraphConfiguration(getClass().getSimpleName()); } @Override public void extraCleanUp(String uid) throws BackendException { ModifiableConfiguration mc = new ModifiableConfiguration(GraphDatabaseConfiguration.ROOT_NS, getGraphConfig(), Restriction.NONE); StoreManager m = new CassandraEmbeddedStoreManager(mc); m.clearStorage(); m.close(); } }
0true
titan-cassandra_src_test_java_com_thinkaurelius_titan_blueprints_EmbeddedBlueprintsTest.java
284
@SuppressWarnings("unchecked") public class OScriptDocumentDatabaseWrapper { protected ODatabaseDocumentTx database; public OScriptDocumentDatabaseWrapper(final ODatabaseDocumentTx database) { this.database = database; } public OScriptDocumentDatabaseWrapper(final ODatabaseRecordTx database) { this.database = new ODatabaseDocumentTx(database); } public OScriptDocumentDatabaseWrapper(final String iURL) { this.database = new ODatabaseDocumentTx(iURL); } public void switchUser(final String iUserName, final String iUserPassword) { if (!database.isClosed()) database.close(); database.open(iUserName, iUserPassword); } public OIdentifiable[] query(final String iText) { return query(iText, (Object[]) null); } public OIdentifiable[] query(final String iText, final Object... iParameters) { final List<OIdentifiable> res = database.query(new OSQLSynchQuery<Object>(iText), convertParameters(iParameters)); if (res == null) return new OIdentifiable[] {}; return res.toArray(new OIdentifiable[res.size()]); } /** * To maintain the compatibility with JS API. */ public Object executeCommand(final String iText) { return command(iText, (Object[]) null); } /** * To maintain the compatibility with JS API. */ public Object executeCommand(final String iText, final Object... iParameters) { return command(iText, iParameters); } public Object command(final String iText) { return command(iText, (Object[]) null); } public Object command(final String iText, final Object... iParameters) { Object res = database.command(new OCommandSQL(iText)).execute(convertParameters(iParameters)); if (res instanceof List) { final List<OIdentifiable> list = (List<OIdentifiable>) res; return list.toArray(new OIdentifiable[list.size()]); } return res; } public Object process(final String iType, final String iName, final Object... 
iParameters) { final OComposableProcessor process = (OComposableProcessor) OProcessorManager.getInstance().get(iType); if (process == null) throw new OProcessException("Process type '" + iType + "' is undefined"); final OBasicCommandContext context = new OBasicCommandContext(); if (iParameters != null) { int argIdx = 0; for (Object p : iParameters) context.setVariable("arg" + (argIdx++), p); } Object res; try { res = process.processFromFile(iName, context, false); } catch (Exception e) { throw new OProcessException("Error on processing '" + iName + "' field of '" + getName() + "' block", e); } return res; } public OIndex<?> getIndex(final String iName) { return database.getMetadata().getIndexManager().getIndex(iName); } public boolean exists() { return database.exists(); } public ODocument newInstance() { return database.newInstance(); } public void reload() { database.reload(); } public ODocument newInstance(String iClassName) { return database.newInstance(iClassName); } public ORecordIteratorClass<ODocument> browseClass(String iClassName) { return database.browseClass(iClassName); } public STATUS getStatus() { return database.getStatus(); } public ORecordIteratorClass<ODocument> browseClass(String iClassName, boolean iPolymorphic) { return database.browseClass(iClassName, iPolymorphic); } public <THISDB extends ODatabase> THISDB setStatus(STATUS iStatus) { return (THISDB) database.setStatus(iStatus); } public void drop() { database.drop(); } public String getName() { return database.getName(); } public int addCluster(String iType, String iClusterName, String iLocation, String iDataSegmentName, Object... 
iParameters) { return database.addCluster(iType, iClusterName, iLocation, iDataSegmentName, iParameters); } public String getURL() { return database.getURL(); } public ORecordIteratorCluster<ODocument> browseCluster(String iClusterName) { return database.browseCluster(iClusterName); } public boolean isClosed() { return database.isClosed(); } public <THISDB extends ODatabase> THISDB open(String iUserName, String iUserPassword) { return (THISDB) database.open(iUserName, iUserPassword); } public ODocument save(final Map<String, Object> iObject) { return database.save(new ODocument().fields(iObject)); } public ODocument save(final String iString) { // return database.save((ORecordInternal<?>) new ODocument().fromJSON(iString)); return database.save((ORecordInternal<?>) new ODocument().fromJSON(iString, true)); } public ODocument save(ORecordInternal<?> iRecord) { return database.save(iRecord); } public boolean dropCluster(String iClusterName, final boolean iTruncate) { return database.dropCluster(iClusterName, iTruncate); } public <THISDB extends ODatabase> THISDB create() { return (THISDB) database.create(); } public boolean dropCluster(int iClusterId, final boolean iTruncate) { return database.dropCluster(iClusterId, true); } public void close() { database.close(); } public int getClusters() { return database.getClusters(); } public Collection<String> getClusterNames() { return database.getClusterNames(); } public int addDataSegment(String iName, String iLocation) { return database.addDataSegment(iName, iLocation); } public String getClusterType(String iClusterName) { return database.getClusterType(iClusterName); } public OTransaction getTransaction() { return database.getTransaction(); } public int getDataSegmentIdByName(String iDataSegmentName) { return database.getDataSegmentIdByName(iDataSegmentName); } public ODatabaseComplex<ORecordInternal<?>> begin() { return database.begin(); } public String getDataSegmentNameById(int iDataSegmentId) { return 
database.getDataSegmentNameById(iDataSegmentId); } public int getClusterIdByName(String iClusterName) { return database.getClusterIdByName(iClusterName); } public boolean isMVCC() { return database.isMVCC(); } public String getClusterNameById(int iClusterId) { return database.getClusterNameById(iClusterId); } public <RET extends ODatabaseComplex<?>> RET setMVCC(boolean iValue) { return (RET) database.setMVCC(iValue); } public long getClusterRecordSizeById(int iClusterId) { return database.getClusterRecordSizeById(iClusterId); } public boolean isValidationEnabled() { return database.isValidationEnabled(); } public long getClusterRecordSizeByName(String iClusterName) { return database.getClusterRecordSizeByName(iClusterName); } public <RET extends ODatabaseRecord> RET setValidationEnabled(boolean iValue) { return (RET) database.setValidationEnabled(iValue); } public OUser getUser() { return database.getUser(); } public void setUser(OUser user) { database.setUser(user); } public ODocument save(ORecordInternal<?> iRecord, OPERATION_MODE iMode, boolean iForceCreate, final ORecordCallback<? 
extends Number> iRecordCreatedCallback, ORecordCallback<ORecordVersion> iRecordUpdatedCallback) { return database.save(iRecord, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback); } public OMetadata getMetadata() { return database.getMetadata(); } public ODictionary<ORecordInternal<?>> getDictionary() { return database.getDictionary(); } public byte getRecordType() { return database.getRecordType(); } public ODatabaseComplex<ORecordInternal<?>> delete(ORID iRid) { return database.delete(iRid); } public boolean dropDataSegment(String name) { return database.dropDataSegment(name); } public <RET extends ORecordInternal<?>> RET load(ORID iRecordId) { return (RET) database.load(iRecordId); } public <RET extends ORecordInternal<?>> RET load(ORID iRecordId, String iFetchPlan) { return (RET) database.load(iRecordId, iFetchPlan); } public <RET extends ORecordInternal<?>> RET load(ORID iRecordId, String iFetchPlan, boolean iIgnoreCache) { return (RET) database.load(iRecordId, iFetchPlan, iIgnoreCache); } public <RET extends ORecordInternal<?>> RET getRecord(OIdentifiable iIdentifiable) { return (RET) database.getRecord(iIdentifiable); } public int getDefaultClusterId() { return database.getDefaultClusterId(); } public <RET extends ORecordInternal<?>> RET load(ORecordInternal<?> iRecord) { return (RET) database.load(iRecord); } public boolean declareIntent(OIntent iIntent) { return database.declareIntent(iIntent); } public <RET extends ORecordInternal<?>> RET load(ORecordInternal<?> iRecord, String iFetchPlan) { return (RET) database.load(iRecord, iFetchPlan); } public <RET extends ORecordInternal<?>> RET load(ORecordInternal<?> iRecord, String iFetchPlan, boolean iIgnoreCache) { return (RET) database.load(iRecord, iFetchPlan, iIgnoreCache); } public ODatabaseComplex<?> setDatabaseOwner(ODatabaseComplex<?> iOwner) { return database.setDatabaseOwner(iOwner); } public void reload(ORecordInternal<?> iRecord) { database.reload(iRecord); } public void 
reload(ORecordInternal<?> iRecord, String iFetchPlan, boolean iIgnoreCache) { database.reload(iRecord, iFetchPlan, iIgnoreCache); } public Object setProperty(String iName, Object iValue) { return database.setProperty(iName, iValue); } public ODocument save(ORecordInternal<?> iRecord, String iClusterName) { return database.save(iRecord, iClusterName); } public Object getProperty(String iName) { return database.getProperty(iName); } public Iterator<Entry<String, Object>> getProperties() { return database.getProperties(); } public Object get(ATTRIBUTES iAttribute) { return database.get(iAttribute); } public <THISDB extends ODatabase> THISDB set(ATTRIBUTES attribute, Object iValue) { return (THISDB) database.set(attribute, iValue); } public void setInternal(ATTRIBUTES attribute, Object iValue) { database.setInternal(attribute, iValue); } public boolean isRetainRecords() { return database.isRetainRecords(); } public ODatabaseRecord setRetainRecords(boolean iValue) { return database.setRetainRecords(iValue); } public long getSize() { return database.getSize(); } public ORecordInternal<?> getRecordByUserObject(Object iUserObject, boolean iCreateIfNotAvailable) { return database.getRecordByUserObject(iUserObject, iCreateIfNotAvailable); } public ODocument save(ORecordInternal<?> iRecord, String iClusterName, OPERATION_MODE iMode, boolean iForceCreate, final ORecordCallback<? 
extends Number> iRecordCreatedCallback, ORecordCallback<ORecordVersion> iRecordUpdatedCallback) { return database.save(iRecord, iClusterName, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback); } public ODataSegmentStrategy getDataSegmentStrategy() { return database.getDataSegmentStrategy(); } public void setDataSegmentStrategy(ODataSegmentStrategy dataSegmentStrategy) { database.setDataSegmentStrategy(dataSegmentStrategy); } public ODatabaseDocumentTx delete(ODocument iRecord) { return database.delete(iRecord); } public long countClass(String iClassName) { return database.countClass(iClassName); } public ODatabaseComplex<ORecordInternal<?>> commit() { return database.commit(); } public ODatabaseComplex<ORecordInternal<?>> rollback() { return database.rollback(); } public String getType() { return database.getType(); } protected Object[] convertParameters(final Object[] iParameters) { if (iParameters != null) for (int i = 0; i < iParameters.length; ++i) { final Object p = iParameters[i]; if (p != null) { // if (p instanceof sun.org.mozilla.javascript.internal.IdScriptableObject) { // iParameters[i] = ((sun.org.mozilla.javascript.internal.NativeDate) p).to; // } } } return iParameters; } }
0true
core_src_main_java_com_orientechnologies_orient_core_command_script_OScriptDocumentDatabaseWrapper.java
2,571
clusterService.submitStateUpdateTask("zen-disco-join (elected_as_master)", Priority.URGENT, new ProcessedClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) { DiscoveryNodes.Builder builder = new DiscoveryNodes.Builder() .localNodeId(localNode.id()) .masterNodeId(localNode.id()) // put our local node .put(localNode); // update the fact that we are the master... latestDiscoNodes = builder.build(); ClusterBlocks clusterBlocks = ClusterBlocks.builder().blocks(currentState.blocks()).removeGlobalBlock(NO_MASTER_BLOCK).build(); return ClusterState.builder(currentState).nodes(latestDiscoNodes).blocks(clusterBlocks).build(); } @Override public void onFailure(String source, Throwable t) { logger.error("unexpected failure during [{}]", t, source); } @Override public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { sendInitialStateEventIfNeeded(); } });
1no label
src_main_java_org_elasticsearch_discovery_zen_ZenDiscovery.java
275
public class JMSEmailServiceProducerImpl implements JMSEmailServiceProducer { private JmsTemplate emailServiceTemplate; private Destination emailServiceDestination; public void send(@SuppressWarnings("rawtypes") final HashMap props) { emailServiceTemplate.send(emailServiceDestination, new MessageCreator() { public Message createMessage(Session session) throws JMSException { ObjectMessage message = session.createObjectMessage(props); EmailInfo info = (EmailInfo) props.get(EmailPropertyType.INFO.getType()); message.setJMSPriority(Integer.parseInt(info.getSendAsyncPriority())); return message; } }); } /** * @return the emailServiceTemplate */ public JmsTemplate getEmailServiceTemplate() { return emailServiceTemplate; } /** * @param emailServiceTemplate the emailServiceTemplate to set */ public void setEmailServiceTemplate(JmsTemplate emailServiceTemplate) { this.emailServiceTemplate = emailServiceTemplate; } /** * @return the emailServiceDestination */ public Destination getEmailServiceDestination() { return emailServiceDestination; } /** * @param emailServiceDestination the emailServiceDestination to set */ public void setEmailServiceDestination(Destination emailServiceDestination) { this.emailServiceDestination = emailServiceDestination; } }
1no label
common_src_main_java_org_broadleafcommerce_common_email_service_jms_JMSEmailServiceProducerImpl.java
1,654
md.accept(new MetadataVisitor() { @Override public void visit(BasicFieldMetadata fmd) { request.setType(Type.STANDARD); request.setCeilingEntityClassname(fmd.getForeignKeyClass()); } @Override public void visit(BasicCollectionMetadata fmd) { ForeignKey foreignKey = (ForeignKey) fmd.getPersistencePerspective() .getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.FOREIGNKEY); request.setType(Type.STANDARD); request.setCeilingEntityClassname(fmd.getCollectionCeilingEntity()); request.setOperationTypesOverride(fmd.getPersistencePerspective().getOperationTypes()); request.setForeignKey(foreignKey); } @Override public void visit(AdornedTargetCollectionMetadata fmd) { AdornedTargetList adornedList = (AdornedTargetList) fmd.getPersistencePerspective() .getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.ADORNEDTARGETLIST); request.setType(Type.ADORNED); request.setCeilingEntityClassname(fmd.getCollectionCeilingEntity()); request.setOperationTypesOverride(fmd.getPersistencePerspective().getOperationTypes()); request.setAdornedList(adornedList); } @Override public void visit(MapMetadata fmd) { MapStructure mapStructure = (MapStructure) fmd.getPersistencePerspective() .getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.MAPSTRUCTURE); ForeignKey foreignKey = (ForeignKey) fmd.getPersistencePerspective(). getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.FOREIGNKEY); request.setType(Type.MAP); request.setCeilingEntityClassname(foreignKey.getForeignKeyClass()); request.setOperationTypesOverride(fmd.getPersistencePerspective().getOperationTypes()); request.setMapStructure(mapStructure); request.setForeignKey(foreignKey); } });
1no label
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_domain_PersistencePackageRequest.java
559
private static final ToXContent.Params includeDefaultsParams = new ToXContent.Params() { final static String INCLUDE_DEFAULTS = "include_defaults"; @Override public String param(String key) { if (INCLUDE_DEFAULTS.equals(key)) { return "true"; } return null; } @Override public String param(String key, String defaultValue) { if (INCLUDE_DEFAULTS.equals(key)) { return "true"; } return defaultValue; } @Override public boolean paramAsBoolean(String key, boolean defaultValue) { if (INCLUDE_DEFAULTS.equals(key)) { return true; } return defaultValue; } public Boolean paramAsBoolean(String key, Boolean defaultValue) { if (INCLUDE_DEFAULTS.equals(key)) { return true; } return defaultValue; } @Override @Deprecated public Boolean paramAsBooleanOptional(String key, Boolean defaultValue) { return paramAsBoolean(key, defaultValue); } };
1no label
src_main_java_org_elasticsearch_action_admin_indices_mapping_get_TransportGetFieldMappingsAction.java
46
public class OCaseInsentiveComparator implements Comparator<String> { public int compare(final String stringOne, final String stringTwo) { return stringOne.compareToIgnoreCase(stringTwo); } }
0true
commons_src_main_java_com_orientechnologies_common_comparator_OCaseInsentiveComparator.java
1,353
tokenStream.getTokens()) { @Override protected boolean reuseExistingDescriptorModels() { return true; } };
1no label
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_core_model_ProjectSourceFile.java
262
@Inherited @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.TYPE) @TestGroup(enabled = true, sysProperty = SYSPROP_INTEGRATION) public @interface IntegrationTests { }
0true
src_test_java_org_apache_lucene_util_AbstractRandomizedTest.java
86
public class GloballySharedInputStream extends InputStream { private InputStream parentInputStream; public GloballySharedInputStream(InputStream parentInputStream) { this.parentInputStream = parentInputStream; } public int available() throws IOException { return parentInputStream.available(); } public void close() throws IOException { parentInputStream.close(); } public void mark(int arg0) { parentInputStream.mark(arg0); } public boolean markSupported() { return parentInputStream.markSupported(); } public int read() throws IOException { return parentInputStream.read(); } public int read(byte[] arg0, int arg1, int arg2) throws IOException { return parentInputStream.read(arg0, arg1, arg2); } public int read(byte[] arg0) throws IOException { return parentInputStream.read(arg0); } public void reset() throws IOException { parentInputStream.reset(); } public long skip(long arg0) throws IOException { return parentInputStream.skip(arg0); } }
0true
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_file_service_GloballySharedInputStream.java
371
future.andThen(new ExecutionCallback<Integer>() { @Override public void onResponse(Integer response) { try { result[0] = response.intValue(); } finally { semaphore.release(); } } @Override public void onFailure(Throwable t) { semaphore.release(); } });
0true
hazelcast-client_src_test_java_com_hazelcast_client_mapreduce_DistributedMapperClientMapReduceTest.java
1,395
public class MetaDataCreateIndexService extends AbstractComponent { private final Environment environment; private final ThreadPool threadPool; private final ClusterService clusterService; private final IndicesService indicesService; private final AllocationService allocationService; private final MetaDataService metaDataService; private final Version version; private final String riverIndexName; @Inject public MetaDataCreateIndexService(Settings settings, Environment environment, ThreadPool threadPool, ClusterService clusterService, IndicesService indicesService, AllocationService allocationService, MetaDataService metaDataService, Version version, @RiverIndexName String riverIndexName) { super(settings); this.environment = environment; this.threadPool = threadPool; this.clusterService = clusterService; this.indicesService = indicesService; this.allocationService = allocationService; this.metaDataService = metaDataService; this.version = version; this.riverIndexName = riverIndexName; } public void createIndex(final CreateIndexClusterStateUpdateRequest request, final ClusterStateUpdateListener listener) { ImmutableSettings.Builder updatedSettingsBuilder = ImmutableSettings.settingsBuilder(); for (Map.Entry<String, String> entry : request.settings().getAsMap().entrySet()) { if (!entry.getKey().startsWith("index.")) { updatedSettingsBuilder.put("index." 
+ entry.getKey(), entry.getValue()); } else { updatedSettingsBuilder.put(entry.getKey(), entry.getValue()); } } request.settings(updatedSettingsBuilder.build()); // we lock here, and not within the cluster service callback since we don't want to // block the whole cluster state handling final Semaphore mdLock = metaDataService.indexMetaDataLock(request.index()); // quick check to see if we can acquire a lock, otherwise spawn to a thread pool if (mdLock.tryAcquire()) { createIndex(request, listener, mdLock); return; } threadPool.executor(ThreadPool.Names.MANAGEMENT).execute(new Runnable() { @Override public void run() { try { if (!mdLock.tryAcquire(request.masterNodeTimeout().nanos(), TimeUnit.NANOSECONDS)) { listener.onFailure(new ProcessClusterEventTimeoutException(request.masterNodeTimeout(), "acquire index lock")); return; } } catch (InterruptedException e) { Thread.interrupted(); listener.onFailure(e); return; } createIndex(request, listener, mdLock); } }); } public void validateIndexName(String index, ClusterState state) throws ElasticsearchException { if (state.routingTable().hasIndex(index)) { throw new IndexAlreadyExistsException(new Index(index)); } if (state.metaData().hasIndex(index)) { throw new IndexAlreadyExistsException(new Index(index)); } if (!Strings.validFileName(index)) { throw new InvalidIndexNameException(new Index(index), index, "must not contain the following characters " + Strings.INVALID_FILENAME_CHARS); } if (index.contains("#")) { throw new InvalidIndexNameException(new Index(index), index, "must not contain '#'"); } if (!index.equals(riverIndexName) && index.charAt(0) == '_') { throw new InvalidIndexNameException(new Index(index), index, "must not start with '_'"); } if (!index.toLowerCase(Locale.ROOT).equals(index)) { throw new InvalidIndexNameException(new Index(index), index, "must be lowercase"); } if (state.metaData().aliases().containsKey(index)) { throw new InvalidIndexNameException(new Index(index), index, "already exists as 
alias"); } } private void createIndex(final CreateIndexClusterStateUpdateRequest request, final ClusterStateUpdateListener listener, final Semaphore mdLock) { clusterService.submitStateUpdateTask("create-index [" + request.index() + "], cause [" + request.cause() + "]", Priority.URGENT, new AckedClusterStateUpdateTask() { @Override public boolean mustAck(DiscoveryNode discoveryNode) { return true; } @Override public void onAllNodesAcked(@Nullable Throwable t) { mdLock.release(); listener.onResponse(new ClusterStateUpdateResponse(true)); } @Override public void onAckTimeout() { mdLock.release(); listener.onResponse(new ClusterStateUpdateResponse(false)); } @Override public TimeValue ackTimeout() { return request.ackTimeout(); } @Override public TimeValue timeout() { return request.masterNodeTimeout(); } @Override public void onFailure(String source, Throwable t) { mdLock.release(); listener.onFailure(t); } @Override public ClusterState execute(ClusterState currentState) throws Exception { boolean indexCreated = false; String failureReason = null; try { validate(request, currentState); // we only find a template when its an API call (a new index) // find templates, highest order are better matching List<IndexTemplateMetaData> templates = findTemplates(request, currentState); Map<String, Custom> customs = Maps.newHashMap(); // add the request mapping Map<String, Map<String, Object>> mappings = Maps.newHashMap(); for (Map.Entry<String, String> entry : request.mappings().entrySet()) { mappings.put(entry.getKey(), parseMapping(entry.getValue())); } for (Map.Entry<String, Custom> entry : request.customs().entrySet()) { customs.put(entry.getKey(), entry.getValue()); } // apply templates, merging the mappings into the request mapping if exists for (IndexTemplateMetaData template : templates) { for (ObjectObjectCursor<String, CompressedString> cursor : template.mappings()) { if (mappings.containsKey(cursor.key)) { XContentHelper.mergeDefaults(mappings.get(cursor.key), 
parseMapping(cursor.value.string())); } else { mappings.put(cursor.key, parseMapping(cursor.value.string())); } } // handle custom for (ObjectObjectCursor<String, Custom> cursor : template.customs()) { String type = cursor.key; IndexMetaData.Custom custom = cursor.value; IndexMetaData.Custom existing = customs.get(type); if (existing == null) { customs.put(type, custom); } else { IndexMetaData.Custom merged = IndexMetaData.lookupFactorySafe(type).merge(existing, custom); customs.put(type, merged); } } } // now add config level mappings File mappingsDir = new File(environment.configFile(), "mappings"); if (mappingsDir.exists() && mappingsDir.isDirectory()) { // first index level File indexMappingsDir = new File(mappingsDir, request.index()); if (indexMappingsDir.exists() && indexMappingsDir.isDirectory()) { addMappings(mappings, indexMappingsDir); } // second is the _default mapping File defaultMappingsDir = new File(mappingsDir, "_default"); if (defaultMappingsDir.exists() && defaultMappingsDir.isDirectory()) { addMappings(mappings, defaultMappingsDir); } } ImmutableSettings.Builder indexSettingsBuilder = settingsBuilder(); // apply templates, here, in reverse order, since first ones are better matching for (int i = templates.size() - 1; i >= 0; i--) { indexSettingsBuilder.put(templates.get(i).settings()); } // now, put the request settings, so they override templates indexSettingsBuilder.put(request.settings()); if (indexSettingsBuilder.get(SETTING_NUMBER_OF_SHARDS) == null) { if (request.index().equals(riverIndexName)) { indexSettingsBuilder.put(SETTING_NUMBER_OF_SHARDS, settings.getAsInt(SETTING_NUMBER_OF_SHARDS, 1)); } else { indexSettingsBuilder.put(SETTING_NUMBER_OF_SHARDS, settings.getAsInt(SETTING_NUMBER_OF_SHARDS, 5)); } } if (indexSettingsBuilder.get(SETTING_NUMBER_OF_REPLICAS) == null) { if (request.index().equals(riverIndexName)) { indexSettingsBuilder.put(SETTING_NUMBER_OF_REPLICAS, settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, 1)); } else { 
indexSettingsBuilder.put(SETTING_NUMBER_OF_REPLICAS, settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, 1)); } } if (settings.get(SETTING_AUTO_EXPAND_REPLICAS) != null && indexSettingsBuilder.get(SETTING_AUTO_EXPAND_REPLICAS) == null) { indexSettingsBuilder.put(SETTING_AUTO_EXPAND_REPLICAS, settings.get(SETTING_AUTO_EXPAND_REPLICAS)); } if (indexSettingsBuilder.get(SETTING_VERSION_CREATED) == null) { indexSettingsBuilder.put(SETTING_VERSION_CREATED, version); } indexSettingsBuilder.put(SETTING_UUID, Strings.randomBase64UUID()); Settings actualIndexSettings = indexSettingsBuilder.build(); // Set up everything, now locally create the index to see that things are ok, and apply // create the index here (on the master) to validate it can be created, as well as adding the mapping indicesService.createIndex(request.index(), actualIndexSettings, clusterService.localNode().id()); indexCreated = true; // now add the mappings IndexService indexService = indicesService.indexServiceSafe(request.index()); MapperService mapperService = indexService.mapperService(); // first, add the default mapping if (mappings.containsKey(MapperService.DEFAULT_MAPPING)) { try { mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedString(XContentFactory.jsonBuilder().map(mappings.get(MapperService.DEFAULT_MAPPING)).string()), false); } catch (Exception e) { failureReason = "failed on parsing default mapping on index creation"; throw new MapperParsingException("mapping [" + MapperService.DEFAULT_MAPPING + "]", e); } } for (Map.Entry<String, Map<String, Object>> entry : mappings.entrySet()) { if (entry.getKey().equals(MapperService.DEFAULT_MAPPING)) { continue; } try { // apply the default here, its the first time we parse it mapperService.merge(entry.getKey(), new CompressedString(XContentFactory.jsonBuilder().map(entry.getValue()).string()), true); } catch (Exception e) { failureReason = "failed on parsing mappings on index creation"; throw new MapperParsingException("mapping [" + 
entry.getKey() + "]", e); } } // now, update the mappings with the actual source Map<String, MappingMetaData> mappingsMetaData = Maps.newHashMap(); for (DocumentMapper mapper : mapperService) { MappingMetaData mappingMd = new MappingMetaData(mapper); mappingsMetaData.put(mapper.type(), mappingMd); } final IndexMetaData.Builder indexMetaDataBuilder = IndexMetaData.builder(request.index()).settings(actualIndexSettings); for (MappingMetaData mappingMd : mappingsMetaData.values()) { indexMetaDataBuilder.putMapping(mappingMd); } for (Map.Entry<String, Custom> customEntry : customs.entrySet()) { indexMetaDataBuilder.putCustom(customEntry.getKey(), customEntry.getValue()); } indexMetaDataBuilder.state(request.state()); final IndexMetaData indexMetaData; try { indexMetaData = indexMetaDataBuilder.build(); } catch (Exception e) { failureReason = "failed to build index metadata"; throw e; } MetaData newMetaData = MetaData.builder(currentState.metaData()) .put(indexMetaData, false) .build(); logger.info("[{}] creating index, cause [{}], shards [{}]/[{}], mappings {}", request.index(), request.cause(), indexMetaData.numberOfShards(), indexMetaData.numberOfReplicas(), mappings.keySet()); ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); if (!request.blocks().isEmpty()) { for (ClusterBlock block : request.blocks()) { blocks.addIndexBlock(request.index(), block); } } if (request.state() == State.CLOSE) { blocks.addIndexBlock(request.index(), MetaDataIndexStateService.INDEX_CLOSED_BLOCK); } ClusterState updatedState = ClusterState.builder(currentState).blocks(blocks).metaData(newMetaData).build(); if (request.state() == State.OPEN) { RoutingTable.Builder routingTableBuilder = RoutingTable.builder(updatedState.routingTable()) .addAsNew(updatedState.metaData().index(request.index())); RoutingAllocation.Result routingResult = allocationService.reroute(ClusterState.builder(updatedState).routingTable(routingTableBuilder).build()); updatedState = 
ClusterState.builder(updatedState).routingResult(routingResult).build(); } return updatedState; } finally { if (indexCreated) { // Index was already partially created - need to clean up indicesService.removeIndex(request.index(), failureReason != null ? failureReason : "failed to create index"); } } } @Override public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { } }); } private Map<String, Object> parseMapping(String mappingSource) throws Exception { return XContentFactory.xContent(mappingSource).createParser(mappingSource).mapAndClose(); } private void addMappings(Map<String, Map<String, Object>> mappings, File mappingsDir) { File[] mappingsFiles = mappingsDir.listFiles(); for (File mappingFile : mappingsFiles) { if (mappingFile.isHidden()) { continue; } int lastDotIndex = mappingFile.getName().lastIndexOf('.'); String mappingType = lastDotIndex != -1 ? mappingFile.getName().substring(0, lastDotIndex) : mappingFile.getName(); try { String mappingSource = Streams.copyToString(new InputStreamReader(new FileInputStream(mappingFile), Charsets.UTF_8)); if (mappings.containsKey(mappingType)) { XContentHelper.mergeDefaults(mappings.get(mappingType), parseMapping(mappingSource)); } else { mappings.put(mappingType, parseMapping(mappingSource)); } } catch (Exception e) { logger.warn("failed to read / parse mapping [" + mappingType + "] from location [" + mappingFile + "], ignoring...", e); } } } private List<IndexTemplateMetaData> findTemplates(CreateIndexClusterStateUpdateRequest request, ClusterState state) { List<IndexTemplateMetaData> templates = Lists.newArrayList(); for (ObjectCursor<IndexTemplateMetaData> cursor : state.metaData().templates().values()) { IndexTemplateMetaData template = cursor.value; if (Regex.simpleMatch(template.template(), request.index())) { templates.add(template); } } // see if we have templates defined under config File templatesDir = new File(environment.configFile(), "templates"); if 
(templatesDir.exists() && templatesDir.isDirectory()) { File[] templatesFiles = templatesDir.listFiles(); if (templatesFiles != null) { for (File templatesFile : templatesFiles) { XContentParser parser = null; try { byte[] templatesData = Streams.copyToByteArray(templatesFile); parser = XContentHelper.createParser(templatesData, 0, templatesData.length); IndexTemplateMetaData template = IndexTemplateMetaData.Builder.fromXContent(parser); if (Regex.simpleMatch(template.template(), request.index())) { templates.add(template); } } catch (Exception e) { logger.warn("[{}] failed to read template [{}] from config", e, request.index(), templatesFile.getAbsolutePath()); } finally { IOUtils.closeWhileHandlingException(parser); } } } } CollectionUtil.timSort(templates, new Comparator<IndexTemplateMetaData>() { @Override public int compare(IndexTemplateMetaData o1, IndexTemplateMetaData o2) { return o2.order() - o1.order(); } }); return templates; } private void validate(CreateIndexClusterStateUpdateRequest request, ClusterState state) throws ElasticsearchException { validateIndexName(request.index(), state); } }
1no label
src_main_java_org_elasticsearch_cluster_metadata_MetaDataCreateIndexService.java
2
/**
 * Exhaustively explores the state space of message-delivery orderings for a
 * three-instance cluster, recording every reachable {@link ClusterState} in a
 * proof database and exporting the transition graph in GraphViz format.
 */
public class Prover {

    // FIFO frontier of states that are known but not yet expanded (breadth-first search).
    private final Queue<ClusterState> unexploredKnownStates = new LinkedList<>( );

    // Persistent store of every known state and transition; backs duplicate-state detection.
    private final ProofDatabase db = new ProofDatabase("./clusterproof");

    public static void main(String ... args) throws Exception {
        new Prover().prove();
    }

    /**
     * Entry point: seeds the genesis state, runs exhaustive exploration, then
     * exports the transition graph. The database is shut down even on failure.
     */
    public void prove() throws Exception {
        try {
            System.out.println("Bootstrap genesis state..");
            bootstrapCluster();
            System.out.println("Begin exploring delivery orders.");
            exploreUnexploredStates();
            System.out.println("Exporting graphviz..");
            db.export(new GraphVizExporter(new File("./proof.gs")));
        } finally {
            db.shutdown();
        }
        // Generate .svg :
        // dot -Tsvg proof.gs -o proof.svg
    }

    /**
     * Builds an initial three-instance cluster (instance 3 creates it, then 2
     * and 1 join), schedules a crash of instance 3 as a pending action, and
     * seeds both the exploration frontier and the proof database with the
     * resulting state.
     */
    private void bootstrapCluster() throws Exception {
        Logging logging = new TestLogging();

        String instance1 = "cluster://localhost:5001";
        String instance2 = "cluster://localhost:5002";
        String instance3 = "cluster://localhost:5003";

        ClusterConfiguration config = new ClusterConfiguration( "default",
                logging.getMessagesLog( ClusterConfiguration.class ),
                instance1, instance2, instance3 );

        ClusterState state = new ClusterState(
                asList(
                        newClusterInstance( new InstanceId( 1 ), new URI( instance1 ), config, logging ),
                        newClusterInstance( new InstanceId( 2 ), new URI( instance2 ), config, logging ),
                        newClusterInstance( new InstanceId( 3 ), new URI( instance3 ), config, logging )),
                emptySetOf( ClusterAction.class ));

        // Deliver the bootstrap conversation: instance 3 creates the cluster,
        // then instances 2 and 1 join pointing at instance 3.
        state = state.performAction( new MessageDeliveryAction( Message.to( ClusterMessage.create, new URI( instance3 ),
                "defaultcluster" ).setHeader( Message.CONVERSATION_ID, "-1" ).setHeader( Message.FROM, instance3 ) ) );
        state = state.performAction( new MessageDeliveryAction( Message.to( ClusterMessage.join, new URI( instance2 ),
                new Object[]{"defaultcluster", new URI[]{new URI( instance3 )}} ).setHeader( Message.CONVERSATION_ID,
                "-1" ).setHeader( Message.FROM, instance2 ) ) );
        state = state.performAction( new MessageDeliveryAction( Message.to( ClusterMessage.join, new URI( instance1 ),
                new Object[]{"defaultcluster", new URI[]{new URI( instance3 )}} ).setHeader( Message.CONVERSATION_ID,
                "-1" ).setHeader( Message.FROM, instance1 ) ) );

        // Inject the failure whose consequences the proof is about.
        state.addPendingActions( new InstanceCrashedAction( instance3 ) );

        unexploredKnownStates.add( state );

        db.newState( state );
    }

    /**
     * Breadth-first expansion: pop a state, enumerate every possible next
     * transition, and enqueue each successor not already in the database.
     * Dead-end states are reported as they are discovered.
     */
    private void exploreUnexploredStates() {
        while(!unexploredKnownStates.isEmpty()) {
            ClusterState state = unexploredKnownStates.poll();

            Iterator<Pair<ClusterAction, ClusterState>> newStates = state.transitions();
            while(newStates.hasNext()) {
                Pair<ClusterAction, ClusterState> next = newStates.next();
                // Progress indicator: "<total known states> (<frontier size>)".
                System.out.println( db.numberOfKnownStates() + " ("+unexploredKnownStates.size()+")" );

                ClusterState nextState = next.other();
                if(!db.isKnownState( nextState )) {
                    db.newStateTransition( state, next );
                    unexploredKnownStates.offer( nextState );

                    if(nextState.isDeadEnd()) {
                        System.out.println("DEAD END: " + nextState.toString() +
                                " (" + db.id(nextState) + ")");
                    }
                }
            }
        }
    }
}
1no label
enterprise_ha_src_test_java_org_neo4j_ha_correctness_Prover.java
1,031
@Entity @Inheritance(strategy = InheritanceType.JOINED) @Table(name="BLC_ORDER_ITEM_ATTRIBUTE") @Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blOrderElements") @AdminPresentationClass(friendlyName = "OrderItemAttributeImpl_baseProductAttribute") public class OrderItemAttributeImpl implements OrderItemAttribute { public static final Log LOG = LogFactory.getLog(OrderItemAttributeImpl.class); private static final long serialVersionUID = 1L; @Id @GeneratedValue(generator= "OrderItemAttributeId") @GenericGenerator( name="OrderItemAttributeId", strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator", parameters = { @Parameter(name="segment_value", value="OrderItemAttributeImpl"), @Parameter(name="entity_name", value="org.broadleafcommerce.core.catalog.domain.OrderItemAttributeImpl") } ) @Column(name = "ORDER_ITEM_ATTRIBUTE_ID") protected Long id; @Column(name = "NAME", nullable=false) @AdminPresentation(visibility = VisibilityEnum.HIDDEN_ALL) protected String name; @Column(name = "VALUE", nullable=false) @AdminPresentation(friendlyName = "OrderItemAttributeImpl_Attribute_Value", order=2, group = "OrderItemAttributeImpl_Description", prominent=true) protected String value; @ManyToOne(targetEntity = OrderItemImpl.class, optional=false) @JoinColumn(name = "ORDER_ITEM_ID") protected OrderItem orderItem; @Override public Long getId() { return id; } @Override public void setId(Long id) { this.id = id; } @Override public String getValue() { return value; } @Override public void setValue(String value) { this.value = value; } @Override public String getName() { return name; } @Override public void setName(String name) { this.name = name; } @Override public String toString() { return value; } @Override public OrderItem getOrderItem() { return orderItem; } @Override public void setOrderItem(OrderItem orderItem) { this.orderItem = orderItem; } public void checkCloneable(OrderItemAttribute itemAttribute) throws CloneNotSupportedException, 
SecurityException, NoSuchMethodException { Method cloneMethod = itemAttribute.getClass().getMethod("clone", new Class[]{}); if (cloneMethod.getDeclaringClass().getName().startsWith("org.broadleafcommerce") && !itemAttribute.getClass().getName().startsWith("org.broadleafcommerce")) { //subclass is not implementing the clone method throw new CloneNotSupportedException("Custom extensions and implementations should implement clone in order to guarantee split and merge operations are performed accurately"); } } @Override public OrderItemAttribute clone() { //instantiate from the fully qualified name via reflection OrderItemAttribute itemAttribute; try { itemAttribute = (OrderItemAttribute) Class.forName(this.getClass().getName()).newInstance(); try { checkCloneable(itemAttribute); } catch (CloneNotSupportedException e) { LOG.warn("Clone implementation missing in inheritance hierarchy outside of Broadleaf: " + itemAttribute.getClass().getName(), e); } itemAttribute.setName(name); itemAttribute.setOrderItem(orderItem); itemAttribute.setValue(value); } catch (Exception e) { throw new RuntimeException(e); } return itemAttribute; } @Override public int hashCode() { return value.hashCode(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; if (value == null) { return false; } return value.equals(((OrderItemAttribute) obj).getValue()); } }
1no label
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_domain_OrderItemAttributeImpl.java
5,133
/**
 * Base class for aggregation results that travel between nodes: every concrete
 * aggregation implements wire serialization ({@link Streamable}), rendering
 * ({@link ToXContent}) and shard-result reduction ({@link #reduce(ReduceContext)}).
 */
public abstract class InternalAggregation implements Aggregation, ToXContent, Streamable {

    /**
     * The aggregation type that holds all the string types that are associated with an aggregation:
     * <ul>
     *     <li>name - used as the parser type</li>
     *     <li>stream - used as the stream type</li>
     * </ul>
     */
    public static class Type {

        private String name;
        private BytesReference stream;

        public Type(String name) {
            // Default: the stream identifier is the same as the parser name.
            this(name, new BytesArray(name));
        }

        public Type(String name, String stream) {
            this(name, new BytesArray(stream));
        }

        public Type(String name, BytesReference stream) {
            this.name = name;
            this.stream = stream;
        }

        /**
         * @return The name of the type (mainly used for registering the parser for the aggregator (see {@link org.elasticsearch.search.aggregations.Aggregator.Parser#type()}).
         */
        public String name() {
            return name;
        }

        /**
         * @return  The name of the stream type (used for registering the aggregation stream
         *          (see {@link AggregationStreams#registerStream(AggregationStreams.Stream, org.elasticsearch.common.bytes.BytesReference...)}).
         */
        public BytesReference stream() {
            return stream;
        }
    }

    /**
     * Carries the per-shard aggregation instances to be merged plus a recycler
     * for temporary data structures used during the merge.
     */
    protected static class ReduceContext {

        private final List<InternalAggregation> aggregations;
        private final CacheRecycler cacheRecycler;

        public ReduceContext(List<InternalAggregation> aggregations, CacheRecycler cacheRecycler) {
            this.aggregations = aggregations;
            this.cacheRecycler = cacheRecycler;
        }

        public List<InternalAggregation> aggregations() {
            return aggregations;
        }

        public CacheRecycler cacheRecycler() {
            return cacheRecycler;
        }
    }

    // The aggregation's name as requested by the user; also the JSON key in responses.
    protected String name;

    /** Constructs an un initialized addAggregation (used for serialization) **/
    protected InternalAggregation() {}

    /**
     * Constructs an get with a given name.
     *
     * @param name The name of the get.
     */
    protected InternalAggregation(String name) {
        this.name = name;
    }

    @Override
    public String getName() {
        return name;
    }

    /**
     * @return The {@link Type} of this aggregation
     */
    public abstract Type type();

    /**
     * Reduces the given addAggregation to a single one and returns it. In <b>most</b> cases, the assumption will be the all given
     * addAggregation are of the same type (the same type as this aggregation). For best efficiency, when implementing,
     * try reusing an existing get instance (typically the first in the given list) to save on redundant object
     * construction.
     */
    public abstract InternalAggregation reduce(ReduceContext reduceContext);

    /**
     * Common xcontent fields that are shared among addAggregation
     */
    public static final class CommonFields {
        public static final XContentBuilderString BUCKETS = new XContentBuilderString("buckets");
        public static final XContentBuilderString VALUE = new XContentBuilderString("value");
        public static final XContentBuilderString VALUE_AS_STRING = new XContentBuilderString("value_as_string");
        public static final XContentBuilderString DOC_COUNT = new XContentBuilderString("doc_count");
        public static final XContentBuilderString KEY = new XContentBuilderString("key");
        public static final XContentBuilderString KEY_AS_STRING = new XContentBuilderString("key_as_string");
        public static final XContentBuilderString FROM = new XContentBuilderString("from");
        public static final XContentBuilderString FROM_AS_STRING = new XContentBuilderString("from_as_string");
        public static final XContentBuilderString TO = new XContentBuilderString("to");
        public static final XContentBuilderString TO_AS_STRING = new XContentBuilderString("to_as_string");
    }
}
1no label
src_main_java_org_elasticsearch_search_aggregations_InternalAggregation.java
397
/**
 * Master-node transport action that answers "which shards (and on which nodes)
 * would a search over these indices hit", without executing any search.
 */
public class TransportClusterSearchShardsAction extends TransportMasterNodeReadOperationAction<ClusterSearchShardsRequest, ClusterSearchShardsResponse> {

    @Inject
    public TransportClusterSearchShardsAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool) {
        super(settings, transportService, clusterService, threadPool);
    }

    @Override
    protected String transportAction() {
        return ClusterSearchShardsAction.NAME;
    }

    @Override
    protected String executor() {
        // all in memory work here...
        return ThreadPool.Names.SAME;
    }

    @Override
    protected ClusterSearchShardsRequest newRequest() {
        return new ClusterSearchShardsRequest();
    }

    @Override
    protected ClusterSearchShardsResponse newResponse() {
        return new ClusterSearchShardsResponse();
    }

    /**
     * Resolves the request's index expressions and routing values against the
     * current cluster state, collects the shard routings a search would use,
     * and returns them grouped per shard together with the involved nodes.
     */
    @Override
    protected void masterOperation(final ClusterSearchShardsRequest request, final ClusterState state, final ActionListener<ClusterSearchShardsResponse> listener) throws ElasticsearchException {
        ClusterState clusterState = clusterService.state();
        String[] concreteIndices = clusterState.metaData().concreteIndices(request.indices(), request.indicesOptions());
        Map<String, Set<String>> routingMap = clusterState.metaData().resolveSearchRouting(request.routing(), request.indices());
        Set<String> nodeIds = newHashSet();
        GroupShardsIterator groupShardsIterator = clusterService.operationRouting().searchShards(clusterState, request.indices(), concreteIndices, routingMap, request.preference());
        ShardRouting shard;
        ClusterSearchShardsGroup[] groupResponses = new ClusterSearchShardsGroup[groupShardsIterator.size()];
        int currentGroup = 0;
        for (ShardIterator shardIt : groupShardsIterator) {
            String index = shardIt.shardId().getIndex();
            int shardId = shardIt.shardId().getId();
            ShardRouting[] shardRoutings = new ShardRouting[shardIt.size()];
            int currentShard = 0;
            // Reset before draining: the iterator may already have been advanced.
            shardIt.reset();
            while ((shard = shardIt.nextOrNull()) != null) {
                shardRoutings[currentShard++] = shard;
                // Remember every node that hosts one of the returned copies.
                nodeIds.add(shard.currentNodeId());
            }
            groupResponses[currentGroup++] = new ClusterSearchShardsGroup(index, shardId, shardRoutings);
        }
        // Materialize the node set into DiscoveryNode objects for the response.
        DiscoveryNode[] nodes = new DiscoveryNode[nodeIds.size()];
        int currentNode = 0;
        for (String nodeId : nodeIds) {
            nodes[currentNode++] = clusterState.getNodes().get(nodeId);
        }
        listener.onResponse(new ClusterSearchShardsResponse(groupResponses, nodes));
    }
}
1no label
src_main_java_org_elasticsearch_action_admin_cluster_shards_TransportClusterSearchShardsAction.java
40
static final class JDKModuleProposal extends CompletionProposal { private final String name; JDKModuleProposal(int offset, String prefix, int len, String versioned, String name) { super(offset, prefix, MODULE, versioned, versioned.substring(len)); this.name = name; } @Override public String getAdditionalProposalInfo() { return getDocumentationForModule(name, JDKUtils.jdk.version, "This module forms part of the Java SDK.", null, null); } @Override protected boolean qualifiedNameIsPath() { return true; } }
0true
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_ModuleCompletions.java
15
/**
 * Reverse-order view of a range of the backing tree. All bound checks are
 * expressed in the backing map's (ascending) coordinate system inherited from
 * NavigableSubMap; only iteration order and the navigation primitives
 * (lowest/highest/ceiling/floor/higher/lower) are mirrored.
 */
static final class DescendingSubMap<K, V> extends NavigableSubMap<K, V> {
    private static final long serialVersionUID = 912986545866120460L;

    // Exposed comparator is the reverse of the backing map's comparator.
    private final Comparator<? super K> reverseComparator = Collections.reverseOrder(m.comparator);

    DescendingSubMap(final OMVRBTree<K, V> m, final boolean fromStart, final K lo, final boolean loInclusive, final boolean toEnd,
            final K hi, final boolean hiInclusive) {
        super(m, fromStart, lo, loInclusive, toEnd, hi, hiInclusive);
    }

    public Comparator<? super K> comparator() {
        return reverseComparator;
    }

    // Note: fromKey/toKey are in THIS view's (descending) order, so they map
    // to (toKey..fromKey) in the backing map's ascending coordinates.
    public ONavigableMap<K, V> subMap(final K fromKey, final boolean fromInclusive, final K toKey, final boolean toInclusive) {
        if (!inRange(fromKey, fromInclusive))
            throw new IllegalArgumentException("fromKey out of range");
        if (!inRange(toKey, toInclusive))
            throw new IllegalArgumentException("toKey out of range");
        return new DescendingSubMap<K, V>(m, false, toKey, toInclusive, false, fromKey, fromInclusive);
    }

    public ONavigableMap<K, V> headMap(final K toKey, final boolean inclusive) {
        if (!inRange(toKey, inclusive))
            throw new IllegalArgumentException("toKey out of range");
        return new DescendingSubMap<K, V>(m, false, toKey, inclusive, toEnd, hi, hiInclusive);
    }

    public ONavigableMap<K, V> tailMap(final K fromKey, final boolean inclusive) {
        if (!inRange(fromKey, inclusive))
            throw new IllegalArgumentException("fromKey out of range");
        return new DescendingSubMap<K, V>(m, fromStart, lo, loInclusive, false, fromKey, inclusive);
    }

    // Descending-of-descending is the ascending view; lazily cached.
    public ONavigableMap<K, V> descendingMap() {
        ONavigableMap<K, V> mv = descendingMapView;
        return (mv != null) ? mv : (descendingMapView = new AscendingSubMap<K, V>(m, fromStart, lo, loInclusive, toEnd, hi,
                hiInclusive));
    }

    @Override
    OLazyIterator<K> keyIterator() {
        // Forward iteration of this view walks the backing map backwards.
        return new DescendingSubMapKeyIterator(absHighest(), absLowFence());
    }

    @Override
    OLazyIterator<K> descendingKeyIterator() {
        return new SubMapKeyIterator(absLowest(), absHighFence());
    }

    final class DescendingEntrySetView extends EntrySetView {
        @Override
        public Iterator<Map.Entry<K, V>> iterator() {
            return new DescendingSubMapEntryIterator(absHighest(), absLowFence());
        }
    }

    @Override
    public Set<Map.Entry<K, V>> entrySet() {
        // NOTE(review): unlike descendingMap(), the freshly created view is
        // not stored back into entrySetView, so a new view is allocated on
        // every call while the cache stays null — confirm whether caching
        // was intended here (java.util.TreeMap caches it).
        EntrySetView es = entrySetView;
        return (es != null) ? es : new DescendingEntrySetView();
    }

    // Navigation primitives below are the mirror images of the ascending view.
    @Override
    OMVRBTreeEntry<K, V> subLowest() {
        return absHighest().entry;
    }

    @Override
    OMVRBTreeEntry<K, V> subHighest() {
        return absLowest().entry;
    }

    @Override
    OMVRBTreeEntry<K, V> subCeiling(final K key) {
        return absFloor(key).entry;
    }

    @Override
    OMVRBTreeEntry<K, V> subHigher(final K key) {
        return absLower(key).entry;
    }

    @Override
    OMVRBTreeEntry<K, V> subFloor(final K key) {
        return absCeiling(key).entry;
    }

    @Override
    OMVRBTreeEntry<K, V> subLower(final K key) {
        return absHigher(key).entry;
    }
}
0true
commons_src_main_java_com_orientechnologies_common_collection_OMVRBTree.java
2,359
class KeyValueSourceFacade<K, V> extends KeyValueSource<K, V> { private static final int UPDATE_PROCESSED_RECORDS_INTERVAL = 1000; private final ILogger logger; private final KeyValueSource<K, V> keyValueSource; private final JobSupervisor supervisor; private int processedRecords; KeyValueSourceFacade(KeyValueSource<K, V> keyValueSource, JobSupervisor supervisor) { this.keyValueSource = keyValueSource; this.supervisor = supervisor; this.logger = supervisor.getMapReduceService().getNodeEngine().getLogger(KeyValueSourceFacade.class); } @Override public boolean open(NodeEngine nodeEngine) { return keyValueSource.open(nodeEngine); } @Override public boolean hasNext() { return keyValueSource.hasNext(); } @Override public K key() { K key = keyValueSource.key(); processedRecords++; if (processedRecords == UPDATE_PROCESSED_RECORDS_INTERVAL) { notifyProcessStats(); processedRecords = 0; } return key; } @Override public Map.Entry<K, V> element() { return keyValueSource.element(); } @Override public boolean reset() { processedRecords = 0; return keyValueSource.reset(); } @Override public boolean isAllKeysSupported() { return keyValueSource.isAllKeysSupported(); } @Override protected Collection<K> getAllKeys0() { return keyValueSource.getAllKeys(); } @Override public void close() throws IOException { notifyProcessStats(); keyValueSource.close(); } private void notifyProcessStats() { if (processedRecords > 0) { try { MapReduceService mapReduceService = supervisor.getMapReduceService(); String name = supervisor.getConfiguration().getName(); String jobId = supervisor.getConfiguration().getJobId(); Address jobOwner = supervisor.getJobOwner(); mapReduceService.processRequest(jobOwner, new ProcessStatsUpdateOperation(name, jobId, processedRecords), name); } catch (Exception ignore) { // Don't care if wasn't executed properly logger.finest("ProcessedRecords update couldn't be executed", ignore); } } } }
1no label
hazelcast_src_main_java_com_hazelcast_mapreduce_impl_task_KeyValueSourceFacade.java
232
public interface ModuleConfiguration extends Serializable { public Long getId(); public void setId(Long id); public String getModuleName(); public void setModuleName(String name); public void setActiveStartDate(Date startDate); public Date getActiveStartDate(); public void setActiveEndDate(Date startDate); public Date getActiveEndDate(); public void setIsDefault(Boolean isDefault); public Boolean getIsDefault(); public void setPriority(Integer priority); public Integer getPriority(); public ModuleConfigurationType getModuleConfigurationType(); public void setAuditable(Auditable auditable); public Auditable getAuditable(); }
0true
common_src_main_java_org_broadleafcommerce_common_config_domain_ModuleConfiguration.java
211
class HeartBeat implements Runnable { long begin; final int heartBeatTimeout = heartBeatInterval/2; @Override public void run() { if (!live) { return; } begin = Clock.currentTimeMillis(); final Map<ClientConnection, Future> futureMap = new HashMap<ClientConnection, Future>(); for (ClientConnection connection : connections.values()) { if (begin - connection.lastReadTime() > heartBeatTimeout) { final ClientPingRequest request = new ClientPingRequest(); final ICompletableFuture future = invocationService.send(request, connection); futureMap.put(connection, future); } else { connection.heartBeatingSucceed(); } } for (Map.Entry<ClientConnection, Future> entry : futureMap.entrySet()) { final Future future = entry.getValue(); final ClientConnection connection = entry.getKey(); try { future.get(getRemainingTimeout(), TimeUnit.MILLISECONDS); connection.heartBeatingSucceed(); } catch (Exception ignored) { connection.heartBeatingFailed(); } } } private long getRemainingTimeout() { long timeout = heartBeatTimeout - Clock.currentTimeMillis() + begin; return timeout < 0 ? 0 : timeout; } }
1no label
hazelcast-client_src_main_java_com_hazelcast_client_connection_nio_ClientConnectionManagerImpl.java
275
public interface OCommandRequestInternal extends OCommandRequest, OSerializableStream { public Map<Object, Object> getParameters(); public OCommandResultListener getResultListener(); public void setResultListener(OCommandResultListener iListener); public OProgressListener getProgressListener(); public OCommandRequestInternal setProgressListener(OProgressListener iProgressListener); public void reset(); }
0true
core_src_main_java_com_orientechnologies_orient_core_command_OCommandRequestInternal.java
1,351
public class ProjectSourceFile extends SourceFile implements IResourceAware { public ProjectSourceFile(ProjectPhasedUnit phasedUnit) { super(phasedUnit); } @Override public ProjectPhasedUnit getPhasedUnit() { return (ProjectPhasedUnit) super.getPhasedUnit(); } @Override public IProject getProjectResource() { return getPhasedUnit().getProjectResource(); } @Override public IFile getFileResource() { return getPhasedUnit().getSourceFileResource(); } @Override public IFolder getRootFolderResource() { return getPhasedUnit().getSourceFolderResource(); } public CompilationUnitDelta buildDeltaAgainstModel() { try { final ProjectPhasedUnit modelPhaseUnit = getPhasedUnit(); if (modelPhaseUnit != null) { final ResourceVirtualFile virtualSrcFile = ResourceVirtualFile.createResourceVirtualFile(modelPhaseUnit.getSourceFileResource()); final ResourceVirtualFile virtualSrcDir = ResourceVirtualFile.createResourceVirtualFile(modelPhaseUnit.getSourceFolderResource()); final TypeChecker currentTypechecker = modelPhaseUnit.getTypeChecker(); final ModuleManager currentModuleManager = currentTypechecker.getPhasedUnits().getModuleManager(); Package singleSourceUnitPackage = new SingleSourceUnitPackage(getPackage(), virtualSrcFile.getPath()); PhasedUnit lastPhasedUnit = new CeylonSourceParser<PhasedUnit>() { @Override protected String getCharset() { try { return modelPhaseUnit.getProjectResource().getDefaultCharset(); } catch (Exception e) { throw new RuntimeException(e); } } @SuppressWarnings("unchecked") @Override protected PhasedUnit createPhasedUnit(CompilationUnit cu, Package pkg, CommonTokenStream tokenStream) { return new PhasedUnit(virtualSrcFile, virtualSrcDir, cu, pkg, currentModuleManager, currentTypechecker.getContext(), tokenStream.getTokens()) { @Override protected boolean reuseExistingDescriptorModels() { return true; } }; } }.parseFileToPhasedUnit( currentModuleManager, currentTypechecker, virtualSrcFile, virtualSrcDir, singleSourceUnitPackage); if (lastPhasedUnit != null) { 
lastPhasedUnit.validateTree(); lastPhasedUnit.visitSrcModulePhase(); lastPhasedUnit.visitRemainingModulePhase(); lastPhasedUnit.scanDeclarations(); lastPhasedUnit.scanTypeDeclarations(); lastPhasedUnit.validateRefinement(); lastPhasedUnit.analyseFlow(); UnknownTypeCollector utc = new UnknownTypeCollector(); lastPhasedUnit.getCompilationUnit().visit(utc); if (lastPhasedUnit.getCompilationUnit().getErrors().isEmpty()) { return buildDeltas_.buildDeltas(modelPhaseUnit, lastPhasedUnit); } } } } catch(Exception e) { } catch(ceylon.language.AssertionError e) { e.printStackTrace(); } return null; } }
1no label
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_core_model_ProjectSourceFile.java
272
public class ServerInfo implements Serializable { private static final long serialVersionUID = 1L; private String serverName; private Integer serverPort; private Integer securePort; private String appName; public String getSecureHost() { StringBuffer sb = new StringBuffer(); sb.append(serverName); if (!securePort.equals("443")) { sb.append(":"); sb.append(securePort); } return sb.toString(); } public String getHost() { StringBuffer sb = new StringBuffer(); sb.append(serverName); if (!serverPort.equals("80")) { sb.append(":"); sb.append(serverPort); } return sb.toString(); } /** * @return the serverName */ public String getServerName() { return serverName; } /** * @param serverName the serverName to set */ public void setServerName(String serverName) { this.serverName = serverName; } /** * @return the serverPort */ public Integer getServerPort() { return serverPort; } /** * @param serverPort the serverPort to set */ public void setServerPort(Integer serverPort) { this.serverPort = serverPort; } /** * @return the securePort */ public Integer getSecurePort() { return securePort; } /** * @param securePort the securePort to set */ public void setSecurePort(Integer securePort) { this.securePort = securePort; } /** * @return the appName */ public String getAppName() { return appName; } /** * @param appName the appName to set */ public void setAppName(String appName) { this.appName = appName; } }
1no label
common_src_main_java_org_broadleafcommerce_common_email_service_info_ServerInfo.java
9
Runtime.getRuntime().addShutdownHook(new Thread() { public void run() { shutdownHBase(stat); } });
0true
titan-hbase-parent_titan-hbase-core_src_test_java_com_thinkaurelius_titan_HBaseStorageSetup.java
409
public class ClientAtomicReferenceProxy<E> extends ClientProxy implements IAtomicReference<E> { private final String name; private volatile Data key; public ClientAtomicReferenceProxy(String instanceName, String serviceName, String objectId) { super(instanceName, serviceName, objectId); this.name = objectId; } @Override public <R> R apply(IFunction<E, R> function) { isNotNull(function, "function"); return invoke(new ApplyRequest(name, toData(function))); } @Override public void alter(IFunction<E, E> function) { isNotNull(function, "function"); invoke(new AlterRequest(name, toData(function))); } @Override public E alterAndGet(IFunction<E, E> function) { isNotNull(function, "function"); return invoke(new AlterAndGetRequest(name, toData(function))); } @Override public E getAndAlter(IFunction<E, E> function) { isNotNull(function, "function"); return invoke(new GetAndAlterRequest(name, toData(function))); } @Override public boolean compareAndSet(E expect, E update) { return (Boolean) invoke(new CompareAndSetRequest(name, toData(expect), toData(update))); } @Override public boolean contains(E expected) { return (Boolean) invoke(new ContainsRequest(name, toData(expected))); } @Override public E get() { return invoke(new GetRequest(name)); } @Override public void set(E newValue) { invoke(new SetRequest(name, toData(newValue))); } @Override public void clear() { set(null); } @Override public E getAndSet(E newValue) { return invoke(new GetAndSetRequest(name, toData(newValue))); } @Override public E setAndGet(E update) { invoke(new SetRequest(name, toData(update))); return update; } @Override public boolean isNull() { return (Boolean) invoke(new IsNullRequest(name)); } @Override protected void onDestroy() { } protected <T> T invoke(ClientRequest req) { return super.invoke(req, getKey()); } private Data getKey() { if (key == null) { key = toData(name); } return key; } @Override public String toString() { return "IAtomicReference{" + "name='" + name + '\'' + '}'; } }
1no label
hazelcast-client_src_main_java_com_hazelcast_client_proxy_ClientAtomicReferenceProxy.java
2,588
public class ZenDiscoveryModule extends AbstractModule { private final List<Class<? extends UnicastHostsProvider>> unicastHostProviders = Lists.newArrayList(); /** * Adds a custom unicast hosts provider to build a dynamic list of unicast hosts list when doing unicast discovery. */ public ZenDiscoveryModule addUnicastHostProvider(Class<? extends UnicastHostsProvider> unicastHostProvider) { unicastHostProviders.add(unicastHostProvider); return this; } @Override protected void configure() { bind(ZenPingService.class).asEagerSingleton(); Multibinder<UnicastHostsProvider> unicastHostsProviderMultibinder = Multibinder.newSetBinder(binder(), UnicastHostsProvider.class); for (Class<? extends UnicastHostsProvider> unicastHostProvider : unicastHostProviders) { unicastHostsProviderMultibinder.addBinding().to(unicastHostProvider); } bindDiscovery(); } protected void bindDiscovery() { bind(Discovery.class).to(ZenDiscovery.class).asEagerSingleton(); } }
1no label
src_main_java_org_elasticsearch_discovery_zen_ZenDiscoveryModule.java
311
/**
 * Toggles JDT stratum line breakpoints for Ceylon source editors. Only line
 * breakpoints are supported; method breakpoints and watchpoints are declined.
 */
public class ToggleBreakpointAdapter implements IToggleBreakpointsTarget {

    private static final String JDT_DEBUG_PLUGIN_ID= "org.eclipse.jdt.debug";

    public ToggleBreakpointAdapter() {
    }

    /**
     * Toggles a line breakpoint at the caret line of the given text selection:
     * removes the breakpoint if one exists on that line, otherwise creates one.
     * Resolves the underlying IFile from either a workspace editor input or an
     * external source-archive input.
     */
    public void toggleLineBreakpoints(IWorkbenchPart part, ISelection selection) throws CoreException {
        if (selection instanceof ITextSelection) {
            ITextSelection textSel= (ITextSelection) selection;

            IEditorPart editorPart= (IEditorPart) part.getAdapter(IEditorPart.class);
            //TODO: handle org.eclipse.ui.ide.FileStoreEditorInput
            //      to set breakpoints in code from archives
            IEditorInput editorInput = editorPart.getEditorInput();
            final IFile origSrcFile;
            if (editorInput instanceof IFileEditorInput) {
                origSrcFile= ((IFileEditorInput)editorInput).getFile();
            } else if (editorInput instanceof FileStoreEditorInput) {
                // External archive source: map the URI back to a workspace resource.
                URI uri = ((FileStoreEditorInput) editorInput).getURI();
                IResource resource = ExternalSourceArchiveManager.toResource(uri);
                if (resource instanceof IFile) {
                    origSrcFile = (IFile) resource;
                } else {
                    origSrcFile = null;
                }
            } else {
                origSrcFile = null;
            }

            // Selections are 0-based; breakpoint markers are 1-based.
            final int lineNumber = textSel.getStartLine()+1;

            IWorkspaceRunnable wr= new IWorkspaceRunnable() {
                public void run(IProgressMonitor monitor) throws CoreException {
                    IMarker marker = findBreakpointMarker(origSrcFile, lineNumber);
                    if (marker != null) {
                        // The following will delete the associated marker
                        clearLineBreakpoint(origSrcFile, lineNumber);
                    } else {
                        // The following will create a marker as a side-effect
                        setLineBreakpoint(origSrcFile, lineNumber);
                    }
                }
            };
            try {
                getWorkspace().run(wr, null);
            } catch (CoreException e) {
                throw new DebugException(e.getStatus());
            }
        }
    }

    // Looks up an existing line-breakpoint marker on the given line, or null.
    private IMarker findBreakpointMarker(IFile srcFile, int lineNumber) throws CoreException {
        IMarker[] markers = srcFile.findMarkers(IBreakpoint.LINE_BREAKPOINT_MARKER, true, IResource.DEPTH_INFINITE);

        for (int k = 0; k < markers.length; k++ ){
            if (((Integer) markers[k].getAttribute(IMarker.LINE_NUMBER)).intValue() == lineNumber){
                return markers[k];
            }
        }
        return null;
    }

    /**
     * Creates a JDT stratum breakpoint on the given line. The "type name" is
     * the file name without its extension, passed to JDT via the breakpoint's
     * attribute map.
     */
    public void setLineBreakpoint(IFile file, int lineNumber) throws CoreException {
        String srcFileName= file.getName();
        String typeName= srcFileName.substring(0, srcFileName.lastIndexOf('.'));
        Map<String,Object> bkptAttributes= new HashMap<String, Object>();
        bkptAttributes.put("org.eclipse.jdt.debug.core.sourceName", srcFileName);
        bkptAttributes.put("org.eclipse.jdt.debug.core.typeName", typeName);

        try {
            JDIDebugModel.createStratumBreakpoint(file, null, srcFileName, null, null, lineNumber, -1, -1, 0, true, bkptAttributes);
        } catch (CoreException e) {
            e.printStackTrace();
        }
    }

    public void clearLineBreakpoint(IFile file, int lineNumber) throws CoreException {
        try {
            IBreakpoint lineBkpt= findStratumBreakpoint(file, lineNumber);
            if (lineBkpt != null) {
                lineBkpt.delete();
            }
        } catch (CoreException e) {
            e.printStackTrace();
        }
    }

    public void disableLineBreakpoint(IFile file, int lineNumber) throws CoreException {
        try {
            IBreakpoint lineBkpt= findStratumBreakpoint(file, lineNumber);
            if (lineBkpt != null) {
                lineBkpt.setEnabled(false);
            }
        } catch (CoreException e) {
            e.printStackTrace();
        }
    }

    public void enableLineBreakpoint(IFile file, int lineNumber) throws CoreException {
        try {
            IBreakpoint lineBkpt= findStratumBreakpoint(file, lineNumber);
            if (lineBkpt != null) {
                lineBkpt.setEnabled(true);
            }
        } catch (CoreException e) {
            e.printStackTrace();
        }
    }

    /**
     * Returns a Java line breakpoint that is already registered with the breakpoint
     * manager for a type with the given name at the given line number.
     *
     * @param typeName fully qualified type name
     * @param lineNumber line number
     * @return a Java line breakpoint that is already registered with the breakpoint
     *  manager for a type with the given name at the given line number or <code>null</code>
     * if no such breakpoint is registered
     * @exception CoreException if unable to retrieve the associated marker
     *  attributes (line number).
     */
    public static IJavaLineBreakpoint findStratumBreakpoint(IResource resource, int lineNumber) throws CoreException {
        String modelId= JDT_DEBUG_PLUGIN_ID;
        String markerType= "org.eclipse.jdt.debug.javaStratumLineBreakpointMarker";
        IBreakpointManager manager= DebugPlugin.getDefault().getBreakpointManager();
        IBreakpoint[] breakpoints= manager.getBreakpoints(modelId);
        for (int i = 0; i < breakpoints.length; i++) {
            if (!(breakpoints[i] instanceof IJavaLineBreakpoint)) {
                continue;
            }
            IJavaLineBreakpoint breakpoint = (IJavaLineBreakpoint) breakpoints[i];
            IMarker marker = breakpoint.getMarker();
            if (marker != null && marker.exists() && marker.getType().equals(markerType)) {
                if (breakpoint.getLineNumber() == lineNumber &&
                        resource.equals(marker.getResource())) {
                    return breakpoint;
                }
            }
        }
        return null;
    }

    public boolean canToggleLineBreakpoints(IWorkbenchPart part, ISelection selection) {
        return true;
    }

    // Method breakpoints and watchpoints are not supported by this adapter.
    public void toggleMethodBreakpoints(IWorkbenchPart part, ISelection selection) throws CoreException {
    }

    public boolean canToggleMethodBreakpoints(IWorkbenchPart part, ISelection selection) {
        return false;
    }

    public void toggleWatchpoints(IWorkbenchPart part, ISelection selection) throws CoreException {
    }

    public boolean canToggleWatchpoints(IWorkbenchPart part, ISelection selection) {
        return false;
    }
}
1no label
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_editor_ToggleBreakpointAdapter.java
270
public class MapPutPartitionAwareRunnable implements Runnable, DataSerializable, PartitionAware, HazelcastInstanceAware { private HazelcastInstance instance; public String mapName; public Object partitionKey; public MapPutPartitionAwareRunnable(){} public MapPutPartitionAwareRunnable(String mapName, Object partitionKey) { this.mapName = mapName; this.partitionKey = partitionKey; } public void writeData(ObjectDataOutput out) throws IOException { out.writeUTF(mapName); } public void readData(ObjectDataInput in) throws IOException { mapName = in.readUTF(); } public void run() { Member member = instance.getCluster().getLocalMember(); IMap map = instance.getMap(mapName); map.put(member.getUuid(), member.getUuid()+"value"); } @Override public void setHazelcastInstance(HazelcastInstance hazelcastInstance) { instance = hazelcastInstance; } public String getMapName() { return mapName; } public void setMapName(String mapName) { this.mapName = mapName; } @Override public Object getPartitionKey() { return partitionKey; } }
0true
hazelcast-client_src_test_java_com_hazelcast_client_executor_tasks_MapPutPartitionAwareRunnable.java
44
/**
 * An immutable {@link Map.Entry}: key and value are fixed at construction and
 * {@link #setValue(Object)} always throws. Honors the standard
 * {@code Map.Entry} equals/hashCode contracts so instances compare correctly
 * against any other entry implementation.
 */
public class OSimpleImmutableEntry<K, V> implements Entry<K, V>, java.io.Serializable {
  private static final long serialVersionUID = 7138329143949025153L;

  private final K key;
  private final V value;

  /**
   * Builds an entry mapping {@code key} to {@code value}.
   *
   * @param key
   *          the key represented by this entry
   * @param value
   *          the value represented by this entry
   */
  public OSimpleImmutableEntry(final K key, final V value) {
    this.key = key;
    this.value = value;
  }

  /**
   * Builds an entry carrying the same mapping as {@code entry}.
   *
   * @param entry
   *          the entry to copy
   */
  public OSimpleImmutableEntry(final Entry<? extends K, ? extends V> entry) {
    this(entry.getKey(), entry.getValue());
  }

  /** @return the key held by this entry */
  public K getKey() {
    return key;
  }

  /** @return the value held by this entry */
  public V getValue() {
    return value;
  }

  /**
   * Always fails: this entry is immutable.
   *
   * @throws UnsupportedOperationException
   *           always
   */
  public V setValue(final V value) {
    throw new UnsupportedOperationException();
  }

  /**
   * Two entries are equal when both their keys and their values are equal
   * (null-safe), regardless of the concrete {@code Map.Entry} implementation.
   */
  @Override
  public boolean equals(final Object o) {
    if (!(o instanceof Map.Entry))
      return false;
    final Map.Entry<?, ?> that = (Map.Entry<?, ?>) o;
    return matches(key, that.getKey()) && matches(value, that.getValue());
  }

  /** Null-safe equality helper for key/value comparison. */
  private static boolean matches(final Object a, final Object b) {
    return a == null ? b == null : a.equals(b);
  }

  /**
   * Hash code per the {@code Map.Entry} contract:
   * {@code keyHash ^ valueHash}, treating null as 0.
   */
  @Override
  public int hashCode() {
    final int keyHash = key == null ? 0 : key.hashCode();
    final int valueHash = value == null ? 0 : value.hashCode();
    return keyHash ^ valueHash;
  }

  /** Renders the mapping as {@code key=value}. */
  @Override
  public String toString() {
    return key + "=" + value;
  }
}
0true
commons_src_main_java_com_orientechnologies_common_collection_OSimpleImmutableEntry.java
36
/**
 * Integration test for the standalone cluster client (arbiter): spawns a real
 * JVM running {@code StandaloneClusterClientTestProxy} and asserts whether it
 * joins a two-member cluster started in-process, and on which port.
 *
 * Port conventions used throughout: the two in-process members listen on
 * 5001/5002; the spawned client is expected on 5003 (or an explicitly
 * configured port); ":5011" is an initial-hosts address nobody listens on, so
 * joining must fail.
 */
public class StandaloneClusterClientIT
{
    @Test
    public void canJoinWithExplicitInitialHosts() throws Exception
    {
        startAndAssertJoined( 5003,
                // Config file
                stringMap(),
                // Arguments
                stringMap( initial_hosts.name(), ":5001", server_id.name(), "3" ) );
    }

    @Test
    public void willFailJoinIfIncorrectInitialHostsSet() throws Exception
    {
        assumeFalse( "Cannot kill processes on windows.", osIsWindows() );
        startAndAssertJoined( SHOULD_NOT_JOIN,
                // Config file
                stringMap(),
                // Arguments
                stringMap( initial_hosts.name(), ":5011", server_id.name(), "3" ) );
    }

    @Test
    public void canJoinWithInitialHostsInConfigFile() throws Exception
    {
        startAndAssertJoined( 5003,
                // Config file
                stringMap( initial_hosts.name(), ":5001" ),
                // Arguments
                stringMap( server_id.name(), "3" ) );
    }

    @Test
    public void willFailJoinIfIncorrectInitialHostsSetInConfigFile() throws Exception
    {
        assumeFalse( "Cannot kill processes on windows.", osIsWindows() );
        startAndAssertJoined( SHOULD_NOT_JOIN,
                // Config file
                stringMap( initial_hosts.name(), ":5011" ),
                // Arguments
                stringMap( server_id.name(), "3" ) );
    }

    @Test
    public void canOverrideInitialHostsConfigFromConfigFile() throws Exception
    {
        // Command-line arguments take precedence over the config file.
        startAndAssertJoined( 5003,
                // Config file
                stringMap( initial_hosts.name(), ":5011" ),
                // Arguments
                stringMap( initial_hosts.name(), ":5001", server_id.name(), "3" ) );
    }

    @Test
    public void canSetSpecificPort() throws Exception
    {
        startAndAssertJoined( 5010,
                // Config file
                stringMap(),
                // Arguments
                stringMap( initial_hosts.name(), ":5001", server_id.name(), "3",
                        cluster_server.name(), ":5010" ) );
    }

    @Test
    public void usesPortRangeFromConfigFile() throws Exception
    {
        // 5010 and 5011 may be taken; the client should pick 5012 from the range.
        startAndAssertJoined( 5012,
                // Config file
                stringMap( initial_hosts.name(), ":5001",
                        cluster_server.name(), ":5012-5020" ),
                // Arguments
                stringMap( server_id.name(), "3" ) );
    }

    // === Everything else ===

    // Sentinel meaning "this configuration must NOT result in a cluster join".
    private static Integer SHOULD_NOT_JOIN = null;

    @Rule
    public TestRule dumpPorts = new DumpPortListenerOnNettyBindFailure();
    private final File directory = TargetDirectory.forTest( getClass() ).cleanDirectory( "temp" );
    private LifeSupport life;
    private ClusterClient[] clients;

    /**
     * Starts two in-process cluster members on ports 5001 and 5002 and waits
     * for each to enter the cluster before the test body runs.
     */
    @Before
    public void before() throws Exception
    {
        life = new LifeSupport();
        life.start(); // So that the clients get started as they are added
        clients = new ClusterClient[2];
        for ( int i = 1; i <= clients.length; i++ )
        {
            Map<String, String> config = stringMap();
            config.put( cluster_server.name(), ":" + (5000 + i) );
            config.put( server_id.name(), "" + i );
            config.put( initial_hosts.name(), ":5001" );
            Logging logging = new DevNullLoggingService();
            ObjectStreamFactory objectStreamFactory = new ObjectStreamFactory();
            final ClusterClient client = new ClusterClient( adapt( new Config( config ) ), logging,
                    new ServerIdElectionCredentialsProvider(), objectStreamFactory,
                    objectStreamFactory );
            final CountDownLatch latch = new CountDownLatch( 1 );
            client.addClusterListener( new ClusterListener.Adapter()
            {
                @Override
                public void enteredCluster( ClusterConfiguration configuration )
                {
                    latch.countDown();
                    // One-shot listener: detach once we've seen the join.
                    client.removeClusterListener( this );
                }
            } );
            clients[i - 1] = life.add( client );
            assertTrue( "Didn't join the cluster", latch.await( 2, SECONDS ) );
        }
    }

    @After
    public void after() throws Exception
    {
        life.shutdown();
    }

    /**
     * Writes the given settings to a db-tuning properties file and returns a
     * server config file pointing at it.
     * NOTE(review): the local {@code directory} shadows the field of the same
     * name and cleans the same "temp" target dir again — confirm intentional.
     */
    private File configFile( Map<String, String> config ) throws IOException
    {
        File directory = TargetDirectory.forTest( getClass() ).cleanDirectory( "temp" );
        File dbConfigFile = new File( directory, "config-file" );
        store( config, dbConfigFile );
        File serverConfigFile = new File( directory, "server-file" );
        store( stringMap( Configurator.DB_TUNING_PROPERTY_FILE_KEY,
                dbConfigFile.getAbsolutePath() ), serverConfigFile );
        return serverConfigFile;
    }

    /**
     * Spawns the standalone client with the given config file contents and
     * command-line arguments, then asserts either that it joined on
     * {@code expectedAssignedPort}, or (when passed {@link #SHOULD_NOT_JOIN})
     * that it failed to join.
     */
    private void startAndAssertJoined( Integer expectedAssignedPort, Map<String, String> configInConfigFile,
            Map<String, String> config ) throws Exception
    {
        File configFile = configFile( configInConfigFile );
        CountDownLatch latch = new CountDownLatch( 1 );
        AtomicInteger port = new AtomicInteger();
        clients[0].addClusterListener( joinAwaitingListener( latch, port ) );
        boolean clientStarted = startStandaloneClusterClient( configFile, config, latch );
        if ( expectedAssignedPort == null )
        {
            assertFalse( format( "Should not be able to start cluster client given config file:%s " +
                    "and arguments:%s", configInConfigFile, config ), clientStarted );
        }
        else
        {
            assertTrue( format( "Should be able to start client client given config file:%s " +
                    "and arguments:%s", configInConfigFile, config ), clientStarted );
            assertEquals( expectedAssignedPort.intValue(), port.get() );
        }
    }

    /**
     * Listener installed on member 1 that records the port of the next member
     * to join and releases the latch; removes itself after firing once.
     */
    private Adapter joinAwaitingListener( final CountDownLatch latch, final AtomicInteger port )
    {
        return new ClusterListener.Adapter()
        {
            @Override
            public void joinedCluster( InstanceId member, URI memberUri )
            {
                port.set( memberUri.getPort() );
                latch.countDown();
                clients[0].removeClusterListener( this );
            }
        };
    }

    /**
     * Launches the external client JVM and waits (bounded) for it to join.
     *
     * @return true when the join was observed before the timeout.
     */
    private boolean startStandaloneClusterClient( File configFile, Map<String, String> config,
            CountDownLatch latch ) throws Exception
    {
        Process process = null;
        ProcessStreamHandler handler = null;
        try
        {
            process = startStandaloneClusterClientProcess( configFile, config );
            new InputStreamAwaiter( process.getInputStream() ).awaitLine( START_SIGNAL, 20, SECONDS );
            handler = new ProcessStreamHandler( process, false, "", IGNORE_FAILURES );
            handler.launch();
            // Latch is triggered when this cluster client we just spawned joins the cluster,
            // or rather when the first client sees it as joined. If the latch awaiting times out it
            // (most likely) means that this cluster client couldn't be started. The reason for not
            // being able to start is assumed in this test to be that the specified port already is in use.
            return latch.await( 5, SECONDS );
        }
        finally
        {
            if ( process != null )
            {
                kill( process );
                process.waitFor();
            }
            if ( handler != null )
            {
                handler.done();
            }
        }
    }

    /** Builds and execs the java command line for the standalone client proxy. */
    private Process startStandaloneClusterClientProcess( File configFile, Map<String, String> config )
            throws Exception
    {
        List<String> args = new ArrayList<String>( asList( "java", "-cp",
                getProperty( "java.class.path" ),
                "-Dneo4j.home=" + directory.getAbsolutePath() ) );
        if ( configFile != null )
        {
            args.add( "-D" + Configurator.NEO_SERVER_CONFIG_FILE_KEY + "=" + configFile.getAbsolutePath() );
        }
        args.add( StandaloneClusterClientTestProxy.class.getName() );
        for ( Map.Entry<String, String> entry : config.entrySet() )
        {
            args.add( "-" + entry.getKey() + "=" + entry.getValue() );
        }
        return getRuntime().exec( args.toArray( new String[args.size()] ) );
    }

    /**
     * Kills the process: Process.destroy() on Windows, otherwise "kill -9" on
     * the pid extracted reflectively from the (UNIX) Process implementation.
     */
    private static void kill( Process process )
            throws NoSuchFieldException, IllegalAccessException, IOException, InterruptedException
    {
        if ( osIsWindows() )
        {
            process.destroy();
        }
        else
        {
            int pid = ((Number) accessible( process.getClass().getDeclaredField( "pid" ) )
                    .get( process )).intValue();
            new ProcessBuilder( "kill", "-9", "" + pid ).start().waitFor();
        }
    }

    /** Marks the reflective member accessible and returns it (fluent helper). */
    private static <T extends AccessibleObject> T accessible( T obj )
    {
        obj.setAccessible( true );
        return obj;
    }
}
1no label
enterprise_server-enterprise_src_test_java_org_neo4j_server_enterprise_StandaloneClusterClientIT.java
4,133
/**
 * Guice module that registers every per-index setting which may be updated
 * dynamically (i.e. via the update-settings API without reopening the index),
 * each with an optional {@link Validator} constraining its value, and binds
 * the resulting registry under the {@link IndexDynamicSettings} annotation.
 */
public class IndexDynamicSettingsModule extends AbstractModule {

    private final DynamicSettings indexDynamicSettings;

    public IndexDynamicSettingsModule() {
        indexDynamicSettings = new DynamicSettings();
        // Store throttling
        indexDynamicSettings.addDynamicSetting(AbstractIndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC, Validator.BYTES_SIZE);
        indexDynamicSettings.addDynamicSetting(AbstractIndexStore.INDEX_STORE_THROTTLE_TYPE);
        // Shard allocation filtering ("*" = any attribute suffix)
        indexDynamicSettings.addDynamicSetting(FilterAllocationDecider.INDEX_ROUTING_REQUIRE_GROUP + "*");
        indexDynamicSettings.addDynamicSetting(FilterAllocationDecider.INDEX_ROUTING_INCLUDE_GROUP + "*");
        indexDynamicSettings.addDynamicSetting(FilterAllocationDecider.INDEX_ROUTING_EXCLUDE_GROUP + "*");
        indexDynamicSettings.addDynamicSetting(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE);
        indexDynamicSettings.addDynamicSetting(DisableAllocationDecider.INDEX_ROUTING_ALLOCATION_DISABLE_ALLOCATION);
        indexDynamicSettings.addDynamicSetting(DisableAllocationDecider.INDEX_ROUTING_ALLOCATION_DISABLE_NEW_ALLOCATION);
        indexDynamicSettings.addDynamicSetting(DisableAllocationDecider.INDEX_ROUTING_ALLOCATION_DISABLE_REPLICA_ALLOCATION);
        // Translog type
        indexDynamicSettings.addDynamicSetting(FsTranslog.INDEX_TRANSLOG_FS_TYPE);
        // Index metadata: replicas and block flags
        indexDynamicSettings.addDynamicSetting(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, Validator.NON_NEGATIVE_INTEGER);
        indexDynamicSettings.addDynamicSetting(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS);
        indexDynamicSettings.addDynamicSetting(IndexMetaData.SETTING_READ_ONLY);
        indexDynamicSettings.addDynamicSetting(IndexMetaData.SETTING_BLOCKS_READ);
        indexDynamicSettings.addDynamicSetting(IndexMetaData.SETTING_BLOCKS_WRITE);
        indexDynamicSettings.addDynamicSetting(IndexMetaData.SETTING_BLOCKS_METADATA);
        // Gateway / TTL / refresh / recovery
        indexDynamicSettings.addDynamicSetting(IndexShardGatewayService.INDEX_GATEWAY_SNAPSHOT_INTERVAL, Validator.TIME);
        indexDynamicSettings.addDynamicSetting(IndicesTTLService.INDEX_TTL_DISABLE_PURGE);
        indexDynamicSettings.addDynamicSetting(InternalIndexShard.INDEX_REFRESH_INTERVAL, Validator.TIME);
        indexDynamicSettings.addDynamicSetting(LocalGatewayAllocator.INDEX_RECOVERY_INITIAL_SHARDS);
        // Log-byte-size merge policy
        indexDynamicSettings.addDynamicSetting(LogByteSizeMergePolicyProvider.INDEX_MERGE_POLICY_MIN_MERGE_SIZE, Validator.BYTES_SIZE);
        indexDynamicSettings.addDynamicSetting(LogByteSizeMergePolicyProvider.INDEX_MERGE_POLICY_MAX_MERGE_SIZE, Validator.BYTES_SIZE);
        indexDynamicSettings.addDynamicSetting(LogByteSizeMergePolicyProvider.INDEX_MERGE_POLICY_MAX_MERGE_DOCS, Validator.POSITIVE_INTEGER);
        indexDynamicSettings.addDynamicSetting(LogByteSizeMergePolicyProvider.INDEX_MERGE_POLICY_MERGE_FACTOR, Validator.INTEGER_GTE_2);
        indexDynamicSettings.addDynamicSetting(LogByteSizeMergePolicyProvider.INDEX_COMPOUND_FORMAT);
        // Log-doc merge policy
        indexDynamicSettings.addDynamicSetting(LogDocMergePolicyProvider.INDEX_MERGE_POLICY_MIN_MERGE_DOCS, Validator.POSITIVE_INTEGER);
        indexDynamicSettings.addDynamicSetting(LogDocMergePolicyProvider.INDEX_MERGE_POLICY_MAX_MERGE_DOCS, Validator.POSITIVE_INTEGER);
        indexDynamicSettings.addDynamicSetting(LogDocMergePolicyProvider.INDEX_MERGE_POLICY_MERGE_FACTOR, Validator.INTEGER_GTE_2);
        indexDynamicSettings.addDynamicSetting(LogDocMergePolicyProvider.INDEX_COMPOUND_FORMAT);
        // Engine / codec
        indexDynamicSettings.addDynamicSetting(InternalEngine.INDEX_INDEX_CONCURRENCY, Validator.NON_NEGATIVE_INTEGER);
        indexDynamicSettings.addDynamicSetting(InternalEngine.INDEX_COMPOUND_ON_FLUSH, Validator.BOOLEAN);
        indexDynamicSettings.addDynamicSetting(CodecService.INDEX_CODEC_BLOOM_LOAD, Validator.BOOLEAN);
        indexDynamicSettings.addDynamicSetting(InternalEngine.INDEX_GC_DELETES, Validator.TIME);
        indexDynamicSettings.addDynamicSetting(InternalEngine.INDEX_CODEC);
        indexDynamicSettings.addDynamicSetting(InternalEngine.INDEX_FAIL_ON_MERGE_FAILURE);
        // Indexing slowlog thresholds
        indexDynamicSettings.addDynamicSetting(ShardSlowLogIndexingService.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN, Validator.TIME);
        indexDynamicSettings.addDynamicSetting(ShardSlowLogIndexingService.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO, Validator.TIME);
        indexDynamicSettings.addDynamicSetting(ShardSlowLogIndexingService.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG, Validator.TIME);
        indexDynamicSettings.addDynamicSetting(ShardSlowLogIndexingService.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE, Validator.TIME);
        indexDynamicSettings.addDynamicSetting(ShardSlowLogIndexingService.INDEX_INDEXING_SLOWLOG_REFORMAT);
        indexDynamicSettings.addDynamicSetting(ShardSlowLogIndexingService.INDEX_INDEXING_SLOWLOG_LEVEL);
        // Search slowlog thresholds (query and fetch phases)
        indexDynamicSettings.addDynamicSetting(ShardSlowLogSearchService.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN, Validator.TIME);
        indexDynamicSettings.addDynamicSetting(ShardSlowLogSearchService.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO, Validator.TIME);
        indexDynamicSettings.addDynamicSetting(ShardSlowLogSearchService.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG, Validator.TIME);
        indexDynamicSettings.addDynamicSetting(ShardSlowLogSearchService.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE, Validator.TIME);
        indexDynamicSettings.addDynamicSetting(ShardSlowLogSearchService.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN, Validator.TIME);
        indexDynamicSettings.addDynamicSetting(ShardSlowLogSearchService.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO, Validator.TIME);
        indexDynamicSettings.addDynamicSetting(ShardSlowLogSearchService.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG, Validator.TIME);
        indexDynamicSettings.addDynamicSetting(ShardSlowLogSearchService.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE, Validator.TIME);
        indexDynamicSettings.addDynamicSetting(ShardSlowLogSearchService.INDEX_SEARCH_SLOWLOG_REFORMAT);
        indexDynamicSettings.addDynamicSetting(ShardSlowLogSearchService.INDEX_SEARCH_SLOWLOG_LEVEL);
        // Per-node shard limit
        indexDynamicSettings.addDynamicSetting(ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE, Validator.INTEGER);
        // Tiered merge policy
        indexDynamicSettings.addDynamicSetting(TieredMergePolicyProvider.INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED, Validator.DOUBLE);
        indexDynamicSettings.addDynamicSetting(TieredMergePolicyProvider.INDEX_MERGE_POLICY_FLOOR_SEGMENT, Validator.BYTES_SIZE);
        indexDynamicSettings.addDynamicSetting(TieredMergePolicyProvider.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE, Validator.INTEGER_GTE_2);
        indexDynamicSettings.addDynamicSetting(TieredMergePolicyProvider.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT, Validator.INTEGER_GTE_2);
        indexDynamicSettings.addDynamicSetting(TieredMergePolicyProvider.INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT, Validator.BYTES_SIZE);
        indexDynamicSettings.addDynamicSetting(TieredMergePolicyProvider.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER, Validator.DOUBLE_GTE_2);
        indexDynamicSettings.addDynamicSetting(TieredMergePolicyProvider.INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT, Validator.NON_NEGATIVE_DOUBLE);
        indexDynamicSettings.addDynamicSetting(TieredMergePolicyProvider.INDEX_COMPOUND_FORMAT);
        // Translog flushing
        indexDynamicSettings.addDynamicSetting(TranslogService.INDEX_TRANSLOG_FLUSH_INTERVAL, Validator.TIME);
        indexDynamicSettings.addDynamicSetting(TranslogService.INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS, Validator.INTEGER);
        indexDynamicSettings.addDynamicSetting(TranslogService.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, Validator.BYTES_SIZE);
        indexDynamicSettings.addDynamicSetting(TranslogService.INDEX_TRANSLOG_FLUSH_THRESHOLD_PERIOD, Validator.TIME);
        indexDynamicSettings.addDynamicSetting(TranslogService.INDEX_TRANSLOG_DISABLE_FLUSH);
        // Warmers
        indexDynamicSettings.addDynamicSetting(InternalIndicesWarmer.INDEX_WARMER_ENABLED);
    }

    /** Registers additional dynamic settings without validators (e.g. from plugins). */
    public void addDynamicSettings(String... settings) {
        indexDynamicSettings.addDynamicSettings(settings);
    }

    /** Registers one additional dynamic setting with the given validator. */
    public void addDynamicSetting(String setting, Validator validator) {
        indexDynamicSettings.addDynamicSetting(setting, validator);
    }

    @Override
    protected void configure() {
        // Expose the registry under the @IndexDynamicSettings qualifier.
        bind(DynamicSettings.class).annotatedWith(IndexDynamicSettings.class).toInstance(indexDynamicSettings);
    }
}
1no label
src_main_java_org_elasticsearch_index_settings_IndexDynamicSettingsModule.java
228
// Poll (with the test utility's retry/timeout) until the asynchronously
// executed task's put becomes visible: the map must eventually hold exactly
// one entry.
assertTrueEventually(new AssertTask() {
    public void run() throws Exception {
        assertEquals(1, map.size());
    }
});
0true
hazelcast-client_src_test_java_com_hazelcast_client_executor_ClientExecutorServiceExecuteTest.java
1,469
/**
 * SQL function {@code gremlin(<expression>)}: evaluates a Gremlin expression
 * against the current record of the query. For each record the Blueprints
 * wrapper (edge or vertex) is bound into the script engine as both
 * {@code current} and {@code it}, and each evaluation appends to the shared
 * {@link #result} list returned by {@link #getResult()}.
 *
 * NOTE(review): {@code result} accumulates across calls and is never reset
 * here — presumably a new function instance is created per query; confirm.
 */
public class OSQLFunctionGremlin extends OSQLFunctionAbstract {
  public static final String NAME = "gremlin";
  // Accumulates the per-record Gremlin results for the whole query.
  private List<Object> result;

  public OSQLFunctionGremlin() {
    // Exactly one argument: the Gremlin expression text.
    super(NAME, 1, 1);
  }

  /**
   * Evaluates the Gremlin expression (iParameters[0]) against iCurrentRecord.
   *
   * @return the script result, or null when the record is not an ODocument
   */
  public Object execute(final OIdentifiable iCurrentRecord, Object iCurrentResult, final Object[] iParameters,
      final OCommandContext iContext) {
    if (!(iCurrentRecord instanceof ODocument))
      // NOT DOCUMENT OR GRAPHDB? IGNORE IT
      return null;

    final ODatabaseDocumentTx db = OGremlinHelper.getGraphDatabase(ODatabaseRecordThreadLocal.INSTANCE.get());

    // Lazily created on the first record.
    if (result == null)
      result = new ArrayList<Object>();

    @SuppressWarnings({ "rawtypes", "unchecked" })
    final Object scriptResult = OGremlinHelper.execute(db, (String) iParameters[0], null, (Map) iContext.getVariables(),
        result, new OGremlinHelper.OGremlinCallback() {
          @Override
          public boolean call(ScriptEngine iEngine, OrientBaseGraph iGraph) {
            final ODocument document = (ODocument) iCurrentRecord;
            if (document.getSchemaClass() != null && document.getSchemaClass().isSubClassOf("E")) {
              // EDGE TYPE, CREATE THE BLUEPRINTS'S WRAPPER
              OrientEdge graphElement = (OrientEdge) new OrientElementIterable<OrientEdge>(iGraph, Arrays
                  .asList(new ODocument[] { document })).iterator().next();
              iEngine.getBindings(ScriptContext.ENGINE_SCOPE).put("current", graphElement);
              iEngine.getBindings(ScriptContext.ENGINE_SCOPE).put("it", graphElement); // FRAMES LIKE SYNTAX
            } else {
              // VERTEX TYPE, CREATE THE BLUEPRINTS'S WRAPPER
              OrientVertex graphElement = (OrientVertex) new OrientElementIterable<OrientVertex>(iGraph, Arrays
                  .asList(new ODocument[] { document })).iterator().next();
              iEngine.getBindings(ScriptContext.ENGINE_SCOPE).put("current", graphElement);
              iEngine.getBindings(ScriptContext.ENGINE_SCOPE).put("it", graphElement); // FRAMES LIKE SYNTAX
            }
            return true;
          }
        }, null);

    return scriptResult;
  }

  @Override
  public boolean aggregateResults() {
    // Evaluated per record, not as an aggregate over the result set.
    return false;
  }

  public String getSyntax() {
    return "Syntax error: gremlin(<gremlin-expression>)";
  }

  @Override
  public boolean filterResult() {
    return true;
  }

  @Override
  public Object getResult() {
    return result;
  }
}
1no label
graphdb_src_main_java_com_orientechnologies_orient_graph_sql_functions_OSQLFunctionGremlin.java
3,211
// Registers the deserialization factory for REPL_CLEAR_MESSAGE.
// NOTE(review): the factory instantiates a VectorClock — presumably the
// clear-message's wire form is (or embeds) a vector clock; confirm this
// type-id-to-class mapping is intentional and matches the writer side.
constructors[REPL_CLEAR_MESSAGE] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
    public IdentifiedDataSerializable createNew(Integer arg) {
        return new VectorClock();
    }
};
1no label
hazelcast_src_main_java_com_hazelcast_replicatedmap_operation_ReplicatedMapDataSerializerHook.java
1,369
public class JDTMethod implements MethodMirror, IBindingProvider { private WeakReference<MethodBinding> bindingRef; private Map<String, AnnotationMirror> annotations; private String name; private List<VariableMirror> parameters; private TypeMirror returnType; private List<TypeParameterMirror> typeParameters; Boolean isOverriding; private Boolean isOverloading; private JDTClass enclosingClass; private boolean isStatic; private boolean isPublic; private boolean isConstructor; private boolean isStaticInit; private boolean isAbstract; private boolean isFinal; private char[] bindingKey; private String readableName; private boolean isProtected; private boolean isDefaultAccess; private boolean isDeclaredVoid; private boolean isVariadic; private boolean isDefault; public JDTMethod(JDTClass enclosingClass, MethodBinding method) { this.enclosingClass = enclosingClass; bindingRef = new WeakReference<MethodBinding>(method); name = new String(method.selector); readableName = new String(method.readableName()); isStatic = method.isStatic(); isPublic = method.isPublic(); isConstructor = method.isConstructor(); isStaticInit = method.selector == TypeConstants.CLINIT; // TODO : check if it is right isAbstract = method.isAbstract(); isFinal = method.isFinal(); isProtected = method.isProtected(); isDefaultAccess = method.isDefault(); isDeclaredVoid = method.returnType.id == TypeIds.T_void; isVariadic = method.isVarargs(); isDefault = method.getDefaultValue()!=null; bindingKey = method.computeUniqueKey(); if (method instanceof ProblemMethodBinding) { annotations = new HashMap<>(); parameters = Collections.emptyList(); returnType = JDTType.UNKNOWN_TYPE; typeParameters = Collections.emptyList(); isOverriding = false; isOverloading = false; } } @Override public AnnotationMirror getAnnotation(String type) { if (annotations == null) { doWithBindings(new ActionOnMethodBinding() { @Override public void doWithBinding(IType declaringClassModel, ReferenceBinding declaringClass, MethodBinding 
method) { annotations = JDTUtils.getAnnotations(method.getAnnotations()); } }); } return annotations.get(type); } @Override public String getName() { return name; } @Override public boolean isStatic() { return isStatic; } @Override public boolean isPublic() { return isPublic; } @Override public boolean isConstructor() { return isConstructor; } @Override public boolean isStaticInit() { return isStaticInit; } @Override public List<VariableMirror> getParameters() { if (parameters == null) { doWithBindings(new ActionOnMethodBinding() { private String toParameterName(TypeBinding parameterType) { String typeName = new String(parameterType.sourceName()); StringTokenizer tokens = new StringTokenizer(typeName, "$.[]"); String result = null; while (tokens.hasMoreTokens()) { result = tokens.nextToken(); } if (typeName.endsWith("[]")) { result = result + "Array"; } return toLowerCase(result.charAt(0)) + result.substring(1); } @Override public void doWithBinding(IType declaringClassModel, ReferenceBinding declaringClassBinding, MethodBinding methodBinding) { TypeBinding[] parameterBindings; AnnotationBinding[][] parameterAnnotationBindings; parameterBindings = ((MethodBinding)methodBinding).parameters; parameterAnnotationBindings = ((MethodBinding)methodBinding).getParameterAnnotations(); if (parameterAnnotationBindings == null) { parameterAnnotationBindings = new AnnotationBinding[parameterBindings.length][]; for (int i=0; i<parameterAnnotationBindings.length; i++) { parameterAnnotationBindings[i] = new AnnotationBinding[0]; } } parameters = new ArrayList<VariableMirror>(parameterBindings.length); List<String> givenNames = new ArrayList<>(parameterBindings.length); for(int i=0;i<parameterBindings.length;i++) { Map<String, AnnotationMirror> parameterAnnotations = JDTUtils.getAnnotations(parameterAnnotationBindings[i]); String parameterName; AnnotationMirror nameAnnotation = getAnnotation(Name.class.getName()); TypeBinding parameterTypeBinding = parameterBindings[i]; 
if(nameAnnotation != null) { parameterName = (String) nameAnnotation.getValue(); } else { String baseName = toParameterName(parameterTypeBinding); int count = 0; String nameToReturn = baseName; for (String givenName : givenNames) { if (givenName.equals(nameToReturn)) { count ++; nameToReturn = baseName + Integer.toString(count); } } parameterName = nameToReturn; } givenNames.add(parameterName); parameters.add(new JDTVariable(parameterName, new JDTType(parameterTypeBinding), parameterAnnotations)); } } }); } return parameters; } @Override public boolean isAbstract() { return isAbstract; } @Override public boolean isFinal() { return isFinal; } @Override public TypeMirror getReturnType() { if (returnType == null) { doWithBindings(new ActionOnMethodBinding() { @Override public void doWithBinding(IType declaringClassModel, ReferenceBinding declaringClassBinding, MethodBinding methodBinding) { returnType = new JDTType(methodBinding.returnType); } }); } return returnType; } @Override public List<TypeParameterMirror> getTypeParameters() { if (typeParameters == null) { doWithBindings(new ActionOnMethodBinding() { @Override public void doWithBinding(IType declaringClassModel, ReferenceBinding declaringClassBinding, MethodBinding methodBinding) { TypeVariableBinding[] jdtTypeParameters = methodBinding.typeVariables(); typeParameters = new ArrayList<TypeParameterMirror>(jdtTypeParameters.length); for(TypeVariableBinding jdtTypeParameter : jdtTypeParameters) typeParameters.add(new JDTTypeParameter(jdtTypeParameter)); } }); } return typeParameters; } public boolean isOverridingMethod() { if (isOverriding == null) { isOverriding = false; doWithBindings(new ActionOnMethodBinding() { @Override public void doWithBinding(IType declaringClassModel, ReferenceBinding declaringClass, MethodBinding method) { if (CharOperation.equals(declaringClass.readableName(), "ceylon.language.Identifiable".toCharArray())) { if ("equals".equals(name) || "hashCode".equals(name)) { isOverriding = true; 
return; } } if (CharOperation.equals(declaringClass.readableName(), "ceylon.language.Object".toCharArray())) { if ("equals".equals(name) || "hashCode".equals(name) || "toString".equals(name)) { isOverriding = false; return; } } // try the superclass first if (isDefinedInSuperClasses(declaringClass, method)) { isOverriding = true; } if (isDefinedInSuperInterfaces(declaringClass, method)) { isOverriding = true; } } }); } return isOverriding.booleanValue(); } private void doWithBindings(final ActionOnMethodBinding action) { final IType declaringClassModel = enclosingClass.getType(); if (!JDTModelLoader.doWithMethodBinding(declaringClassModel, bindingRef.get(), action)) { JDTModelLoader.doWithResolvedType(declaringClassModel, new JDTModelLoader.ActionOnResolvedType() { @Override public void doWithBinding(ReferenceBinding declaringClass) { MethodBinding method = null; for (MethodBinding m : declaringClass.methods()) { if (CharOperation.equals(m.computeUniqueKey(), bindingKey)) { method = m; break; } } if (method == null) { throw new ModelResolutionException("Method '" + readableName + "' not found in the binding of class '" + declaringClassModel.getFullyQualifiedName() + "'"); } bindingRef = new WeakReference<MethodBinding>(method); action.doWithBinding(declaringClassModel, declaringClass, method); } }); } } public boolean isOverloadingMethod() { if (isOverloading == null) { isOverloading = Boolean.FALSE; doWithBindings(new ActionOnMethodBinding() { @Override public void doWithBinding(IType declaringClassModel, ReferenceBinding declaringClass, MethodBinding method) { // Exception has a pretend supertype of Object, unlike its Java supertype of java.lang.RuntimeException // so we stop there for it, especially since it does not have any overloading if(CharOperation.equals(declaringClass.qualifiedSourceName(), "ceylon.language.Exception".toCharArray())) { isOverloading = false; return; } // try the superclass first if (isOverloadingInSuperClasses(declaringClass, method)) { 
isOverloading = Boolean.TRUE; } if (isOverloadingInSuperInterfaces(declaringClass, method)) { isOverloading = Boolean.TRUE; } } }); } return isOverloading.booleanValue(); } private boolean ignoreMethodInAncestorSearch(MethodBinding methodBinding) { String name = CharOperation.charToString(methodBinding.selector); if(name.equals("finalize") || name.equals("clone")){ if(methodBinding.declaringClass != null && CharOperation.toString(methodBinding.declaringClass.compoundName).equals("java.lang.Object")) { return true; } } // skip ignored methods too if(JDTUtils.hasAnnotation(methodBinding, AbstractModelLoader.CEYLON_IGNORE_ANNOTATION)) { return true; } return false; } private boolean isDefinedInType(ReferenceBinding superClass, MethodBinding method) { MethodVerifier methodVerifier = superClass.getPackage().environment.methodVerifier(); for (MethodBinding inheritedMethod : superClass.methods()) { // skip ignored methods if(ignoreMethodInAncestorSearch(inheritedMethod)) { continue; } if (methodVerifier.doesMethodOverride(method, inheritedMethod)) { return true; } } return false; } private boolean isOverloadingInType(ReferenceBinding superClass, MethodBinding method) { MethodVerifier methodVerifier = superClass.getPackage().environment.methodVerifier(); for (MethodBinding inheritedMethod : superClass.methods()) { if(inheritedMethod.isPrivate() || inheritedMethod.isStatic() || inheritedMethod.isConstructor() || inheritedMethod.isBridge() || inheritedMethod.isSynthetic() || !Arrays.equals(inheritedMethod.constantPoolName(), method.selector)) continue; // skip ignored methods if(ignoreMethodInAncestorSearch(inheritedMethod)) { continue; } // if it does not override it and has the same name, it's overloading if (!methodVerifier.doesMethodOverride(method, inheritedMethod)) { return true; } } return false; } boolean isDefinedInSuperClasses(ReferenceBinding declaringClass, MethodBinding method) { ReferenceBinding superClass = declaringClass.superclass(); if (superClass == null) 
{ return false; } superClass = JDTUtils.inferTypeParametersFromSuperClass(declaringClass, superClass); if (isDefinedInType(superClass, method)) { return true; } return isDefinedInSuperClasses(superClass, method); } boolean isDefinedInSuperInterfaces(ReferenceBinding declaringType, MethodBinding method) { ReferenceBinding[] superInterfaces = declaringType.superInterfaces(); if (superInterfaces == null) { return false; } for (ReferenceBinding superInterface : superInterfaces) { if (isDefinedInType(superInterface, method)) { return true; } if (isDefinedInSuperInterfaces(superInterface, method)) { return true; } } return false; } boolean isOverloadingInSuperClasses(ReferenceBinding declaringClass, MethodBinding method) { ReferenceBinding superClass = declaringClass.superclass(); if (superClass == null) { return false; } // Exception has a pretend supertype of Object, unlike its Java supertype of java.lang.RuntimeException // so we stop there for it, especially since it does not have any overloading if(CharOperation.equals(superClass.qualifiedSourceName(), "ceylon.language.Exception".toCharArray())) return false; superClass = JDTUtils.inferTypeParametersFromSuperClass(declaringClass, superClass); if (isOverloadingInType(superClass, method)) { return true; } return isOverloadingInSuperClasses(superClass, method); } boolean isOverloadingInSuperInterfaces(ReferenceBinding declaringType, MethodBinding method) { ReferenceBinding[] superInterfaces = declaringType.superInterfaces(); if (superInterfaces == null) { return false; } for (ReferenceBinding superInterface : superInterfaces) { if (isOverloadingInType(superInterface, method)) { return true; } if (isOverloadingInSuperInterfaces(superInterface, method)) { return true; } } return false; } @Override public boolean isProtected() { return isProtected; } @Override public boolean isDefaultAccess() { return isDefaultAccess; } @Override public boolean isDeclaredVoid() { return isDeclaredVoid; } @Override public boolean 
isVariadic() { return isVariadic; } @Override public boolean isDefault() { return isDefault; } @Override public char[] getBindingKey() { return bindingKey; } @Override public ClassMirror getEnclosingClass() { return enclosingClass; } }
1no label
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_core_model_mirror_JDTMethod.java
194
public static class Presentation { public static class Tab { public static class Name { public static final String Audit = "Auditable_Tab"; } public static class Order { public static final int Audit = 99000; } } public static class Group { public static class Name { public static final String Audit = "Auditable_Audit"; } public static class Order { public static final int Audit = 1000; } } }
0true
common_src_main_java_org_broadleafcommerce_common_audit_Auditable.java
34
public abstract class Adapter implements ClusterListener { @Override public void enteredCluster( ClusterConfiguration clusterConfiguration ) { } @Override public void joinedCluster( InstanceId instanceId, URI member ) { } @Override public void leftCluster( InstanceId instanceId ) { } @Override public void leftCluster() { } @Override public void elected( String role, InstanceId instanceId, URI electedMember ) { } @Override public void unelected( String role, InstanceId instanceId, URI electedMember ) { } }
1no label
enterprise_cluster_src_main_java_org_neo4j_cluster_protocol_cluster_ClusterListener.java
1,383
public static class Builder { private static final Set<String> VALID_FIELDS = Sets.newHashSet("template", "order", "mappings", "settings"); static { VALID_FIELDS.addAll(IndexMetaData.customFactories.keySet()); } private String name; private int order; private String template; private Settings settings = ImmutableSettings.Builder.EMPTY_SETTINGS; private final ImmutableOpenMap.Builder<String, CompressedString> mappings; private final ImmutableOpenMap.Builder<String, IndexMetaData.Custom> customs; public Builder(String name) { this.name = name; mappings = ImmutableOpenMap.builder(); customs = ImmutableOpenMap.builder(); } public Builder(IndexTemplateMetaData indexTemplateMetaData) { this.name = indexTemplateMetaData.name(); order(indexTemplateMetaData.order()); template(indexTemplateMetaData.template()); settings(indexTemplateMetaData.settings()); mappings = ImmutableOpenMap.builder(indexTemplateMetaData.mappings()); customs = ImmutableOpenMap.builder(indexTemplateMetaData.customs()); } public Builder order(int order) { this.order = order; return this; } public Builder template(String template) { this.template = template; return this; } public String template() { return template; } public Builder settings(Settings.Builder settings) { this.settings = settings.build(); return this; } public Builder settings(Settings settings) { this.settings = settings; return this; } public Builder removeMapping(String mappingType) { mappings.remove(mappingType); return this; } public Builder putMapping(String mappingType, CompressedString mappingSource) throws IOException { mappings.put(mappingType, mappingSource); return this; } public Builder putMapping(String mappingType, String mappingSource) throws IOException { mappings.put(mappingType, new CompressedString(mappingSource)); return this; } public Builder putCustom(String type, IndexMetaData.Custom customIndexMetaData) { this.customs.put(type, customIndexMetaData); return this; } public Builder removeCustom(String type) { 
this.customs.remove(type); return this; } public IndexMetaData.Custom getCustom(String type) { return this.customs.get(type); } public IndexTemplateMetaData build() { return new IndexTemplateMetaData(name, order, template, settings, mappings.build(), customs.build()); } public static void toXContent(IndexTemplateMetaData indexTemplateMetaData, XContentBuilder builder, ToXContent.Params params) throws IOException { builder.startObject(indexTemplateMetaData.name(), XContentBuilder.FieldCaseConversion.NONE); builder.field("order", indexTemplateMetaData.order()); builder.field("template", indexTemplateMetaData.template()); builder.startObject("settings"); for (Map.Entry<String, String> entry : indexTemplateMetaData.settings().getAsMap().entrySet()) { builder.field(entry.getKey(), entry.getValue()); } builder.endObject(); if (params.paramAsBoolean("reduce_mappings", false)) { builder.startObject("mappings"); for (ObjectObjectCursor<String, CompressedString> cursor : indexTemplateMetaData.mappings()) { byte[] mappingSource = cursor.value.uncompressed(); XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource); Map<String, Object> mapping = parser.map(); if (mapping.size() == 1 && mapping.containsKey(cursor.key)) { // the type name is the root value, reduce it mapping = (Map<String, Object>) mapping.get(cursor.key); } builder.field(cursor.key); builder.map(mapping); } builder.endObject(); } else { builder.startArray("mappings"); for (ObjectObjectCursor<String, CompressedString> cursor : indexTemplateMetaData.mappings()) { byte[] data = cursor.value.uncompressed(); XContentParser parser = XContentFactory.xContent(data).createParser(data); Map<String, Object> mapping = parser.mapOrderedAndClose(); builder.map(mapping); } builder.endArray(); } for (ObjectObjectCursor<String, IndexMetaData.Custom> cursor : indexTemplateMetaData.customs()) { builder.startObject(cursor.key, XContentBuilder.FieldCaseConversion.NONE); 
IndexMetaData.lookupFactorySafe(cursor.key).toXContent(cursor.value, builder, params); builder.endObject(); } builder.endObject(); } public static IndexTemplateMetaData fromXContentStandalone(XContentParser parser) throws IOException { XContentParser.Token token = parser.nextToken(); if (token == null) { throw new IOException("no data"); } if (token != XContentParser.Token.START_OBJECT) { throw new IOException("should start object"); } token = parser.nextToken(); if (token != XContentParser.Token.FIELD_NAME) { throw new IOException("the first field should be the template name"); } return fromXContent(parser); } public static IndexTemplateMetaData fromXContent(XContentParser parser) throws IOException { Builder builder = new Builder(parser.currentName()); String currentFieldName = skipTemplateName(parser); XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_OBJECT) { if ("settings".equals(currentFieldName)) { ImmutableSettings.Builder templateSettingsBuilder = ImmutableSettings.settingsBuilder(); for (Map.Entry<String, String> entry : SettingsLoader.Helper.loadNestedFromMap(parser.mapOrdered()).entrySet()) { if (!entry.getKey().startsWith("index.")) { templateSettingsBuilder.put("index." 
+ entry.getKey(), entry.getValue()); } else { templateSettingsBuilder.put(entry.getKey(), entry.getValue()); } } builder.settings(templateSettingsBuilder.build()); } else if ("mappings".equals(currentFieldName)) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_OBJECT) { String mappingType = currentFieldName; Map<String, Object> mappingSource = MapBuilder.<String, Object>newMapBuilder().put(mappingType, parser.mapOrdered()).map(); builder.putMapping(mappingType, XContentFactory.jsonBuilder().map(mappingSource).string()); } } } else { // check if its a custom index metadata IndexMetaData.Custom.Factory<IndexMetaData.Custom> factory = IndexMetaData.lookupFactory(currentFieldName); if (factory == null) { //TODO warn parser.skipChildren(); } else { builder.putCustom(factory.type(), factory.fromXContent(parser)); } } } else if (token == XContentParser.Token.START_ARRAY) { if ("mappings".equals(currentFieldName)) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { Map<String, Object> mapping = parser.mapOrdered(); if (mapping.size() == 1) { String mappingType = mapping.keySet().iterator().next(); String mappingSource = XContentFactory.jsonBuilder().map(mapping).string(); if (mappingSource == null) { // crap, no mapping source, warn? 
} else { builder.putMapping(mappingType, mappingSource); } } } } } else if (token.isValue()) { if ("template".equals(currentFieldName)) { builder.template(parser.text()); } else if ("order".equals(currentFieldName)) { builder.order(parser.intValue()); } } } return builder.build(); } private static String skipTemplateName(XContentParser parser) throws IOException { XContentParser.Token token = parser.nextToken(); if (token != null && token == XContentParser.Token.START_OBJECT) { token = parser.nextToken(); if (token == XContentParser.Token.FIELD_NAME) { String currentFieldName = parser.currentName(); if (VALID_FIELDS.contains(currentFieldName)) { return currentFieldName; } else { // we just hit the template name, which should be ignored and we move on parser.nextToken(); } } } return null; } public static IndexTemplateMetaData readFrom(StreamInput in) throws IOException { Builder builder = new Builder(in.readString()); builder.order(in.readInt()); builder.template(in.readString()); builder.settings(ImmutableSettings.readSettingsFromStream(in)); int mappingsSize = in.readVInt(); for (int i = 0; i < mappingsSize; i++) { builder.putMapping(in.readString(), CompressedString.readCompressedString(in)); } int customSize = in.readVInt(); for (int i = 0; i < customSize; i++) { String type = in.readString(); IndexMetaData.Custom customIndexMetaData = IndexMetaData.lookupFactorySafe(type).readFrom(in); builder.putCustom(type, customIndexMetaData); } return builder.build(); } public static void writeTo(IndexTemplateMetaData indexTemplateMetaData, StreamOutput out) throws IOException { out.writeString(indexTemplateMetaData.name()); out.writeInt(indexTemplateMetaData.order()); out.writeString(indexTemplateMetaData.template()); ImmutableSettings.writeSettingsToStream(indexTemplateMetaData.settings(), out); out.writeVInt(indexTemplateMetaData.mappings().size()); for (ObjectObjectCursor<String, CompressedString> cursor : indexTemplateMetaData.mappings()) { 
out.writeString(cursor.key); cursor.value.writeTo(out); } out.writeVInt(indexTemplateMetaData.customs().size()); for (ObjectObjectCursor<String, IndexMetaData.Custom> cursor : indexTemplateMetaData.customs()) { out.writeString(cursor.key); IndexMetaData.lookupFactorySafe(cursor.key).writeTo(cursor.value, out); } } }
1no label
src_main_java_org_elasticsearch_cluster_metadata_IndexTemplateMetaData.java
1,772
@Component("blBasicPersistenceModule") @Scope("prototype") public class BasicPersistenceModule implements PersistenceModule, RecordHelper, ApplicationContextAware { private static final Log LOG = LogFactory.getLog(BasicPersistenceModule.class); public static final String MAIN_ENTITY_NAME_PROPERTY = "MAIN_ENTITY_NAME"; public static final String ALTERNATE_ID_PROPERTY = "ALTERNATE_ID"; protected ApplicationContext applicationContext; protected PersistenceManager persistenceManager; @Resource(name = "blEntityValidatorService") protected EntityValidatorService entityValidatorService; @Resource(name="blPersistenceProviders") protected List<FieldPersistenceProvider> fieldPersistenceProviders = new ArrayList<FieldPersistenceProvider>(); @Resource(name="blPopulateValueRequestValidators") protected List<PopulateValueRequestValidator> populateValidators; @Resource(name= "blDefaultFieldPersistenceProvider") protected FieldPersistenceProvider defaultFieldPersistenceProvider; @Resource(name="blCriteriaTranslator") protected CriteriaTranslator criteriaTranslator; @Resource(name="blRestrictionFactory") protected RestrictionFactory restrictionFactory; @Override public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { this.applicationContext = applicationContext; } @Override public boolean isCompatible(OperationType operationType) { return OperationType.BASIC == operationType || OperationType.NONDESTRUCTIVEREMOVE == operationType; } @Override public FieldManager getFieldManager() { return persistenceManager.getDynamicEntityDao().getFieldManager(); } @Override public DecimalFormat getDecimalFormatter() { BroadleafRequestContext brc = BroadleafRequestContext.getBroadleafRequestContext(); Locale locale = brc.getJavaLocale(); DecimalFormat format = (DecimalFormat) NumberFormat.getInstance(locale); format.applyPattern("0.########"); return format; } @Override public SimpleDateFormat getSimpleDateFormatter() { return FormatUtil.getDateFormat(); } 
protected Map<String, FieldMetadata> filterOutCollectionMetadata(Map<String, FieldMetadata> metadata) { if (metadata == null) { return null; } Map<String, FieldMetadata> newMap = new HashMap<String, FieldMetadata>(); for (Map.Entry<String, FieldMetadata> entry : metadata.entrySet()) { if (entry.getValue() instanceof BasicFieldMetadata) { newMap.put(entry.getKey(), entry.getValue()); } } return newMap; } protected Class<?> getBasicBroadleafType(SupportedFieldType fieldType) { Class<?> response; switch (fieldType) { case BOOLEAN: response = Boolean.TYPE; break; case DATE: response = Date.class; break; case DECIMAL: response = BigDecimal.class; break; case MONEY: response = Money.class; break; case INTEGER: response = Integer.TYPE; break; case UNKNOWN: response = null; break; default: response = String.class; break; } return response; } @Override public Serializable createPopulatedInstance(Serializable instance, Entity entity, Map<String, FieldMetadata> unfilteredProperties, Boolean setId) throws ValidationException { return createPopulatedInstance(instance, entity, unfilteredProperties, setId, true); } @Override public Serializable createPopulatedInstance(Serializable instance, Entity entity, Map<String, FieldMetadata> unfilteredProperties, Boolean setId, Boolean validateUnsubmittedProperties) throws ValidationException { Map<String, FieldMetadata> mergedProperties = filterOutCollectionMetadata(unfilteredProperties); FieldManager fieldManager = getFieldManager(); boolean handled = false; for (FieldPersistenceProvider fieldPersistenceProvider : fieldPersistenceProviders) { FieldProviderResponse response = fieldPersistenceProvider.filterProperties(new AddFilterPropertiesRequest(entity), unfilteredProperties); if (FieldProviderResponse.NOT_HANDLED != response) { handled = true; } if (FieldProviderResponse.HANDLED_BREAK == response) { break; } } if (!handled) { defaultFieldPersistenceProvider.filterProperties(new AddFilterPropertiesRequest(entity), unfilteredProperties); 
} try { for (Property property : entity.getProperties()) { BasicFieldMetadata metadata = (BasicFieldMetadata) mergedProperties.get(property.getName()); Class<?> returnType; if (!property.getName().contains(FieldManager.MAPFIELDSEPARATOR)) { Field field = fieldManager.getField(instance.getClass(), property.getName()); if (field == null) { LOG.debug("Unable to find a bean property for the reported property: " + property.getName() + ". Ignoring property."); continue; } returnType = field.getType(); } else { if (metadata == null) { LOG.debug("Unable to find a metadata property for the reported property: " + property.getName() + ". Ignoring property."); continue; } returnType = getMapFieldType(instance, fieldManager, property); if (returnType == null) { returnType = getBasicBroadleafType(metadata.getFieldType()); } } if (returnType == null) { throw new IllegalAccessException("Unable to determine the value type for the property ("+property.getName()+")"); } String value = property.getValue(); if (metadata != null) { Boolean mutable = metadata.getMutable(); Boolean readOnly = metadata.getReadOnly(); if (metadata.getFieldType().equals(SupportedFieldType.BOOLEAN)) { if (value == null) { value = "false"; } } if ((mutable == null || mutable) && (readOnly == null || !readOnly)) { if (value != null) { handled = false; PopulateValueRequest request = new PopulateValueRequest(setId, fieldManager, property, metadata, returnType, value, persistenceManager, this); boolean attemptToPopulate = true; for (PopulateValueRequestValidator validator : populateValidators) { PropertyValidationResult validationResult = validator.validate(request, instance); if (!validationResult.isValid()) { entity.addValidationError(property.getName(), validationResult.getErrorMessage()); attemptToPopulate = false; } } if (attemptToPopulate) { for (FieldPersistenceProvider fieldPersistenceProvider : fieldPersistenceProviders) { FieldProviderResponse response = fieldPersistenceProvider.populateValue(request, 
instance); if (FieldProviderResponse.NOT_HANDLED != response) { handled = true; } if (FieldProviderResponse.HANDLED_BREAK == response) { break; } } if (!handled) { defaultFieldPersistenceProvider.populateValue(new PopulateValueRequest(setId, fieldManager, property, metadata, returnType, value, persistenceManager, this), instance); } } } else { try { if (fieldManager.getFieldValue(instance, property.getName()) != null && (metadata.getFieldType() != SupportedFieldType.ID || setId) && metadata.getFieldType() != SupportedFieldType.PASSWORD) { fieldManager.setFieldValue(instance, property.getName(), null); } } catch (FieldNotAvailableException e) { throw new IllegalArgumentException(e); } } } } } validate(entity, instance, mergedProperties, validateUnsubmittedProperties); //if validation failed, refresh the current instance so that none of the changes will be persisted if (entity.isValidationFailure()) { //only refresh the instance if it was managed to begin with if (persistenceManager.getDynamicEntityDao().getStandardEntityManager().contains(instance)) { persistenceManager.getDynamicEntityDao().refresh(instance); } //re-initialize the valid properties for the entity in order to deal with the potential of not //completely sending over all checkbox/radio fields List<Serializable> entityList = new ArrayList<Serializable>(1); entityList.add(instance); Entity invalid = getRecords(mergedProperties, entityList, null, null)[0]; invalid.setValidationErrors(entity.getValidationErrors()); invalid.overridePropertyValues(entity); throw new ValidationException(invalid, "The entity has failed validation"); } else { fieldManager.persistMiddleEntities(); } } catch (IllegalAccessException e) { throw new PersistenceException(e); } catch (InstantiationException e) { throw new PersistenceException(e); } return instance; } protected Class<?> getMapFieldType(Serializable instance, FieldManager fieldManager, Property property) { Class<?> returnType = null; Field field = 
fieldManager.getField(instance.getClass(), property.getName().substring(0, property.getName().indexOf(FieldManager.MAPFIELDSEPARATOR))); java.lang.reflect.Type type = field.getGenericType(); if (type instanceof ParameterizedType) { ParameterizedType pType = (ParameterizedType) type; Class<?> clazz = (Class<?>) pType.getActualTypeArguments()[1]; Class<?>[] entities = persistenceManager.getDynamicEntityDao().getAllPolymorphicEntitiesFromCeiling(clazz); if (!ArrayUtils.isEmpty(entities)) { returnType = entities[entities.length-1]; } } return returnType; } @Override public Entity getRecord(Map<String, FieldMetadata> primaryMergedProperties, Serializable record, Map<String, FieldMetadata> alternateMergedProperties, String pathToTargetObject) { List<Serializable> records = new ArrayList<Serializable>(1); records.add(record); Entity[] productEntities = getRecords(primaryMergedProperties, records, alternateMergedProperties, pathToTargetObject); return productEntities[0]; } @Override public Entity getRecord(Class<?> ceilingEntityClass, PersistencePerspective persistencePerspective, Serializable record) { Map<String, FieldMetadata> mergedProperties = getSimpleMergedProperties(ceilingEntityClass.getName(), persistencePerspective); return getRecord(mergedProperties, record, null, null); } @Override public Entity[] getRecords(Class<?> ceilingEntityClass, PersistencePerspective persistencePerspective, List<? extends Serializable> records) { Map<String, FieldMetadata> mergedProperties = getSimpleMergedProperties(ceilingEntityClass.getName(), persistencePerspective); return getRecords(mergedProperties, records, null, null); } @Override public Map<String, FieldMetadata> getSimpleMergedProperties(String entityName, PersistencePerspective persistencePerspective) { return persistenceManager.getDynamicEntityDao().getSimpleMergedProperties(entityName, persistencePerspective); } @Override public Entity[] getRecords(Map<String, FieldMetadata> primaryMergedProperties, List<? 
extends Serializable> records) { return getRecords(primaryMergedProperties, records, null, null); } @Override public Entity[] getRecords(Map<String, FieldMetadata> primaryUnfilteredMergedProperties, List<? extends Serializable> records, Map<String, FieldMetadata> alternateUnfilteredMergedProperties, String pathToTargetObject) { Map<String, FieldMetadata> primaryMergedProperties = filterOutCollectionMetadata(primaryUnfilteredMergedProperties); Map<String, FieldMetadata> alternateMergedProperties = filterOutCollectionMetadata(alternateUnfilteredMergedProperties); Entity[] entities = new Entity[records.size()]; int j = 0; for (Serializable recordEntity : records) { Serializable entity; if (pathToTargetObject != null) { try { entity = (Serializable) getFieldManager().getFieldValue(recordEntity, pathToTargetObject); } catch (Exception e) { throw new PersistenceException(e); } } else { entity = recordEntity; } Entity entityItem = new Entity(); entityItem.setType(new String[]{entity.getClass().getName()}); entities[j] = entityItem; List<Property> props = new ArrayList<Property>(primaryMergedProperties.size()); extractPropertiesFromPersistentEntity(primaryMergedProperties, entity, props); if (alternateMergedProperties != null) { extractPropertiesFromPersistentEntity(alternateMergedProperties, recordEntity, props); } // Try to add the "main name" property. 
Log a debug message if we can't try { Property p = new Property(); p.setName(MAIN_ENTITY_NAME_PROPERTY); String mainEntityName = (String) MethodUtils.invokeMethod(entity, "getMainEntityName"); p.setValue(mainEntityName); props.add(p); } catch (Exception e) { LOG.debug(String.format("Could not execute the getMainEntityName() method for [%s]", entity.getClass().getName()), e); } // Try to add the alternate id property if available if (alternateMergedProperties != null) { for (Entry<String, FieldMetadata> entry : alternateMergedProperties.entrySet()) { if (entry.getValue() instanceof BasicFieldMetadata) { if (((BasicFieldMetadata) entry.getValue()).getFieldType() == SupportedFieldType.ID) { Map<String, FieldMetadata> alternateOnEntity = new HashMap<String, FieldMetadata>(); alternateOnEntity.put(entry.getKey(), entry.getValue()); List<Property> props2 = new ArrayList<Property>(); extractPropertiesFromPersistentEntity(alternateOnEntity, recordEntity, props2); if (props2.size() == 1) { Property alternateIdProp = props2.get(0); alternateIdProp.setName(ALTERNATE_ID_PROPERTY); props.add(alternateIdProp); } } } } } Property[] properties = new Property[props.size()]; properties = props.toArray(properties); entityItem.setProperties(properties); j++; } return entities; } protected void extractPropertiesFromPersistentEntity(Map<String, FieldMetadata> mergedProperties, Serializable entity, List<Property> props) { FieldManager fieldManager = getFieldManager(); try { if (entity instanceof AdminMainEntity) { //Create an invisible property for the admin main entity name, if applicable. //This is useful for ToOneLookups if that ToOneLookup uses AdminMainEntity to drive //its display name. try { Property propertyItem = new Property(); propertyItem.setName(AdminMainEntity.MAIN_ENTITY_NAME_PROPERTY); propertyItem.setValue(((AdminMainEntity) entity).getMainEntityName()); props.add(propertyItem); } catch (Exception e) { //do nothing here except for not add the property. 
Exceptions could occur when there is a validation //issue and some properties/relationships that are used for gleaning the main entity name end up //not being set } } for (Entry<String, FieldMetadata> entry : mergedProperties.entrySet()) { String property = entry.getKey(); BasicFieldMetadata metadata = (BasicFieldMetadata) entry.getValue(); if (Class.forName(metadata.getInheritedFromType()).isAssignableFrom(entity.getClass()) || entity.getClass().isAssignableFrom(Class.forName(metadata.getInheritedFromType()))) { boolean proceed = true; if (property.contains(".")) { StringTokenizer tokens = new StringTokenizer(property, "."); Object testObject = entity; while (tokens.hasMoreTokens()) { String token = tokens.nextToken(); if (tokens.hasMoreTokens()) { try { testObject = fieldManager.getFieldValue(testObject, token); } catch (FieldNotAvailableException e) { proceed = false; break; } if (testObject == null) { Property propertyItem = new Property(); propertyItem.setName(property); if (props.contains(propertyItem)) { proceed = false; break; } propertyItem.setValue(null); props.add(propertyItem); proceed = false; break; } } } } if (!proceed) { continue; } boolean isFieldAccessible = true; Object value = null; try { value = fieldManager.getFieldValue(entity, property); } catch (FieldNotAvailableException e) { isFieldAccessible = false; } checkField: { if (isFieldAccessible) { Property propertyItem = new Property(); propertyItem.setName(property); if (props.contains(propertyItem)) { continue; } props.add(propertyItem); String displayVal = propertyItem.getDisplayValue(); boolean handled = false; for (FieldPersistenceProvider fieldPersistenceProvider : fieldPersistenceProviders) { FieldProviderResponse response = fieldPersistenceProvider.extractValue( new ExtractValueRequest(props, fieldManager, metadata, value, displayVal, persistenceManager, this, entity), propertyItem); if (FieldProviderResponse.NOT_HANDLED != response) { handled = true; } if 
(FieldProviderResponse.HANDLED_BREAK == response) { break; } } if (!handled) { defaultFieldPersistenceProvider.extractValue( new ExtractValueRequest(props, fieldManager, metadata, value, displayVal, persistenceManager, this, entity), propertyItem); } break checkField; } //try a direct property acquisition via reflection try { String strVal = null; Method method; try { //try a 'get' prefixed mutator first String temp = "get" + property.substring(0, 1).toUpperCase() + property.substring(1, property.length()); method = entity.getClass().getMethod(temp, new Class[]{}); } catch (NoSuchMethodException e) { method = entity.getClass().getMethod(property, new Class[]{}); } value = method.invoke(entity, new String[]{}); Property propertyItem = new Property(); propertyItem.setName(property); if (props.contains(propertyItem)) { continue; } props.add(propertyItem); if (value == null) { strVal = null; } else { if (Date.class.isAssignableFrom(value.getClass())) { strVal = getSimpleDateFormatter().format((Date) value); } else if (Timestamp.class.isAssignableFrom(value.getClass())) { strVal = getSimpleDateFormatter().format(new Date(((Timestamp) value).getTime())); } else if (Calendar.class.isAssignableFrom(value.getClass())) { strVal = getSimpleDateFormatter().format(((Calendar) value).getTime()); } else if (Double.class.isAssignableFrom(value.getClass())) { strVal = getDecimalFormatter().format(value); } else if (BigDecimal.class.isAssignableFrom(value.getClass())) { strVal = getDecimalFormatter().format(value); } else { strVal = value.toString(); } } propertyItem.setValue(strVal); } catch (NoSuchMethodException e) { LOG.debug("Unable to find a specified property in the entity: " + property); //do nothing - this property is simply not in the bean } } } } } catch (ClassNotFoundException e) { throw new PersistenceException(e); } catch (IllegalAccessException e) { throw new PersistenceException(e); } catch (InvocationTargetException e) { throw new PersistenceException(e); } } 
@Override public String getStringValueFromGetter(Serializable instance, String propertyName) throws IllegalAccessException, InvocationTargetException, NoSuchMethodException { Object value = PropertyUtils.getProperty(instance, propertyName); return formatValue(value); } @Override public String formatValue(Object value) { String strVal; if (value == null) { strVal = null; } else { if (Date.class.isAssignableFrom(value.getClass())) { strVal = getSimpleDateFormatter().format((Date) value); } else if (Timestamp.class.isAssignableFrom(value.getClass())) { strVal = getSimpleDateFormatter().format(new Date(((Timestamp) value).getTime())); } else if (Calendar.class.isAssignableFrom(value.getClass())) { strVal = getSimpleDateFormatter().format(((Calendar) value).getTime()); } else if (Double.class.isAssignableFrom(value.getClass())) { strVal = getDecimalFormatter().format(value); } else if (BigDecimal.class.isAssignableFrom(value.getClass())) { strVal = getDecimalFormatter().format(value); } else { strVal = value.toString(); } } return strVal; } protected EntityResult update(PersistencePackage persistencePackage, Object primaryKey, boolean includeRealEntity) throws ServiceException { EntityResult entityResult = new EntityResult(); Entity entity = persistencePackage.getEntity(); PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective(); ForeignKey foreignKey = (ForeignKey) persistencePerspective.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.FOREIGNKEY); if (foreignKey != null && !foreignKey.getMutable()) { throw new SecurityServiceException("Entity not mutable"); } try { Class<?>[] entities = persistenceManager.getPolymorphicEntities(persistencePackage.getCeilingEntityFullyQualifiedClassname()); Map<String, FieldMetadata> mergedProperties = persistenceManager.getDynamicEntityDao().getMergedProperties( persistencePackage.getCeilingEntityFullyQualifiedClassname(), entities, foreignKey, 
persistencePerspective.getAdditionalNonPersistentProperties(), persistencePerspective.getAdditionalForeignKeys(), MergedPropertyType.PRIMARY, persistencePerspective.getPopulateToOneFields(), persistencePerspective.getIncludeFields(), persistencePerspective.getExcludeFields(), persistencePerspective.getConfigurationKey(), "" ); if (primaryKey == null) { primaryKey = getPrimaryKey(entity, mergedProperties); } Serializable instance = persistenceManager.getDynamicEntityDao().retrieve(Class.forName(entity.getType()[0]), primaryKey); Assert.isTrue(instance != null, "Entity not found"); instance = createPopulatedInstance(instance, entity, mergedProperties, false, persistencePackage.isValidateUnsubmittedProperties()); if (!entity.isValidationFailure()) { instance = persistenceManager.getDynamicEntityDao().merge(instance); if (includeRealEntity) { entityResult.setEntityBackingObject(instance); } List<Serializable> entityList = new ArrayList<Serializable>(1); entityList.add(instance); entity = getRecords(mergedProperties, entityList, null, null)[0]; entityResult.setEntity(entity); return entityResult; } else { entityResult.setEntity(entity); return entityResult; } } catch (Exception e) { throw new ServiceException("Problem updating entity : " + e.getMessage(), e); } } @Override public Object getPrimaryKey(Entity entity, Map<String, FieldMetadata> mergedUnfilteredProperties) { Map<String, FieldMetadata> mergedProperties = filterOutCollectionMetadata(mergedUnfilteredProperties); Object primaryKey = null; String idPropertyName = null; BasicFieldMetadata metaData = null; for (String property : mergedProperties.keySet()) { BasicFieldMetadata temp = (BasicFieldMetadata) mergedProperties.get(property); if (temp.getFieldType() == SupportedFieldType.ID && !property.contains(".")) { idPropertyName = property; metaData = temp; break; } } if (idPropertyName == null) { throw new RuntimeException("Could not find a primary key property in the passed entity with type: " + 
entity.getType()[0]); } for (Property property : entity.getProperties()) { if (property.getName().equals(idPropertyName)) { switch(metaData.getSecondaryType()) { case INTEGER: primaryKey = (property.getValue() == null) ? null : Long.valueOf(property.getValue()); break; case STRING: primaryKey = property.getValue(); break; } break; } } if (primaryKey == null) { throw new RuntimeException("Could not find the primary key property (" + idPropertyName + ") in the passed entity with type: " + entity.getType()[0]); } return primaryKey; } @Override public List<FilterMapping> getFilterMappings(PersistencePerspective persistencePerspective, CriteriaTransferObject cto, String ceilingEntityFullyQualifiedClassname, Map<String, FieldMetadata> mergedUnfilteredProperties, RestrictionFactory customRestrictionFactory) { Map<String, FieldMetadata> mergedProperties = filterOutCollectionMetadata(mergedUnfilteredProperties); List<FilterMapping> filterMappings = new ArrayList<FilterMapping>(); for (String propertyId : cto.getCriteriaMap().keySet()) { if (mergedProperties.containsKey(propertyId)) { boolean handled = false; for (FieldPersistenceProvider fieldPersistenceProvider : fieldPersistenceProviders) { FieldProviderResponse response = fieldPersistenceProvider.addSearchMapping( new AddSearchMappingRequest(persistencePerspective, cto, ceilingEntityFullyQualifiedClassname, mergedProperties, propertyId, getFieldManager(), this, customRestrictionFactory==null?restrictionFactory :customRestrictionFactory), filterMappings); if (FieldProviderResponse.NOT_HANDLED != response) { handled = true; } if (FieldProviderResponse.HANDLED_BREAK == response) { break; } } if (!handled) { defaultFieldPersistenceProvider.addSearchMapping( new AddSearchMappingRequest(persistencePerspective, cto, ceilingEntityFullyQualifiedClassname, mergedProperties, propertyId, getFieldManager(), this, customRestrictionFactory==null?restrictionFactory :customRestrictionFactory), filterMappings); } } } return 
filterMappings; } @Override public List<FilterMapping> getFilterMappings(PersistencePerspective persistencePerspective, CriteriaTransferObject cto, String ceilingEntityFullyQualifiedClassname, Map<String, FieldMetadata> mergedUnfilteredProperties) { return getFilterMappings(persistencePerspective, cto, ceilingEntityFullyQualifiedClassname, mergedUnfilteredProperties, null); } @Override public void extractProperties(Class<?>[] inheritanceLine, Map<MergedPropertyType, Map<String, FieldMetadata>> mergedProperties, List<Property> properties) { extractPropertiesFromMetadata(inheritanceLine, mergedProperties.get(MergedPropertyType.PRIMARY), properties, false, MergedPropertyType.PRIMARY); } protected void extractPropertiesFromMetadata(Class<?>[] inheritanceLine, Map<String, FieldMetadata> mergedProperties, List<Property> properties, Boolean isHiddenOverride, MergedPropertyType type) { for (Map.Entry<String, FieldMetadata> entry : mergedProperties.entrySet()) { String property = entry.getKey(); Property prop = new Property(); FieldMetadata metadata = mergedProperties.get(property); prop.setName(property); Comparator<Property> comparator = new Comparator<Property>() { @Override public int compare(Property o1, Property o2) { return o1.getName().compareTo(o2.getName()); } }; Collections.sort(properties, comparator); int pos = Collections.binarySearch(properties, prop, comparator); if (pos >= 0 && MergedPropertyType.MAPSTRUCTUREKEY != type && MergedPropertyType.MAPSTRUCTUREVALUE != type) { logWarn: { if ((metadata instanceof BasicFieldMetadata) && SupportedFieldType.ID.equals(((BasicFieldMetadata) metadata).getFieldType())) { //don't warn for id field collisions, but still ignore the colliding fields break logWarn; } LOG.warn("Detected a field name collision (" + metadata.getTargetClass() + "." + property + ") during inspection for the inheritance line starting with (" + inheritanceLine[0].getName() + "). Ignoring the additional field. 
This can occur most commonly when using the @AdminPresentationAdornedTargetCollection and the collection type and target class have field names in common. This situation should be avoided, as the system will strip the repeated fields, which can cause unpredictable behavior."); } continue; } properties.add(prop); prop.setMetadata(metadata); if (isHiddenOverride && prop.getMetadata() instanceof BasicFieldMetadata) { //this only makes sense for non collection types ((BasicFieldMetadata) prop.getMetadata()).setVisibility(VisibilityEnum.HIDDEN_ALL); } } } @Override public void updateMergedProperties(PersistencePackage persistencePackage, Map<MergedPropertyType, Map<String, FieldMetadata>> allMergedProperties) throws ServiceException { String ceilingEntityFullyQualifiedClassname = persistencePackage.getCeilingEntityFullyQualifiedClassname(); try { PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective(); Class<?>[] entities = persistenceManager.getPolymorphicEntities(ceilingEntityFullyQualifiedClassname); Map<String, FieldMetadata> mergedProperties = persistenceManager.getDynamicEntityDao().getMergedProperties( ceilingEntityFullyQualifiedClassname, entities, (ForeignKey) persistencePerspective.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.FOREIGNKEY), persistencePerspective.getAdditionalNonPersistentProperties(), persistencePerspective.getAdditionalForeignKeys(), MergedPropertyType.PRIMARY, persistencePerspective.getPopulateToOneFields(), persistencePerspective.getIncludeFields(), persistencePerspective.getExcludeFields(), persistencePerspective.getConfigurationKey(), "" ); allMergedProperties.put(MergedPropertyType.PRIMARY, mergedProperties); } catch (Exception e) { throw new ServiceException("Unable to fetch results for " + ceilingEntityFullyQualifiedClassname, e); } } @Override public EntityResult update(PersistencePackage persistencePackage, boolean includeRealEntityObject) throws ServiceException { return 
update(persistencePackage, null, true); } @Override public Entity update(PersistencePackage persistencePackage) throws ServiceException { EntityResult er = update(persistencePackage, null, false); return er.getEntity(); } @Override public Entity add(PersistencePackage persistencePackage) throws ServiceException { EntityResult entityResult = add(persistencePackage, false); return entityResult.getEntity(); } @Override public EntityResult add(PersistencePackage persistencePackage, boolean includeRealEntityObject) throws ServiceException { EntityResult entityResult = new EntityResult(); Entity entity = persistencePackage.getEntity(); PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective(); ForeignKey foreignKey = (ForeignKey) persistencePerspective.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.FOREIGNKEY); if (foreignKey != null && !foreignKey.getMutable()) { throw new SecurityServiceException("Entity not mutable"); } try { Class<?>[] entities = persistenceManager.getPolymorphicEntities(persistencePackage.getCeilingEntityFullyQualifiedClassname()); Map<String, FieldMetadata> mergedUnfilteredProperties = persistenceManager.getDynamicEntityDao().getMergedProperties( persistencePackage.getCeilingEntityFullyQualifiedClassname(), entities, foreignKey, persistencePerspective.getAdditionalNonPersistentProperties(), persistencePerspective.getAdditionalForeignKeys(), MergedPropertyType.PRIMARY, persistencePerspective.getPopulateToOneFields(), persistencePerspective.getIncludeFields(), persistencePerspective.getExcludeFields(), persistencePerspective.getConfigurationKey(), "" ); Map<String, FieldMetadata> mergedProperties = filterOutCollectionMetadata(mergedUnfilteredProperties); String idProperty = null; for (String property : mergedProperties.keySet()) { if (((BasicFieldMetadata) mergedProperties.get(property)).getFieldType() == SupportedFieldType.ID) { idProperty = property; break; } } if (idProperty == null) { 
throw new RuntimeException("Could not find a primary key property in the passed entity with type: " + entity.getType()[0]); } Object primaryKey = null; try { primaryKey = getPrimaryKey(entity, mergedProperties); } catch (Exception e) { //don't do anything - this is a valid case } if (primaryKey == null) { Serializable instance = (Serializable) Class.forName(entity.getType()[0]).newInstance(); instance = createPopulatedInstance(instance, entity, mergedProperties, false); instance = persistenceManager.getDynamicEntityDao().merge(instance); if (includeRealEntityObject) { entityResult.setEntityBackingObject(instance); } List<Serializable> entityList = new ArrayList<Serializable>(1); entityList.add(instance); entity = getRecords(mergedProperties, entityList, null, null)[0]; entityResult.setEntity(entity); return entityResult; } else { return update(persistencePackage, primaryKey, includeRealEntityObject); } } catch (Exception e) { throw new ServiceException("Problem adding new entity : " + e.getMessage(), e); } } @Override public void remove(PersistencePackage persistencePackage) throws ServiceException { Entity entity = persistencePackage.getEntity(); PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective(); ForeignKey foreignKey = (ForeignKey) persistencePerspective.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.FOREIGNKEY); if (foreignKey != null && !foreignKey.getMutable()) { throw new SecurityServiceException("Entity not mutable"); } try { Class<?>[] entities = persistenceManager.getPolymorphicEntities(persistencePackage.getCeilingEntityFullyQualifiedClassname()); Map<String, FieldMetadata> mergedUnfilteredProperties = persistenceManager.getDynamicEntityDao().getMergedProperties( persistencePackage.getCeilingEntityFullyQualifiedClassname(), entities, foreignKey, persistencePerspective.getAdditionalNonPersistentProperties(), persistencePerspective.getAdditionalForeignKeys(), MergedPropertyType.PRIMARY, 
// NOTE(review): this span begins mid-way through remove(PersistencePackage) — the arguments
// below complete a dynamicEntityDao.getMergedProperties(...) call whose opening lines are
// above this view; mergedUnfilteredProperties / entity / foreignKey are declared there.
persistencePerspective.getPopulateToOneFields(),
persistencePerspective.getIncludeFields(),
persistencePerspective.getExcludeFields(),
persistencePerspective.getConfigurationKey(),
""
);
// Collection metadata cannot drive a basic removal; keep only simple field metadata.
Map<String, FieldMetadata> mergedProperties = filterOutCollectionMetadata(mergedUnfilteredProperties);
Object primaryKey = getPrimaryKey(entity, mergedProperties);
Serializable instance = persistenceManager.getDynamicEntityDao().retrieve(Class.forName(entity.getType()[0]), primaryKey);
Assert.isTrue(instance != null, "Entity not found");
switch (persistencePerspective.getOperationTypes().getRemoveType()) {
    case NONDESTRUCTIVEREMOVE:
        // Non-destructive: do not delete the row — detach the instance from the owning
        // foreign-key collection instead. Stops after the first FOREIGN_KEY property found.
        for (Property property : entity.getProperties()) {
            String originalPropertyName = property.getName();
            FieldManager fieldManager = getFieldManager();
            if (fieldManager.getField(instance.getClass(), property.getName()) == null) {
                LOG.debug("Unable to find a bean property for the reported property: " + originalPropertyName + ". Ignoring property.");
                continue;
            }
            if (SupportedFieldType.FOREIGN_KEY == ((BasicFieldMetadata) mergedProperties.get(originalPropertyName)).getFieldType()) {
                String value = property.getValue();
                Serializable foreignInstance = persistenceManager.getDynamicEntityDao().retrieve(Class.forName(foreignKey.getForeignKeyClass()), Long.valueOf(value));
                Collection collection = (Collection) fieldManager.getFieldValue(foreignInstance, foreignKey.getOriginatingField());
                collection.remove(instance);
                break;
            }
        }
        break;
    case BASIC:
        // Destructive: physically delete the entity.
        persistenceManager.getDynamicEntityDao().remove(instance);
        break;
}
} catch (Exception e) {
    throw new ServiceException("Problem removing entity : " + e.getMessage(), e);
}
}

/**
 * Fetches a page of admin records for the requested ceiling entity, applying the
 * criteria in {@code cto} plus any perspective-driven restrictions (e.g. hiding
 * archived rows). Returns both the record page and the total match count.
 */
@Override
public DynamicResultSet fetch(PersistencePackage persistencePackage, CriteriaTransferObject cto) throws ServiceException {
    Entity[] payload;
    int totalRecords;
    String ceilingEntityFullyQualifiedClassname = persistencePackage.getCeilingEntityFullyQualifiedClassname();
    // Default the fetch type to the ceiling entity when the caller did not supply one.
    if (StringUtils.isEmpty(persistencePackage.getFetchTypeFullyQualifiedClassname())) {
        persistencePackage.setFetchTypeFullyQualifiedClassname(ceilingEntityFullyQualifiedClassname);
    }
    PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
    try {
        Class<?>[] entities = persistenceManager.getDynamicEntityDao().getAllPolymorphicEntitiesFromCeiling(Class.forName(ceilingEntityFullyQualifiedClassname));
        Map<String, FieldMetadata> mergedProperties = persistenceManager.getDynamicEntityDao().getMergedProperties(
            ceilingEntityFullyQualifiedClassname,
            entities,
            (ForeignKey) persistencePerspective.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.FOREIGNKEY),
            persistencePerspective.getAdditionalNonPersistentProperties(),
            persistencePerspective.getAdditionalForeignKeys(),
            MergedPropertyType.PRIMARY,
            persistencePerspective.getPopulateToOneFields(),
            persistencePerspective.getIncludeFields(),
            persistencePerspective.getExcludeFields(),
            persistencePerspective.getConfigurationKey(),
            ""
        );
        List<FilterMapping> filterMappings = getFilterMappings(persistencePerspective, cto, persistencePackage
                .getFetchTypeFullyQualifiedClassname(), mergedProperties);
        if (CollectionUtils.isNotEmpty(cto.getAdditionalFilterMappings())) {
            filterMappings.addAll(cto.getAdditionalFilterMappings());
        }
        // Determine whether any type in the polymorphic hierarchy is archivable (implements Status).
        boolean isArchivable = false;
        for (Class<?> entity : entities) {
            if (Status.class.isAssignableFrom(entity)) {
                isArchivable = true;
                break;
            }
        }
        // Unless the perspective explicitly shows archived rows, restrict results to
        // archiveStatus.archived == 'N' OR null (i.e. not archived).
        if (isArchivable && !persistencePerspective.getShowArchivedFields()) {
            FilterMapping filterMapping = new FilterMapping()
                .withFieldPath(new FieldPath().withTargetProperty("archiveStatus.archived"))
                .withDirectFilterValues(new EmptyFilterValues())
                .withRestriction(new Restriction()
                    .withPredicateProvider(new PredicateProvider<Character, Character>() {
                        @Override
                        public Predicate buildPredicate(CriteriaBuilder builder, FieldPathBuilder fieldPathBuilder,
                                From root, String ceilingEntity, String fullPropertyName,
                                Path<Character> explicitPath, List<Character> directValues) {
                            return builder.or(builder.equal(explicitPath, 'N'), builder.isNull(explicitPath));
                        }
                    })
                );
            filterMappings.add(filterMapping);
        }
        List<Serializable> records = getPersistentRecords(persistencePackage.getFetchTypeFullyQualifiedClassname(), filterMappings, cto.getFirstResult(), cto.getMaxResults());
        payload = getRecords(mergedProperties, records, null, null);
        totalRecords = getTotalRecords(persistencePackage.getFetchTypeFullyQualifiedClassname(), filterMappings);
    } catch (Exception e) {
        throw new ServiceException("Unable to fetch results for " + ceilingEntityFullyQualifiedClassname, e);
    }
    return new DynamicResultSet(null, payload, totalRecords);
}

/**
 * Counts all records for {@code ceilingEntity} that match the given filter mappings.
 */
@Override
public Integer getTotalRecords(String ceilingEntity, List<FilterMapping> filterMappings) {
    return ((Long) criteriaTranslator.translateCountQuery(persistenceManager.getDynamicEntityDao(),
            ceilingEntity, filterMappings).getSingleResult()).intValue();
}

/**
 * Retrieves the matching persistent records, optionally paged via
 * {@code firstResult}/{@code maxResults}.
 */
@Override
public List<Serializable> getPersistentRecords(String ceilingEntity, List<FilterMapping> filterMappings, Integer firstResult, Integer maxResults) {
    return criteriaTranslator.translateQuery(persistenceManager.getDynamicEntityDao(),
            ceilingEntity, filterMappings, firstResult, maxResults).getResultList();
}

// Convenience overload: validates submitted and unsubmitted properties alike.
@Override
public boolean validate(Entity entity, Serializable populatedInstance, Map<String, FieldMetadata> mergedProperties) {
    return validate(entity, populatedInstance, mergedProperties, true);
}

/**
 * Runs the entity validator service and reports success; validation errors are
 * recorded on {@code entity} itself (see {@code Entity#isValidationFailure()}).
 */
@Override
public boolean validate(Entity entity, Serializable populatedInstance, Map<String, FieldMetadata> mergedProperties,
        boolean validateUnsubmittedProperties) {
    entityValidatorService.validate(entity, populatedInstance, mergedProperties, validateUnsubmittedProperties);
    return !entity.isValidationFailure();
}

@Override
public void setPersistenceManager(PersistenceManager persistenceManager) {
    this.persistenceManager = persistenceManager;
}

// Delegates module lookup to the persistence manager (which also acts as an InspectHelper).
@Override
public PersistenceModule getCompatibleModule(OperationType operationType) {
    return ((InspectHelper) persistenceManager).getCompatibleModule(operationType);
}

public FieldPersistenceProvider getDefaultFieldPersistenceProvider() {
    return defaultFieldPersistenceProvider;
}

public void setDefaultFieldPersistenceProvider(FieldPersistenceProvider defaultFieldPersistenceProvider) {
    this.defaultFieldPersistenceProvider = defaultFieldPersistenceProvider;
}

public List<FieldPersistenceProvider> getFieldPersistenceProviders() {
    return fieldPersistenceProviders;
}

public void setFieldPersistenceProviders(List<FieldPersistenceProvider> fieldPersistenceProviders) {
    this.fieldPersistenceProviders = fieldPersistenceProviders;
}

public CriteriaTranslator getCriteriaTranslator() {
    return criteriaTranslator;
}

public void setCriteriaTranslator(CriteriaTranslator criteriaTranslator) {
    this.criteriaTranslator = criteriaTranslator;
}

public EntityValidatorService getEntityValidatorService() {
    return entityValidatorService;
}

public void setEntityValidatorService(EntityValidatorService entityValidatorService) {
    this.entityValidatorService = entityValidatorService;
}

public RestrictionFactory getRestrictionFactory() {
    return restrictionFactory;
}

public void setRestrictionFactory(RestrictionFactory restrictionFactory) {
    this.restrictionFactory = restrictionFactory;
}

public PersistenceManager getPersistenceManager() {
    return persistenceManager;
}
}
1no label
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_service_persistence_module_BasicPersistenceModule.java
1,033
/**
 * Base JPA implementation of an order line item (table BLC_ORDER_ITEM, joined inheritance).
 * Holds quantity, retail/sale/override pricing, promotion artifacts (adjustments, qualifiers,
 * candidate offers, price details) and free-form attributes. Admin metadata annotations mark
 * the whole entity read-only in the admin via the merge override below.
 */
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_ORDER_ITEM")
@Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@AdminPresentationMergeOverrides(
    {
        @AdminPresentationMergeOverride(name = "", mergeEntries =
            @AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.READONLY,
                booleanOverrideValue = true))
    }
)
@AdminPresentationClass(populateToOneFields = PopulateToOneFieldsEnum.TRUE, friendlyName = "OrderItemImpl_baseOrderItem")
public class OrderItemImpl implements OrderItem, Cloneable, AdminMainEntity, CurrencyCodeIdentifiable {

    private static final Log LOG = LogFactory.getLog(OrderItemImpl.class);
    private static final long serialVersionUID = 1L;

    // Primary key generated via Broadleaf's table-backed id generator.
    @Id
    @GeneratedValue(generator = "OrderItemId")
    @GenericGenerator(
        name="OrderItemId",
        strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
        parameters = {
            @Parameter(name="segment_value", value="OrderItemImpl"),
            @Parameter(name="entity_name", value="org.broadleafcommerce.core.order.domain.OrderItemImpl")
        }
    )
    @Column(name = "ORDER_ITEM_ID")
    @AdminPresentation(visibility = VisibilityEnum.HIDDEN_ALL)
    protected Long id;

    // Category this item was added from; missing rows are ignored rather than failing the load.
    @ManyToOne(targetEntity = CategoryImpl.class)
    @JoinColumn(name = "CATEGORY_ID")
    @Index(name="ORDERITEM_CATEGORY_INDEX", columnNames={"CATEGORY_ID"})
    @NotFound(action = NotFoundAction.IGNORE)
    @AdminPresentation(friendlyName = "OrderItemImpl_Category", order=Presentation.FieldOrder.CATEGORY,
            group = Presentation.Group.Name.Catalog, groupOrder = Presentation.Group.Order.Catalog)
    @AdminPresentationToOneLookup()
    protected Category category;

    // Owning order; hidden from the admin item screen.
    @ManyToOne(targetEntity = OrderImpl.class)
    @JoinColumn(name = "ORDER_ID")
    @Index(name="ORDERITEM_ORDER_INDEX", columnNames={"ORDER_ID"})
    @AdminPresentation(excluded = true)
    protected Order order;

    // Finalized per-unit price; see setPrice()/finalizePrice()/assignFinalPrice().
    @Column(name = "PRICE", precision = 19, scale = 5)
    @AdminPresentation(friendlyName = "OrderItemImpl_Item_Price", order = Presentation.FieldOrder.PRICE,
            group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
            fieldType = SupportedFieldType.MONEY, prominent = true, gridOrder = 3000)
    protected BigDecimal price;

    @Column(name = "QUANTITY", nullable = false)
    @AdminPresentation(friendlyName = "OrderItemImpl_Item_Quantity", order = Presentation.FieldOrder.QUANTITY,
            group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
            prominent = true, gridOrder = 2000)
    protected int quantity;

    @Column(name = "RETAIL_PRICE", precision=19, scale=5)
    @AdminPresentation(friendlyName = "OrderItemImpl_Item_Retail_Price", order = Presentation.FieldOrder.RETAILPRICE,
            group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
            fieldType = SupportedFieldType.MONEY, prominent = true, gridOrder = 4000)
    protected BigDecimal retailPrice;

    @Column(name = "SALE_PRICE", precision=19, scale=5)
    @AdminPresentation(friendlyName = "OrderItemImpl_Item_Sale_Price", order = Presentation.FieldOrder.SALEPRICE,
            group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
            fieldType = SupportedFieldType.MONEY)
    protected BigDecimal salePrice;

    @Column(name = "NAME")
    @AdminPresentation(friendlyName = "OrderItemImpl_Item_Name", order=Presentation.FieldOrder.NAME,
            group = Presentation.Group.Name.Description, prominent=true, gridOrder = 1000,
            groupOrder = Presentation.Group.Order.Description)
    protected String name;

    // Optional gift/personal message; lifecycle fully cascaded with the item.
    @ManyToOne(targetEntity = PersonalMessageImpl.class, cascade = { CascadeType.ALL })
    @JoinColumn(name = "PERSONAL_MESSAGE_ID")
    @Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
    @Index(name="ORDERITEM_MESSAGE_INDEX", columnNames={"PERSONAL_MESSAGE_ID"})
    protected PersonalMessage personalMessage;

    // Link to the gift-wrap item wrapping this item, when applicable.
    @ManyToOne(targetEntity = GiftWrapOrderItemImpl.class, cascade = { CascadeType.MERGE, CascadeType.PERSIST })
    @JoinColumn(name = "GIFT_WRAP_ITEM_ID", nullable = true)
    @Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
    @Index(name="ORDERITEM_GIFT_INDEX", columnNames={"GIFT_WRAP_ITEM_ID"})
    @AdminPresentation(excluded = true)
    protected GiftWrapOrderItem giftWrapOrderItem;

    // Offer adjustments applied to this item; orphanRemoval deletes rows removed from the list.
    @OneToMany(mappedBy = "orderItem", targetEntity = OrderItemAdjustmentImpl.class, cascade = { CascadeType.ALL },
            orphanRemoval = true)
    @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region = "blOrderElements")
    @AdminPresentationCollection(friendlyName="OrderItemImpl_Adjustments", order = Presentation.FieldOrder.ADJUSTMENTS,
            tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced)
    protected List<OrderItemAdjustment> orderItemAdjustments = new ArrayList<OrderItemAdjustment>();

    @OneToMany(mappedBy = "orderItem", targetEntity = OrderItemQualifierImpl.class, cascade = { CascadeType.ALL },
            orphanRemoval = true)
    @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region = "blOrderElements")
    protected List<OrderItemQualifier> orderItemQualifiers = new ArrayList<OrderItemQualifier>();

    @OneToMany(mappedBy = "orderItem", targetEntity = CandidateItemOfferImpl.class, cascade = { CascadeType.ALL },
            orphanRemoval = true)
    @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region = "blOrderElements")
    protected List<CandidateItemOffer> candidateItemOffers = new ArrayList<CandidateItemOffer>();

    // Per-bucket price breakdown used by the pricing engine for adjusted totals.
    @OneToMany(mappedBy = "orderItem", targetEntity = OrderItemPriceDetailImpl.class, cascade = { CascadeType.ALL },
            orphanRemoval = true)
    @Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
    @AdminPresentationCollection(friendlyName="OrderItemImpl_Price_Details", order = Presentation.FieldOrder.PRICEDETAILS,
            tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced)
    protected List<OrderItemPriceDetail> orderItemPriceDetails = new ArrayList<OrderItemPriceDetail>();

    // Stored as the OrderItemType enumeration's string value; see convertOrderItemType().
    @Column(name = "ORDER_ITEM_TYPE")
    @Index(name="ORDERITEM_TYPE_INDEX", columnNames={"ORDER_ITEM_TYPE"})
    protected String orderItemType;

    // Null means taxable (see isTaxable()).
    @Column(name = "ITEM_TAXABLE_FLAG")
    protected Boolean itemTaxable;

    @Column(name = "RETAIL_PRICE_OVERRIDE")
    protected Boolean retailPriceOverride;

    @Column(name = "SALE_PRICE_OVERRIDE")
    protected Boolean salePriceOverride;

    // Null means discounting is allowed (see isDiscountingAllowed()).
    @Column(name = "DISCOUNTS_ALLOWED")
    @AdminPresentation(friendlyName = "OrderItemImpl_Discounts_Allowed", order=Presentation.FieldOrder.DISCOUNTALLOWED,
            tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced)
    protected Boolean discountsAllowed;

    // Free-form item attributes keyed by attribute name.
    @OneToMany(mappedBy = "orderItem", targetEntity = OrderItemAttributeImpl.class, cascade = { CascadeType.ALL },
            orphanRemoval = true)
    @Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
    @MapKey(name="name")
    @AdminPresentationMap(friendlyName = "OrderItemImpl_Attributes",
        tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced,
        deleteEntityUponRemove = true, forceFreeFormKeys = true,
        keyPropertyFriendlyName = "OrderItemAttributeImpl_Attribute_Name"
    )
    protected Map<String, OrderItemAttribute> orderItemAttributeMap = new HashMap<String, OrderItemAttribute>();

    @Column(name = "TOTAL_TAX")
    @AdminPresentation(friendlyName = "OrderItemImpl_Total_Tax", order = Presentation.FieldOrder.TOTALTAX,
            group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
            fieldType = SupportedFieldType.MONEY)
    protected BigDecimal totalTax;

    // Lazily seeds the price fields via updateSaleAndRetailPrices() on first access.
    @Override
    public Money getRetailPrice() {
        if (retailPrice == null) {
            updateSaleAndRetailPrices();
        }
        return convertToMoney(retailPrice);
    }

    @Override
    public void setRetailPrice(Money retailPrice) {
        this.retailPrice = Money.toAmount(retailPrice);
    }

    /**
     * Returns the sale price, clamped so it never exceeds the retail price; falls back to
     * retail price when no sale price is set.
     */
    @Override
    public Money getSalePrice() {
        if (salePrice == null) {
            updateSaleAndRetailPrices();
        }
        if (salePrice != null) {
            Money returnPrice = convertToMoney(salePrice);
            if (retailPrice != null && returnPrice.greaterThan(getRetailPrice())) {
                return getRetailPrice();
            } else {
                return returnPrice;
            }
        } else {
            return getRetailPrice();
        }
    }

    @Override
    public void setSalePrice(Money salePrice) {
        this.salePrice = Money.toAmount(salePrice);
    }

    // Reported price is the average unit price across the item's price details.
    @Override
    public Money getPrice() {
        return getAveragePrice();
    }

    /**
     * Hard-sets the item price: locks both retail and sale price as overrides and
     * disables further discounting.
     */
    @Override
    public void setPrice(Money finalPrice) {
        setRetailPrice(finalPrice);
        setSalePrice(finalPrice);
        setRetailPriceOverride(true);
        setSalePriceOverride(true);
        setDiscountingAllowed(false);
        this.price = Money.toAmount(finalPrice);
    }

    // Taxable price is the average price when taxable (or unspecified), otherwise zero.
    @Override
    public Money getTaxablePrice() {
        Money taxablePrice = BroadleafCurrencyUtils.getMoney(BigDecimal.ZERO, getOrder().getCurrency());
        if (isTaxable() == null || isTaxable()) {
            taxablePrice = getAveragePrice();
        }
        return taxablePrice;
    }

    @Override
    public int getQuantity() {
        return quantity;
    }

    @Override
    public void setQuantity(int quantity) {
        this.quantity = quantity;
    }

    @Override
    public Category getCategory() {
        return category;
    }

    @Override
    public void setCategory(Category category) {
        this.category = category;
    }

    @Override
    public List<CandidateItemOffer> getCandidateItemOffers() {
        return candidateItemOffers;
    }

    @Override
    public void setCandidateItemOffers(List<CandidateItemOffer> candidateItemOffers) {
        this.candidateItemOffers = candidateItemOffers;
    }

    @Override
    public PersonalMessage getPersonalMessage() {
        return personalMessage;
    }

    @Override
    public void setPersonalMessage(PersonalMessage personalMessage) {
        this.personalMessage = personalMessage;
    }

    @Override
    public Order getOrder() {
        return order;
    }

    @Override
    public void setOrder(Order order) {
        this.order = order;
    }

    @Override
    public Long getId() {
        return id;
    }

    @Override
    public void setId(Long id) {
        this.id = id;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public void setName(String name) {
        this.name = name;
    }

    /**
     * True when this item's category, or any ancestor reachable through the
     * default-parent chain, has the given name.
     */
    @Override
    public boolean isInCategory(String categoryName) {
        Category currentCategory = category;
        if (currentCategory != null) {
            if (currentCategory.getName().equals(categoryName)) {
                return true;
            }
            while ((currentCategory = currentCategory.getDefaultParentCategory()) != null) {
                if (currentCategory.getName().equals(categoryName)) {
                    return true;
                }
            }
        }
        return false;
    }

    @Override
    public List<OrderItemQualifier> getOrderItemQualifiers() {
        return this.orderItemQualifiers;
    }

    @Override
    public void setOrderItemQualifiers(List<OrderItemQualifier> orderItemQualifiers) {
        this.orderItemQualifiers = orderItemQualifiers;
    }

    @Override
    public List<OrderItemAdjustment> getOrderItemAdjustments() {
        return this.orderItemAdjustments;
    }

    @Override
    public void setOrderItemAdjustments(List<OrderItemAdjustment> orderItemAdjustments) {
        this.orderItemAdjustments = orderItemAdjustments;
    }

    // Adjustment value reported per unit (average across quantity).
    @Override
    public Money getAdjustmentValue() {
        return getAverageAdjustmentValue();
    }

    @Override
    public GiftWrapOrderItem getGiftWrapOrderItem() {
        return giftWrapOrderItem;
    }

    @Override
    public void setGiftWrapOrderItem(GiftWrapOrderItem giftWrapOrderItem) {
        this.giftWrapOrderItem = giftWrapOrderItem;
    }

    @Override
    public OrderItemType getOrderItemType() {
        return convertOrderItemType(orderItemType);
    }

    @Override
    public void setOrderItemType(OrderItemType orderItemType) {
        this.orderItemType = orderItemType.getType();
    }

    // On sale == sale price present and different from retail.
    @Override
    public boolean getIsOnSale() {
        if (getSalePrice() != null) {
            return !getSalePrice().equals(getRetailPrice());
        } else {
            return false;
        }
    }

    // Discounted == effective price differs from retail.
    @Override
    public boolean getIsDiscounted() {
        if (getPrice() != null) {
            return !getPrice().equals(getRetailPrice());
        } else {
            return false;
        }
    }

    /**
     * Seeds a missing sale price from the retail price. Always returns false here;
     * subclasses may report true when prices actually changed — TODO confirm contract.
     */
    @Override
    public boolean updateSaleAndRetailPrices() {
        if (salePrice == null) {
            salePrice = retailPrice;
        }
        return false;
    }

    // Locks in the current average price as the item's final price.
    @Override
    public void finalizePrice() {
        price = getAveragePrice().getAmount();
    }

    // Derives the per-unit price from the adjusted total divided by quantity.
    @Override
    public void assignFinalPrice() {
        Money finalPrice = getTotalPrice().divide(quantity);
        price = finalPrice.getAmount();
    }

    /**
     * Pre-adjustment unit price. Uses the sale price when requested, unless any price
     * detail was priced off retail (then retail wins for consistency).
     */
    @Override
    public Money getPriceBeforeAdjustments(boolean allowSalesPrice) {
        boolean retailPriceOverride = false;
        for (OrderItemPriceDetail oipd : getOrderItemPriceDetails()) {
            if (oipd.getUseSalePrice() == false) {
                retailPriceOverride = true;
                break;
            }
        }
        if (allowSalesPrice && !retailPriceOverride) {
            return getSalePrice();
        } else {
            return getRetailPrice();
        }
    }

    @Override
    public void addCandidateItemOffer(CandidateItemOffer candidateItemOffer) {
        getCandidateItemOffers().add(candidateItemOffer);
    }

    // Unlinks each candidate offer before clearing so orphanRemoval can delete them.
    @Override
    public void removeAllCandidateItemOffers() {
        if (getCandidateItemOffers() != null) {
            for (CandidateItemOffer candidate : getCandidateItemOffers()) {
                candidate.setOrderItem(null);
            }
            getCandidateItemOffers().clear();
        }
    }

    /**
     * Removes every adjustment, re-derives the final price, and returns how many
     * adjustments were removed.
     */
    @Override
    public int removeAllAdjustments() {
        int removedAdjustmentCount = 0;
        if (getOrderItemAdjustments() != null) {
            for (OrderItemAdjustment adjustment : getOrderItemAdjustments()) {
                adjustment.setOrderItem(null);
            }
            removedAdjustmentCount = getOrderItemAdjustments().size();
            getOrderItemAdjustments().clear();
        }
        assignFinalPrice();
        return removedAdjustmentCount;
    }

    /**
     * A list of arbitrary attributes added to this item.
     */
    @Override
    public Map<String,OrderItemAttribute> getOrderItemAttributes() {
        return orderItemAttributeMap;
    }

    /**
     * Sets the map of order item attributes.
     *
     * @param orderItemAttributes
     */
    @Override
    public void setOrderItemAttributes(Map<String,OrderItemAttribute> orderItemAttributes) {
        this.orderItemAttributeMap = orderItemAttributes;
    }

    // Null flag defaults to taxable.
    @Override
    public Boolean isTaxable() {
        return itemTaxable == null ? true : itemTaxable;
    }

    @Override
    public void setTaxable(Boolean taxable) {
        this.itemTaxable = taxable;
    }

    @Override
    public void setOrderItemPriceDetails(List<OrderItemPriceDetail> orderItemPriceDetails) {
        this.orderItemPriceDetails = orderItemPriceDetails;
    }

    // Null flag defaults to "discounting allowed".
    @Override
    public boolean isDiscountingAllowed() {
        if (discountsAllowed == null) {
            return true;
        } else {
            return discountsAllowed.booleanValue();
        }
    }

    @Override
    public void setDiscountingAllowed(boolean discountsAllowed) {
        this.discountsAllowed = discountsAllowed;
    }

    /**
     * Average unit price (total / quantity). With zero quantity, falls back to the
     * stored price field (or null) to avoid dividing by zero.
     */
    @Override
    public Money getAveragePrice() {
        if (quantity == 0) {
            return price == null ? null : BroadleafCurrencyUtils.getMoney(price, getOrder().getCurrency());
        }
        return getTotalPrice().divide(quantity);
    }

    @Override
    public Money getAverageAdjustmentValue() {
        if (quantity == 0) {
            return null;
        }
        return getTotalAdjustmentValue().divide(quantity);
    }

    // Sums adjustment value across all price details.
    @Override
    public Money getTotalAdjustmentValue() {
        Money totalAdjustmentValue = BroadleafCurrencyUtils.getMoney(getOrder().getCurrency());
        List<OrderItemPriceDetail> priceDetails = getOrderItemPriceDetails();
        if (priceDetails != null) {
            for (OrderItemPriceDetail priceDetail : getOrderItemPriceDetails()) {
                totalAdjustmentValue = totalAdjustmentValue.add(priceDetail.getTotalAdjustmentValue());
            }
        }
        return totalAdjustmentValue;
    }

    /**
     * Total line price: sum of adjusted price-detail totals when details exist;
     * otherwise price * quantity, falling back to salePrice * quantity.
     */
    @Override
    public Money getTotalPrice() {
        Money returnValue = convertToMoney(BigDecimal.ZERO);
        if (orderItemPriceDetails != null && orderItemPriceDetails.size() > 0) {
            for (OrderItemPriceDetail oipd : orderItemPriceDetails) {
                returnValue = returnValue.add(oipd.getTotalAdjustedPrice());
            }
        } else {
            if (price != null) {
                returnValue = convertToMoney(price).multiply(quantity);
            } else {
                return getSalePrice().multiply(quantity);
            }
        }
        return returnValue;
    }

    @Override
    public Money getTotalPriceBeforeAdjustments(boolean allowSalesPrice) {
        return getPriceBeforeAdjustments(allowSalesPrice).multiply(getQuantity());
    }

    @Override
    public void setRetailPriceOverride(boolean override) {
        this.retailPriceOverride = Boolean.valueOf(override);
    }

    @Override
    public boolean isRetailPriceOverride() {
        if (retailPriceOverride == null) {
            return false;
        } else {
            return retailPriceOverride.booleanValue();
        }
    }

    @Override
    public void setSalePriceOverride(boolean override) {
        this.salePriceOverride = Boolean.valueOf(override);
    }

    @Override
    public boolean isSalePriceOverride() {
        if (salePriceOverride == null) {
            return false;
        } else {
            return salePriceOverride.booleanValue();
        }
    }

    @Override
    public List<OrderItemPriceDetail> getOrderItemPriceDetails() {
        return orderItemPriceDetails;
    }

    @Override
    public String getMainEntityName() {
        return getName();
    }

    @Override
    public String getCurrencyCode() {
        if (getOrder().getCurrency() != null) {
            return getOrder().getCurrency().getCurrencyCode();
        }
        return null;
    }

    /**
     * Verifies that a subclass defined outside Broadleaf provides its own clone();
     * relying on the Broadleaf base implementation would break split/merge accuracy.
     */
    public void checkCloneable(OrderItem orderItem) throws CloneNotSupportedException, SecurityException, NoSuchMethodException {
        Method cloneMethod = orderItem.getClass().getMethod("clone", new Class[]{});
        if (cloneMethod.getDeclaringClass().getName().startsWith("org.broadleafcommerce") &&
                !orderItem.getClass().getName().startsWith("org.broadleafcommerce")) {
            //subclass is not implementing the clone method
            throw new CloneNotSupportedException("Custom extensions and implementations should implement clone in " +
                    "order to guarantee split and merge operations are performed accurately");
        }
    }

    // Converts a raw amount into Money in the order's currency; null-safe.
    protected Money convertToMoney(BigDecimal amount) {
        return amount == null ? null : BroadleafCurrencyUtils.getMoney(amount, getOrder().getCurrency());
    }

    protected OrderItemType convertOrderItemType(String type) {
        return OrderItemType.getInstance(type);
    }

    /**
     * Deep-ish clone used by split/merge operations: candidate offers and attributes are
     * cloned and re-parented; category/order/message references and price fields are
     * copied as-is. Adjustments and price details are intentionally not copied here.
     */
    @Override
    public OrderItem clone() {
        //this is likely an extended class - instantiate from the fully qualified name via reflection
        OrderItemImpl clonedOrderItem;
        try {
            clonedOrderItem = (OrderItemImpl) Class.forName(this.getClass().getName()).newInstance();
            try {
                checkCloneable(clonedOrderItem);
            } catch (CloneNotSupportedException e) {
                LOG.warn("Clone implementation missing in inheritance hierarchy outside of Broadleaf: " + clonedOrderItem.getClass().getName(), e);
            }
            if (candidateItemOffers != null) {
                for (CandidateItemOffer candidate : candidateItemOffers) {
                    CandidateItemOffer clone = candidate.clone();
                    clone.setOrderItem(clonedOrderItem);
                    clonedOrderItem.getCandidateItemOffers().add(clone);
                }
            }
            if (orderItemAttributeMap != null && !orderItemAttributeMap.isEmpty()) {
                for (OrderItemAttribute attribute : orderItemAttributeMap.values()) {
                    OrderItemAttribute clone = attribute.clone();
                    clone.setOrderItem(clonedOrderItem);
                    clonedOrderItem.getOrderItemAttributes().put(clone.getName(), clone);
                }
            }
            clonedOrderItem.setCategory(category);
            clonedOrderItem.setGiftWrapOrderItem(giftWrapOrderItem);
            clonedOrderItem.setName(name);
            clonedOrderItem.setOrder(order);
            clonedOrderItem.setOrderItemType(convertOrderItemType(orderItemType));
            clonedOrderItem.setPersonalMessage(personalMessage);
            clonedOrderItem.setQuantity(quantity);
            clonedOrderItem.retailPrice = retailPrice;
            clonedOrderItem.salePrice = salePrice;
            clonedOrderItem.discountsAllowed = discountsAllowed;
            clonedOrderItem.salePriceOverride = salePriceOverride;
            clonedOrderItem.retailPriceOverride = retailPriceOverride;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        return clonedOrderItem;
    }

    // NOTE(review): hashCode omits id while equals short-circuits on id — field-based
    // hash is only consistent for unpersisted instances; confirm before relying on it.
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((category == null) ? 0 : category.hashCode());
        result = prime * result + ((giftWrapOrderItem == null) ? 0 : giftWrapOrderItem.hashCode());
        result = prime * result + ((order == null) ? 0 : order.hashCode());
        result = prime * result + ((orderItemType == null) ? 0 : orderItemType.hashCode());
        result = prime * result + ((personalMessage == null) ? 0 : personalMessage.hashCode());
        result = prime * result + ((price == null) ? 0 : price.hashCode());
        result = prime * result + quantity;
        result = prime * result + ((retailPrice == null) ? 0 : retailPrice.hashCode());
        result = prime * result + ((salePrice == null) ? 0 : salePrice.hashCode());
        return result;
    }

    // Equality prefers database identity when both ids are set, else field comparison.
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        OrderItemImpl other = (OrderItemImpl) obj;
        if (id != null && other.id != null) {
            return id.equals(other.id);
        }
        if (category == null) {
            if (other.category != null) {
                return false;
            }
        } else if (!category.equals(other.category)) {
            return false;
        }
        if (giftWrapOrderItem == null) {
            if (other.giftWrapOrderItem != null) {
                return false;
            }
        } else if (!giftWrapOrderItem.equals(other.giftWrapOrderItem)) {
            return false;
        }
        if (order == null) {
            if (other.order != null) {
                return false;
            }
        } else if (!order.equals(other.order)) {
            return false;
        }
        if (orderItemType == null) {
            if (other.orderItemType != null) {
                return false;
            }
        } else if (!orderItemType.equals(other.orderItemType)) {
            return false;
        }
        if (personalMessage == null) {
            if (other.personalMessage != null) {
                return false;
            }
        } else if (!personalMessage.equals(other.personalMessage)) {
            return false;
        }
        if (price == null) {
            if (other.price != null) {
                return false;
            }
        } else if (!price.equals(other.price)) {
            return false;
        }
        if (quantity != other.quantity) {
            return false;
        }
        if (retailPrice == null) {
            if (other.retailPrice != null) {
                return false;
            }
        } else if (!retailPrice.equals(other.retailPrice)) {
            return false;
        }
        if (salePrice == null) {
            if (other.salePrice != null) {
                return false;
            }
        } else if (!salePrice.equals(other.salePrice)) {
            return false;
        }
        return true;
    }

    // Admin-presentation layout constants referenced by the annotations above.
    public static class Presentation {
        public static class Tab {
            public static class Name {
                public static final String Advanced = "OrderImpl_Advanced";
            }
            public static class Order {
                public static final int Advanced = 2000;
            }
        }
        public static class Group {
            public static class Name {
                public static final String Description = "OrderItemImpl_Description";
                public static final String Pricing = "OrderItemImpl_Pricing";
                public static final String Catalog = "OrderItemImpl_Catalog";
            }
            public static class Order {
                public static final int Description = 1000;
                public static final int Pricing = 2000;
                public static final int Catalog = 3000;
            }
        }
        public static class FieldOrder {
            public static final int NAME = 1000;
            public static final int PRICE = 2000;
            public static final int QUANTITY = 3000;
            public static final int RETAILPRICE = 4000;
            public static final int SALEPRICE = 5000;
            public static final int TOTALTAX = 6000;
            public static final int CATEGORY = 1000;
            public static final int PRICEDETAILS = 1000;
            public static final int ADJUSTMENTS = 2000;
            public static final int DISCOUNTALLOWED = 3000;
        }
    }

    @Override
    public boolean isSkuActive() {
        //abstract method, by default return true
        return true;
    }
}
1no label
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_domain_OrderItemImpl.java
3,469
public class ShardIndexingService extends AbstractIndexShardComponent { private final ShardSlowLogIndexingService slowLog; private final StatsHolder totalStats = new StatsHolder(); private final CopyOnWriteArrayList<IndexingOperationListener> listeners = new CopyOnWriteArrayList<IndexingOperationListener>(); private volatile Map<String, StatsHolder> typesStats = ImmutableMap.of(); @Inject public ShardIndexingService(ShardId shardId, @IndexSettings Settings indexSettings, ShardSlowLogIndexingService slowLog) { super(shardId, indexSettings); this.slowLog = slowLog; } /** * Returns the stats, including type specific stats. If the types are null/0 length, then nothing * is returned for them. If they are set, then only types provided will be returned, or * <tt>_all</tt> for all types. */ public IndexingStats stats(String... types) { IndexingStats.Stats total = totalStats.stats(); Map<String, IndexingStats.Stats> typesSt = null; if (types != null && types.length > 0) { if (types.length == 1 && types[0].equals("_all")) { typesSt = new HashMap<String, IndexingStats.Stats>(typesStats.size()); for (Map.Entry<String, StatsHolder> entry : typesStats.entrySet()) { typesSt.put(entry.getKey(), entry.getValue().stats()); } } else { typesSt = new HashMap<String, IndexingStats.Stats>(types.length); for (String type : types) { StatsHolder statsHolder = typesStats.get(type); if (statsHolder != null) { typesSt.put(type, statsHolder.stats()); } } } } return new IndexingStats(total, typesSt); } public void addListener(IndexingOperationListener listener) { listeners.add(listener); } public void removeListener(IndexingOperationListener listener) { listeners.remove(listener); } public Engine.Create preCreate(Engine.Create create) { totalStats.indexCurrent.inc(); typeStats(create.type()).indexCurrent.inc(); for (IndexingOperationListener listener : listeners) { create = listener.preCreate(create); } return create; } public void postCreateUnderLock(Engine.Create create) { for 
(IndexingOperationListener listener : listeners) { try { listener.postCreateUnderLock(create); } catch (Exception e) { logger.warn("post listener [{}] failed", e, listener); } } } public void postCreate(Engine.Create create) { long took = create.endTime() - create.startTime(); totalStats.indexMetric.inc(took); totalStats.indexCurrent.dec(); StatsHolder typeStats = typeStats(create.type()); typeStats.indexMetric.inc(took); typeStats.indexCurrent.dec(); slowLog.postCreate(create, took); for (IndexingOperationListener listener : listeners) { try { listener.postCreate(create); } catch (Exception e) { logger.warn("post listener [{}] failed", e, listener); } } } public Engine.Index preIndex(Engine.Index index) { totalStats.indexCurrent.inc(); typeStats(index.type()).indexCurrent.inc(); for (IndexingOperationListener listener : listeners) { index = listener.preIndex(index); } return index; } public void postIndexUnderLock(Engine.Index index) { for (IndexingOperationListener listener : listeners) { try { listener.postIndexUnderLock(index); } catch (Exception e) { logger.warn("post listener [{}] failed", e, listener); } } } public void postIndex(Engine.Index index) { long took = index.endTime() - index.startTime(); totalStats.indexMetric.inc(took); totalStats.indexCurrent.dec(); StatsHolder typeStats = typeStats(index.type()); typeStats.indexMetric.inc(took); typeStats.indexCurrent.dec(); slowLog.postIndex(index, took); for (IndexingOperationListener listener : listeners) { try { listener.postIndex(index); } catch (Exception e) { logger.warn("post listener [{}] failed", e, listener); } } } public void failedIndex(Engine.Index index) { totalStats.indexCurrent.dec(); typeStats(index.type()).indexCurrent.dec(); } public Engine.Delete preDelete(Engine.Delete delete) { totalStats.deleteCurrent.inc(); typeStats(delete.type()).deleteCurrent.inc(); for (IndexingOperationListener listener : listeners) { delete = listener.preDelete(delete); } return delete; } public void 
postDeleteUnderLock(Engine.Delete delete) { for (IndexingOperationListener listener : listeners) { try { listener.postDeleteUnderLock(delete); } catch (Exception e) { logger.warn("post listener [{}] failed", e, listener); } } } public void postDelete(Engine.Delete delete) { long took = delete.endTime() - delete.startTime(); totalStats.deleteMetric.inc(took); totalStats.deleteCurrent.dec(); StatsHolder typeStats = typeStats(delete.type()); typeStats.deleteMetric.inc(took); typeStats.deleteCurrent.dec(); for (IndexingOperationListener listener : listeners) { try { listener.postDelete(delete); } catch (Exception e) { logger.warn("post listener [{}] failed", e, listener); } } } public void failedDelete(Engine.Delete delete) { totalStats.deleteCurrent.dec(); typeStats(delete.type()).deleteCurrent.dec(); } public Engine.DeleteByQuery preDeleteByQuery(Engine.DeleteByQuery deleteByQuery) { for (IndexingOperationListener listener : listeners) { deleteByQuery = listener.preDeleteByQuery(deleteByQuery); } return deleteByQuery; } public void postDeleteByQuery(Engine.DeleteByQuery deleteByQuery) { for (IndexingOperationListener listener : listeners) { listener.postDeleteByQuery(deleteByQuery); } } public void clear() { totalStats.clear(); synchronized (this) { if (!typesStats.isEmpty()) { MapBuilder<String, StatsHolder> typesStatsBuilder = MapBuilder.newMapBuilder(); for (Map.Entry<String, StatsHolder> typeStats : typesStats.entrySet()) { if (typeStats.getValue().totalCurrent() > 0) { typeStats.getValue().clear(); typesStatsBuilder.put(typeStats.getKey(), typeStats.getValue()); } } typesStats = typesStatsBuilder.immutableMap(); } } } private StatsHolder typeStats(String type) { StatsHolder stats = typesStats.get(type); if (stats == null) { synchronized (this) { stats = typesStats.get(type); if (stats == null) { stats = new StatsHolder(); typesStats = MapBuilder.newMapBuilder(typesStats).put(type, stats).immutableMap(); } } } return stats; } static class StatsHolder { public 
final MeanMetric indexMetric = new MeanMetric(); public final MeanMetric deleteMetric = new MeanMetric(); public final CounterMetric indexCurrent = new CounterMetric(); public final CounterMetric deleteCurrent = new CounterMetric(); public IndexingStats.Stats stats() { return new IndexingStats.Stats( indexMetric.count(), TimeUnit.NANOSECONDS.toMillis(indexMetric.sum()), indexCurrent.count(), deleteMetric.count(), TimeUnit.NANOSECONDS.toMillis(deleteMetric.sum()), deleteCurrent.count()); } public long totalCurrent() { return indexCurrent.count() + deleteMetric.count(); } public void clear() { indexMetric.clear(); deleteMetric.clear(); } } }
1no label
src_main_java_org_elasticsearch_index_indexing_ShardIndexingService.java
2,837
final class PartitionReplicaVersions { final int partitionId; // read and updated only by operation/partition threads final long[] versions = new long[InternalPartition.MAX_BACKUP_COUNT]; PartitionReplicaVersions(int partitionId) { this.partitionId = partitionId; } long[] incrementAndGet(int backupCount) { for (int i = 0; i < backupCount; i++) { versions[i]++; } return versions; } long[] get() { return versions; } boolean update(long[] newVersions, int currentReplica) { int index = currentReplica - 1; long current = versions[index]; long next = newVersions[index]; boolean updated = (current == next - 1); if (updated) { arraycopy(newVersions, 0, versions, 0, newVersions.length); } return updated; } void reset(long[] newVersions) { arraycopy(newVersions, 0, versions, 0, newVersions.length); } void clear() { for (int i = 0; i < versions.length; i++) { versions[i] = 0; } } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("PartitionReplicaVersions"); sb.append("{partitionId=").append(partitionId); sb.append(", versions=").append(Arrays.toString(versions)); sb.append('}'); return sb.toString(); } }
1no label
hazelcast_src_main_java_com_hazelcast_partition_impl_PartitionReplicaVersions.java
334
public class NodesRestartAction extends ClusterAction<NodesRestartRequest, NodesRestartResponse, NodesRestartRequestBuilder> { public static final NodesRestartAction INSTANCE = new NodesRestartAction(); public static final String NAME = "cluster/nodes/restart"; private NodesRestartAction() { super(NAME); } @Override public NodesRestartResponse newResponse() { return new NodesRestartResponse(); } @Override public NodesRestartRequestBuilder newRequestBuilder(ClusterAdminClient client) { return new NodesRestartRequestBuilder(client); } }
0true
src_main_java_org_elasticsearch_action_admin_cluster_node_restart_NodesRestartAction.java
2,041
public class ClearExpiredOperation extends AbstractMapOperation implements PartitionAwareOperation { private List expiredKeyValueSequence; public ClearExpiredOperation(String name) { super(name); } @Override public void run() throws Exception { final PartitionContainer partitionContainer = mapService.getPartitionContainer(getPartitionId()); // this should be existing record store since we don't want to trigger record store creation. final RecordStore recordStore = partitionContainer.getExistingRecordStore(name); if (recordStore == null) { return; } expiredKeyValueSequence = recordStore.findUnlockedExpiredRecords(); } @Override public void afterRun() throws Exception { final List expiredKeyValueSequence = this.expiredKeyValueSequence; if (expiredKeyValueSequence == null || expiredKeyValueSequence.isEmpty()) { return; } final MapService mapService = this.mapService; final String mapName = this.name; final NodeEngine nodeEngine = getNodeEngine(); final Address owner = nodeEngine.getPartitionService().getPartitionOwner(getPartitionId()); final boolean isOwner = nodeEngine.getThisAddress().equals(owner); final int size = expiredKeyValueSequence.size(); for (int i = 0; i < size; i += 2) { Data key = (Data) expiredKeyValueSequence.get(i); Object value = expiredKeyValueSequence.get(i + 1); mapService.interceptAfterRemove(mapName, value); if (mapService.isNearCacheAndInvalidationEnabled(mapName)) { mapService.invalidateAllNearCaches(mapName, key); } if (isOwner) { EvictionHelper.fireEvent(key, value, mapName, mapService); } } } @Override public boolean returnsResponse() { return false; } @Override protected void writeInternal(ObjectDataOutput out) throws IOException { throw new UnsupportedOperationException(); } @Override protected void readInternal(ObjectDataInput in) throws IOException { throw new UnsupportedOperationException(); } @Override public String toString() { return "ClearExpiredOperation{}"; } }
1no label
hazelcast_src_main_java_com_hazelcast_map_operation_ClearExpiredOperation.java
1,045
public class OCommandExecutorSQLInsert extends OCommandExecutorSQLSetAware implements OCommandDistributedReplicateRequest { public static final String KEYWORD_INSERT = "INSERT"; private static final String KEYWORD_VALUES = "VALUES"; private String className = null; private String clusterName = null; private String indexName = null; private List<Map<String, Object>> newRecords; @SuppressWarnings("unchecked") public OCommandExecutorSQLInsert parse(final OCommandRequest iRequest) { final ODatabaseRecord database = getDatabase(); init((OCommandRequestText) iRequest); className = null; newRecords = null; content = null; parserRequiredKeyword("INSERT"); parserRequiredKeyword("INTO"); String subjectName = parserRequiredWord(true, "Invalid subject name. Expected cluster, class or index"); if (subjectName.startsWith(OCommandExecutorSQLAbstract.CLUSTER_PREFIX)) // CLUSTER clusterName = subjectName.substring(OCommandExecutorSQLAbstract.CLUSTER_PREFIX.length()); else if (subjectName.startsWith(OCommandExecutorSQLAbstract.INDEX_PREFIX)) // INDEX indexName = subjectName.substring(OCommandExecutorSQLAbstract.INDEX_PREFIX.length()); else { // CLASS if (subjectName.startsWith(OCommandExecutorSQLAbstract.CLASS_PREFIX)) subjectName = subjectName.substring(OCommandExecutorSQLAbstract.CLASS_PREFIX.length()); final OClass cls = database.getMetadata().getSchema().getClass(subjectName); if (cls == null) throwParsingException("Class " + subjectName + " not found in database"); className = cls.getName(); } parserSkipWhiteSpaces(); if (parserIsEnded()) throwSyntaxErrorException("Set of fields is missed. Example: (name, surname) or SET name = 'Bill'"); final String temp = parseOptionalWord(true); if (temp.equals("CLUSTER")) { clusterName = parserRequiredWord(false); parserSkipWhiteSpaces(); if (parserIsEnded()) throwSyntaxErrorException("Set of fields is missed. 
Example: (name, surname) or SET name = 'Bill'"); } else parserGoBack(); newRecords = new ArrayList<Map<String, Object>>(); if (parserGetCurrentChar() == '(') { parseValues(); } else { parserNextWord(true, " ,\r\n"); if (parserGetLastWord().equals(KEYWORD_CONTENT)) { newRecords = null; parseContent(); } else if (parserGetLastWord().equals(KEYWORD_SET)) { final LinkedHashMap<String, Object> fields = new LinkedHashMap<String, Object>(); newRecords.add(fields); parseSetFields(fields); } } return this; } protected void parseValues() { final int beginFields = parserGetCurrentPosition(); final int endFields = parserText.indexOf(')', beginFields + 1); if (endFields == -1) throwSyntaxErrorException("Missed closed brace"); final ArrayList<String> fieldNames = new ArrayList<String>(); parserSetCurrentPosition(OStringSerializerHelper.getParameters(parserText, beginFields, endFields, fieldNames)); if (fieldNames.size() == 0) throwSyntaxErrorException("Set of fields is empty. Example: (name, surname)"); // REMOVE QUOTATION MARKS IF ANY for (int i = 0; i < fieldNames.size(); ++i) fieldNames.set(i, OStringSerializerHelper.removeQuotationMarks(fieldNames.get(i))); parserRequiredKeyword(KEYWORD_VALUES); parserSkipWhiteSpaces(); if (parserIsEnded() || parserText.charAt(parserGetCurrentPosition()) != '(') { throwParsingException("Set of values is missed. Example: ('Bill', 'Stuart', 300)"); } int blockStart = parserGetCurrentPosition(); int blockEnd = parserGetCurrentPosition(); final List<String> records = OStringSerializerHelper.smartSplit(parserText, new char[] { ',' }, blockStart, -1, true, true, false); for (String record : records) { final List<String> values = new ArrayList<String>(); blockEnd += OStringSerializerHelper.getParameters(record, 0, -1, values); if (blockEnd == -1) throw new OCommandSQLParsingException("Missed closed brace. Use " + getSyntax(), parserText, blockStart); if (values.isEmpty()) throw new OCommandSQLParsingException("Set of values is empty. 
Example: ('Bill', 'Stuart', 300). Use " + getSyntax(), parserText, blockStart); if (values.size() != fieldNames.size()) throw new OCommandSQLParsingException("Fields not match with values", parserText, blockStart); // TRANSFORM FIELD VALUES final Map<String, Object> fields = new LinkedHashMap<String, Object>(); for (int i = 0; i < values.size(); ++i) fields.put(fieldNames.get(i), OSQLHelper.parseValue(this, OStringSerializerHelper.decode(values.get(i).trim()), context)); newRecords.add(fields); blockStart = blockEnd; } } /** * Execute the INSERT and return the ODocument object created. */ public Object execute(final Map<Object, Object> iArgs) { if (newRecords == null && content == null) throw new OCommandExecutionException("Cannot execute the command because it has not been parsed yet"); final OCommandParameters commandParameters = new OCommandParameters(iArgs); if (indexName != null) { if (newRecords == null) throw new OCommandExecutionException("No key/value found"); final OIndex<?> index = getDatabase().getMetadata().getIndexManager().getIndex(indexName); if (index == null) throw new OCommandExecutionException("Target index '" + indexName + "' not found"); // BIND VALUES Map<String, Object> result = null; for (Map<String, Object> candidate : newRecords) { index.put(getIndexKeyValue(commandParameters, candidate), getIndexValue(commandParameters, candidate)); result = candidate; } // RETURN LAST ENTRY return new ODocument(result); } else { // CREATE NEW DOCUMENTS final List<ODocument> docs = new ArrayList<ODocument>(); if (newRecords != null) { for (Map<String, Object> candidate : newRecords) { final ODocument doc = className != null ? 
new ODocument(className) : new ODocument(); OSQLHelper.bindParameters(doc, candidate, commandParameters, context); if (clusterName != null) { doc.save(clusterName); } else { doc.save(); } docs.add(doc); } if (docs.size() == 1) return docs.get(0); else return docs; } else if (content != null) { final ODocument doc = className != null ? new ODocument(className) : new ODocument(); doc.merge(content, true, false); doc.save(); return doc; } } return null; } private Object getIndexKeyValue(OCommandParameters commandParameters, Map<String, Object> candidate) { final Object parsedKey = candidate.get(KEYWORD_KEY); if (parsedKey instanceof OSQLFilterItemField) { final OSQLFilterItemField f = (OSQLFilterItemField) parsedKey; if (f.getRoot().equals("?")) // POSITIONAL PARAMETER return commandParameters.getNext(); else if (f.getRoot().startsWith(":")) // NAMED PARAMETER return commandParameters.getByName(f.getRoot().substring(1)); } return parsedKey; } private OIdentifiable getIndexValue(OCommandParameters commandParameters, Map<String, Object> candidate) { final Object parsedRid = candidate.get(KEYWORD_RID); if (parsedRid instanceof OSQLFilterItemField) { final OSQLFilterItemField f = (OSQLFilterItemField) parsedRid; if (f.getRoot().equals("?")) // POSITIONAL PARAMETER return (OIdentifiable) commandParameters.getNext(); else if (f.getRoot().startsWith(":")) // NAMED PARAMETER return (OIdentifiable) commandParameters.getByName(f.getRoot().substring(1)); } return (OIdentifiable) parsedRid; } public boolean isReplicated() { return indexName != null; } @Override public String getSyntax() { return "INSERT INTO [class:]<class>|cluster:<cluster>|index:<index> [(<field>[,]*) VALUES (<expression>[,]*)[,]*]|[SET <field> = <expression>|<sub-command>[,]*]|CONTENT {<JSON>}"; } }
1no label
core_src_main_java_com_orientechnologies_orient_core_sql_OCommandExecutorSQLInsert.java
497
public class CloseIndexRequestBuilder extends AcknowledgedRequestBuilder<CloseIndexRequest, CloseIndexResponse, CloseIndexRequestBuilder> { public CloseIndexRequestBuilder(IndicesAdminClient indicesClient) { super((InternalIndicesAdminClient) indicesClient, new CloseIndexRequest()); } public CloseIndexRequestBuilder(IndicesAdminClient indicesClient, String... indices) { super((InternalIndicesAdminClient) indicesClient, new CloseIndexRequest(indices)); } /** * Sets the indices to be closed * @param indices the indices to be closed * @return the request itself */ public CloseIndexRequestBuilder setIndices(String... indices) { request.indices(indices); return this; } /** * Specifies what type of requested indices to ignore and wildcard indices expressions * For example indices that don't exist. * * @param indicesOptions the desired behaviour regarding indices to ignore and indices wildcard expressions * @return the request itself */ public CloseIndexRequestBuilder setIndicesOptions(IndicesOptions indicesOptions) { request.indicesOptions(indicesOptions); return this; } @Override protected void doExecute(ActionListener<CloseIndexResponse> listener) { ((IndicesAdminClient) client).close(request, listener); } }
0true
src_main_java_org_elasticsearch_action_admin_indices_close_CloseIndexRequestBuilder.java
1,605
public abstract class OAbstractRemoteTask implements Externalizable { private static final long serialVersionUID = 1L; public enum RESULT_STRATEGY { ANY, MERGE, UNION } public enum QUORUM_TYPE { NONE, READ, WRITE, ALL } protected transient boolean inheritedDatabase; protected transient String nodeSource; /** * Constructor used from unmarshalling. */ public OAbstractRemoteTask() { } public abstract String getName(); public abstract QUORUM_TYPE getQuorumType(); public abstract Object execute(OServer iServer, ODistributedServerManager iManager, ODatabaseDocumentTx database) throws Exception; public long getTimeout() { return OGlobalConfiguration.DISTRIBUTED_CRUD_TASK_SYNCH_TIMEOUT.getValueAsLong(); } public long getSynchronousTimeout(final int iSynchNodes) { return getTimeout() * iSynchNodes; } public long getTotalTimeout(final int iTotalNodes) { return getTimeout() * iTotalNodes; } public RESULT_STRATEGY getResultStrategy() { return RESULT_STRATEGY.ANY; } @Override public String toString() { return getName(); } public String getNodeSource() { return nodeSource; } public void setNodeSource(String nodeSource) { this.nodeSource = nodeSource; } public void undo() { } public boolean isRequireNodeOnline() { return true; } }
1no label
server_src_main_java_com_orientechnologies_orient_server_distributed_task_OAbstractRemoteTask.java
251
public class OLevel2RecordCache extends OAbstractRecordCache { private final String CACHE_HIT; private final String CACHE_MISS; private STRATEGY strategy; public enum STRATEGY { POP_RECORD, COPY_RECORD } public OLevel2RecordCache(final OStorage storage, OCacheLevelTwoLocator cacheLocator) { super(cacheLocator.primaryCache(storage.getName())); profilerPrefix = "db." + storage.getName() + ".cache.level2."; profilerMetadataPrefix = "db.*.cache.level2."; CACHE_HIT = profilerPrefix + "cache.found"; CACHE_MISS = profilerPrefix + "cache.notFound"; strategy = STRATEGY.values()[(CACHE_LEVEL2_STRATEGY.getValueAsInteger())]; } @Override public void startup() { super.startup(); setEnable(OGlobalConfiguration.CACHE_LEVEL2_ENABLED.getValueAsBoolean()); } /** * Push record to cache. Identifier of record used as access key * * @param fresh * new record that should be cached */ public void updateRecord(final ORecordInternal<?> fresh) { if (!isEnabled() || fresh == null || fresh.isDirty() || fresh.getIdentity().isNew() || !fresh.getIdentity().isValid() || fresh.getIdentity().getClusterId() == excludedCluster || fresh.getRecordVersion().isTombstone()) return; if (fresh.isPinned() == null || fresh.isPinned()) { underlying.lock(fresh.getIdentity()); try { final ORecordInternal<?> current = underlying.get(fresh.getIdentity()); if (current != null && current.getRecordVersion().compareTo(fresh.getRecordVersion()) >= 0) return; if (ODatabaseRecordThreadLocal.INSTANCE.isDefined() && !ODatabaseRecordThreadLocal.INSTANCE.get().isClosed()) // CACHE A COPY underlying.put((ORecordInternal<?>) fresh.flatCopy()); else { // CACHE THE DETACHED RECORD fresh.detach(); underlying.put(fresh); } } finally { underlying.unlock(fresh.getIdentity()); } } else underlying.remove(fresh.getIdentity()); } /** * Retrieve the record if any following the supported strategies:<br> * 0 = If found remove it (pop): the client (database instances) will push it back when finished or on close.<br> * 1 = Return the instance 
but keep a copy in 2-level cache; this could help highly-concurrent environment. * * @param iRID * record identity * @return record if exists in cache, {@code null} otherwise */ protected ORecordInternal<?> retrieveRecord(final ORID iRID) { if (!isEnabled() || iRID.getClusterId() == excludedCluster) return null; ORecordInternal<?> record; underlying.lock(iRID); try { record = underlying.remove(iRID); if (record == null || record.isDirty()) { Orient.instance().getProfiler() .updateCounter(CACHE_MISS, "Record not found in Level2 Cache", +1, "db.*.cache.level2.cache.notFound"); return null; } if (strategy == STRATEGY.COPY_RECORD) { final ORecordInternal<?> resident = OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean() ? (ORecordInternal<?>) record .flatCopy() : record; // PUT BACK A COPY OR ThE ORIGINAL IF NOT MULTI-THREADS (THIS UPDATE ALSO THE LRU) underlying.put(resident); } } finally { underlying.unlock(iRID); } Orient.instance().getProfiler().updateCounter(CACHE_HIT, "Record found in Level2 Cache", +1, "db.*.cache.level2.cache.found"); return record; } public void setStrategy(final STRATEGY newStrategy) { strategy = newStrategy; } @Override public String toString() { return "STORAGE level2 cache records = " + getSize() + ", maxSize = " + getMaxSize(); } }
0true
core_src_main_java_com_orientechnologies_orient_core_cache_OLevel2RecordCache.java
163
public abstract class PartitionClientRequest extends ClientRequest { private static final int TRY_COUNT = 100; protected void beforeProcess() { } protected void afterResponse() { } @Override final void process() { beforeProcess(); ClientEndpoint endpoint = getEndpoint(); Operation op = prepareOperation(); op.setCallerUuid(endpoint.getUuid()); InvocationBuilder builder = clientEngine.createInvocationBuilder(getServiceName(), op, getPartition()) .setReplicaIndex(getReplicaIndex()) .setTryCount(TRY_COUNT) .setResultDeserialized(false) .setCallback(new CallbackImpl(endpoint)); builder.invoke(); } protected abstract Operation prepareOperation(); protected abstract int getPartition(); protected int getReplicaIndex() { return 0; } protected Object filter(Object response) { return response; } private class CallbackImpl implements Callback<Object> { private final ClientEndpoint endpoint; public CallbackImpl(ClientEndpoint endpoint) { this.endpoint = endpoint; } @Override public void notify(Object object) { endpoint.sendResponse(filter(object), getCallId()); afterResponse(); } } }
1no label
hazelcast_src_main_java_com_hazelcast_client_PartitionClientRequest.java
47
public class OComparatorFactory { public static final OComparatorFactory INSTANCE = new OComparatorFactory(); private static final boolean unsafeWasDetected; static { boolean unsafeDetected = false; try { Class<?> sunClass = Class.forName("sun.misc.Unsafe"); unsafeDetected = sunClass != null; } catch (ClassNotFoundException cnfe) { // Ignore } unsafeWasDetected = unsafeDetected; } /** * Returns {@link Comparator} instance if applicable one exist or <code>null</code> otherwise. * * @param clazz * Class of object that is going to be compared. * @param <T> * Class of object that is going to be compared. * @return {@link Comparator} instance if applicable one exist or <code>null</code> otherwise. */ @SuppressWarnings("unchecked") public <T> Comparator<T> getComparator(Class<T> clazz) { boolean useUnsafe = Boolean.valueOf(System.getProperty("memory.useUnsafe")); if (clazz.equals(byte[].class)) { if (useUnsafe && unsafeWasDetected) return (Comparator<T>) OUnsafeByteArrayComparator.INSTANCE; return (Comparator<T>) OByteArrayComparator.INSTANCE; } return null; } }
0true
commons_src_main_java_com_orientechnologies_common_comparator_OComparatorFactory.java
4
public interface Action<A> { void accept(A a); }
0true
src_main_java_jsr166e_CompletableFuture.java
5,776
public class MatchedQueriesFetchSubPhase implements FetchSubPhase { @Override public Map<String, ? extends SearchParseElement> parseElements() { return ImmutableMap.of(); } @Override public boolean hitsExecutionNeeded(SearchContext context) { return false; } @Override public void hitsExecute(SearchContext context, InternalSearchHit[] hits) throws ElasticsearchException { } @Override public boolean hitExecutionNeeded(SearchContext context) { return !context.parsedQuery().namedFilters().isEmpty() || (context.parsedPostFilter() !=null && !context.parsedPostFilter().namedFilters().isEmpty()); } @Override public void hitExecute(SearchContext context, HitContext hitContext) throws ElasticsearchException { List<String> matchedQueries = Lists.newArrayListWithCapacity(2); addMatchedQueries(hitContext, context.parsedQuery().namedFilters(), matchedQueries); if (context.parsedPostFilter() != null) { addMatchedQueries(hitContext, context.parsedPostFilter().namedFilters(), matchedQueries); } hitContext.hit().matchedQueries(matchedQueries.toArray(new String[matchedQueries.size()])); } private void addMatchedQueries(HitContext hitContext, ImmutableMap<String, Filter> namedFiltersAndQueries, List<String> matchedQueries) { for (Map.Entry<String, Filter> entry : namedFiltersAndQueries.entrySet()) { String name = entry.getKey(); Filter filter = entry.getValue(); try { DocIdSet docIdSet = filter.getDocIdSet(hitContext.readerContext(), null); // null is fine, since we filter by hitContext.docId() if (!DocIdSets.isEmpty(docIdSet)) { Bits bits = docIdSet.bits(); if (bits != null) { if (bits.get(hitContext.docId())) { matchedQueries.add(name); } } else { DocIdSetIterator iterator = docIdSet.iterator(); if (iterator != null) { if (iterator.advance(hitContext.docId()) == hitContext.docId()) { matchedQueries.add(name); } } } } } catch (IOException e) { // ignore } finally { SearchContext.current().clearReleasables(); } } } }
1no label
src_main_java_org_elasticsearch_search_fetch_matchedqueries_MatchedQueriesFetchSubPhase.java
349
transportService.sendRequest(state.nodes().masterNode(), NodeShutdownRequestHandler.ACTION, new NodeShutdownRequest(request), new EmptyTransportResponseHandler(ThreadPool.Names.SAME) { @Override public void handleResponse(TransportResponse.Empty response) { logger.trace("[cluster_shutdown]: received shutdown response from master"); } @Override public void handleException(TransportException exp) { logger.warn("[cluster_shutdown]: received failed shutdown response master", exp); } });
0true
src_main_java_org_elasticsearch_action_admin_cluster_node_shutdown_TransportNodesShutdownAction.java
195
Analyzer analyzer = new Analyzer() { @Override protected TokenStreamComponents createComponents(String fieldName, Reader reader) { Tokenizer t = new WhitespaceTokenizer(Lucene.VERSION, reader); return new TokenStreamComponents(t, new TruncateTokenFilter(t, 3)); } };
0true
src_test_java_org_apache_lucene_analysis_miscellaneous_TruncateTokenFilterTests.java
105
static class MapInterceptorImpl implements MapInterceptor { MapInterceptorImpl() { } public Object interceptGet(Object value) { if ("value1".equals(value)) { return "getIntercepted"; } return null; } public void afterGet(Object value) { } public Object interceptPut(Object oldValue, Object newValue) { if ("oldValue".equals(oldValue) && "newValue".equals(newValue)) { return "putIntercepted"; } return null; } public void afterPut(Object value) { } public Object interceptRemove(Object removedValue) { if ("value2".equals(removedValue)) { return "removeIntercepted"; } return null; } public void afterRemove(Object value) { } }
0true
hazelcast-client_src_test_java_com_hazelcast_client_ClientIssueTest.java
85
public abstract class OConsoleCommandCollection { protected OConsoleApplication context; void setContext(OConsoleApplication context){ this.context = context; } }
0true
commons_src_main_java_com_orientechnologies_common_console_OConsoleCommandCollection.java
21
static final class ExceptionCompletion<T> extends Completion { final CompletableFuture<? extends T> src; final Fun<? super Throwable, ? extends T> fn; final CompletableFuture<T> dst; ExceptionCompletion(CompletableFuture<? extends T> src, Fun<? super Throwable, ? extends T> fn, CompletableFuture<T> dst) { this.src = src; this.fn = fn; this.dst = dst; } public final void run() { final CompletableFuture<? extends T> a; final Fun<? super Throwable, ? extends T> fn; final CompletableFuture<T> dst; Object r; T t = null; Throwable ex, dx = null; if ((dst = this.dst) != null && (fn = this.fn) != null && (a = this.src) != null && (r = a.result) != null && compareAndSet(0, 1)) { if ((r instanceof AltResult) && (ex = ((AltResult)r).ex) != null) { try { t = fn.apply(ex); } catch (Throwable rex) { dx = rex; } } else { @SuppressWarnings("unchecked") T tr = (T) r; t = tr; } dst.internalComplete(t, dx); } } private static final long serialVersionUID = 5232453952276885070L; }
0true
src_main_java_jsr166e_CompletableFuture.java
35
@Service("blSkuFieldService") public class SkuFieldServiceImpl extends AbstractRuleBuilderFieldService { @Override public void init() { fields.add(new FieldData.Builder() .label("rule_skuName") .name("name") .operators("blcOperators_Text") .options("[]") .type(SupportedFieldType.STRING) .build()); fields.add(new FieldData.Builder() .label("rule_skuFulfillmentType") .name("fulfillmentType") .operators("blcOperators_Enumeration") .options("blcOptions_FulfillmentType") .type(SupportedFieldType.BROADLEAF_ENUMERATION) .build()); fields.add(new FieldData.Builder() .label("rule_skuInventoryType") .name("inventoryType") .operators("blcOperators_Enumeration") .options("blcOptions_InventoryType") .type(SupportedFieldType.BROADLEAF_ENUMERATION) .build()); fields.add(new FieldData.Builder() .label("rule_skuDescription") .name("description") .operators("blcOperators_Text") .options("[]") .type(SupportedFieldType.STRING) .build()); fields.add(new FieldData.Builder() .label("rule_skuLongDescription") .name("longDescription") .operators("blcOperators_Text") .options("[]") .type(SupportedFieldType.STRING) .build()); fields.add(new FieldData.Builder() .label("rule_skuTaxable") .name("taxable") .operators("blcOperators_Boolean") .options("[]") .type(SupportedFieldType.BOOLEAN) .build()); fields.add(new FieldData.Builder() .label("rule_skuAvailable") .name("available") .operators("blcOperators_Boolean") .options("[]") .type(SupportedFieldType.BOOLEAN) .build()); fields.add(new FieldData.Builder() .label("rule_skuStartDate") .name("activeStartDate") .operators("blcOperators_Date") .options("[]") .type(SupportedFieldType.DATE) .build()); fields.add(new FieldData.Builder() .label("rule_skuEndDate") .name("activeEndDate") .operators("blcOperators_Date") .options("[]") .type(SupportedFieldType.DATE) .build()); fields.add(new FieldData.Builder() .label("rule_skuProductUrl") .name("product.url") .operators("blcOperators_Text") .options("[]") .type(SupportedFieldType.STRING) .build()); 
fields.add(new FieldData.Builder() .label("rule_skuProductIsFeatured") .name("product.isFeaturedProduct") .operators("blcOperators_Boolean") .options("[]") .type(SupportedFieldType.BOOLEAN) .build()); fields.add(new FieldData.Builder() .label("rule_skuProductManufacturer") .name("product.manufacturer") .operators("blcOperators_Text") .options("[]") .type(SupportedFieldType.STRING) .build()); fields.add(new FieldData.Builder() .label("rule_skuProductModel") .name("product.model") .operators("blcOperators_Text") .options("[]") .type(SupportedFieldType.STRING) .build()); } @Override public String getName() { return RuleIdentifier.SKU; } @Override public String getDtoClassName() { return "org.broadleafcommerce.core.catalog.domain.SkuImpl"; } }
0true
admin_broadleaf-admin-module_src_main_java_org_broadleafcommerce_admin_web_rulebuilder_service_SkuFieldServiceImpl.java
3,747
/**
 * Registers the web-module {@link IdentifiedDataSerializable} types with
 * Hazelcast's serialization system. Currently the only registered type is
 * {@link SessionAttributePredicate}.
 */
public class WebDataSerializerHook implements DataSerializerHook {

    public static final int F_ID =
            FactoryIdHelper.getFactoryId(FactoryIdHelper.WEB_DS_FACTORY, F_ID_OFFSET_WEBMODULE);

    public static final int SESSION_ATTRIBUTE_ID = 1;

    @Override
    public int getFactoryId() {
        return F_ID;
    }

    @Override
    public DataSerializableFactory createFactory() {
        return new DataSerializableFactory() {
            @Override
            public IdentifiedDataSerializable create(int typeId) {
                if (typeId == SESSION_ATTRIBUTE_ID) {
                    return new SessionAttributePredicate();
                }
                // Include the offending id so deserialization failures are diagnosable
                // (the original threw a bare IllegalArgumentException with no message).
                throw new IllegalArgumentException("Unknown type id for web data serializer: " + typeId);
            }
        };
    }
}
1no label
hazelcast-wm_src_main_java_com_hazelcast_web_WebDataSerializerHook.java
613
// Delegate the "greater than (or equal, per isInclusive)" range scan to the index
// engine, forwarding every matching record id to the caller-supplied listener.
indexEngine.getValuesMajor(iRangeFrom, isInclusive, MultiValuesTransformer.INSTANCE,
    new OIndexEngine.ValuesResultListener() {
      @Override
      public boolean addResult(OIdentifiable identifiable) {
        // Pass each hit through; presumably the boolean tells the engine whether
        // to keep iterating — confirm against ValuesResultListener's contract.
        return valuesResultListener.addResult(identifiable);
      }
    });
1no label
core_src_main_java_com_orientechnologies_orient_core_index_OIndexMultiValues.java
1,486
/**
 * Hadoop map-only job step that keeps elements whose property value for a given
 * key lies in the half-open interval [startValue, endValue); elements outside
 * the interval have their traversal paths cleared (i.e. are filtered out).
 */
public class IntervalFilterMap {

    // Job configuration keys for this step.
    public static final String CLASS = Tokens.makeNamespace(IntervalFilterMap.class) + ".class";
    public static final String KEY = Tokens.makeNamespace(IntervalFilterMap.class) + ".key";
    public static final String START_VALUE = Tokens.makeNamespace(IntervalFilterMap.class) + ".startValue";
    public static final String END_VALUE = Tokens.makeNamespace(IntervalFilterMap.class) + ".endValue";
    public static final String VALUE_CLASS = Tokens.makeNamespace(IntervalFilterMap.class) + ".valueClass";

    public enum Counters {
        VERTICES_FILTERED,
        EDGES_FILTERED
    }

    /**
     * Builds the job configuration. Strings are stored as-is; all numbers are
     * coerced to float; booleans are stored natively. Any other value type is
     * rejected up front.
     */
    public static Configuration createConfiguration(final Class<? extends Element> klass, final String key,
                                                    final Object startValue, final Object endValue) {
        final Configuration configuration = new EmptyConfiguration();
        configuration.setClass(CLASS, klass, Element.class);
        configuration.set(KEY, key);
        if (startValue instanceof String) {
            configuration.set(VALUE_CLASS, String.class.getName());
            configuration.set(START_VALUE, (String) startValue);
            configuration.set(END_VALUE, (String) endValue);
        } else if (startValue instanceof Number) {
            // All numeric comparisons happen in float space.
            configuration.set(VALUE_CLASS, Float.class.getName());
            configuration.setFloat(START_VALUE, ((Number) startValue).floatValue());
            configuration.setFloat(END_VALUE, ((Number) endValue).floatValue());
        } else if (startValue instanceof Boolean) {
            configuration.set(VALUE_CLASS, Boolean.class.getName());
            configuration.setBoolean(START_VALUE, (Boolean) startValue);
            configuration.setBoolean(END_VALUE, (Boolean) endValue);
        } else {
            throw new RuntimeException("Unknown value class: " + startValue.getClass().getName());
        }
        return configuration;
    }

    public static class Map extends Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex> {

        // True when the filter targets vertices; false when it targets edges.
        private boolean isVertex;
        // startChecker enforces value >= startValue; endChecker enforces value < endValue.
        private ElementChecker startChecker;
        private ElementChecker endChecker;

        @Override
        public void setup(final Mapper.Context context) throws IOException, InterruptedException {
            this.isVertex = context.getConfiguration().getClass(CLASS, Element.class, Element.class).equals(Vertex.class);
            final String key = context.getConfiguration().get(KEY);
            final Class valueClass = context.getConfiguration().getClass(VALUE_CLASS, String.class);
            final Object startValue;
            final Object endValue;
            if (valueClass.equals(String.class)) {
                startValue = context.getConfiguration().get(START_VALUE);
                endValue = context.getConfiguration().get(END_VALUE);
            } else if (Number.class.isAssignableFrom((valueClass))) {
                // NOTE(review): Float.MIN_VALUE is the smallest *positive* float, not the
                // most negative — as a "no lower bound" default it would exclude negative
                // values. Harmless as long as createConfiguration always sets both keys,
                // but worth confirming.
                startValue = context.getConfiguration().getFloat(START_VALUE, Float.MIN_VALUE);
                endValue = context.getConfiguration().getFloat(END_VALUE, Float.MAX_VALUE);
            } else {
                throw new IOException("Class " + valueClass + " is an unsupported value class");
            }
            this.startChecker = new ElementChecker(key, Compare.GREATER_THAN_EQUAL, startValue);
            this.endChecker = new ElementChecker(key, Compare.LESS_THAN, endValue);
        }

        @Override
        public void map(final NullWritable key, final FaunusVertex value,
                        final Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex>.Context context)
                throws IOException, InterruptedException {
            if (this.isVertex) {
                // Drop the vertex from the traversal (clear its paths) when it fails
                // either bound; only vertices still carrying paths are considered.
                if (value.hasPaths() && !(this.startChecker.isLegal(value) && this.endChecker.isLegal(value))) {
                    value.clearPaths();
                    DEFAULT_COMPAT.incrementContextCounter(context, Counters.VERTICES_FILTERED, 1L);
                }
            } else {
                // Edge mode: check every incident edge in both directions and clear
                // paths on those outside the interval, counting how many were dropped.
                long counter = 0;
                for (final Edge e : value.getEdges(Direction.BOTH)) {
                    final StandardFaunusEdge edge = (StandardFaunusEdge) e;
                    if (edge.hasPaths() && !(this.startChecker.isLegal(edge) && this.endChecker.isLegal(edge))) {
                        edge.clearPaths();
                        counter++;
                    }
                }
                DEFAULT_COMPAT.incrementContextCounter(context, Counters.EDGES_FILTERED, counter);
            }
            // The vertex itself is always re-emitted; filtering is expressed via paths.
            context.write(NullWritable.get(), value);
        }
    }
}
1no label
titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_mapreduce_filter_IntervalFilterMap.java
1,132
public class OSQLMethodAsBoolean extends OAbstractSQLMethod { public static final String NAME = "asboolean"; public OSQLMethodAsBoolean() { super(NAME); } @Override public Object execute(OIdentifiable iCurrentRecord, OCommandContext iContext, Object ioResult, Object[] iMethodParams) { if (ioResult != null) { if (ioResult instanceof String) { ioResult = Boolean.valueOf(((String) ioResult).trim()); } else if (ioResult instanceof Number) { final int bValue = ((Number) ioResult).intValue(); if (bValue == 0) { ioResult = Boolean.FALSE; } else if (bValue == 1) { ioResult = Boolean.TRUE; } else { // IGNORE OTHER VALUES ioResult = null; } } } return ioResult; } }
1no label
core_src_main_java_com_orientechnologies_orient_core_sql_method_misc_OSQLMethodAsBoolean.java
52
/**
 * Common supertype of the two kinds of relation types in the schema:
 * property keys and edge labels. Exactly one of the two predicates below
 * is expected to hold for any concrete instance.
 */
public interface RelationType extends TitanVertex, TitanSchemaType {

    /**
     * Checks if this relation type is a property key.
     *
     * @return true, if this relation type is a property key, else false.
     * @see PropertyKey
     */
    public boolean isPropertyKey();

    /**
     * Checks if this relation type is an edge label.
     *
     * @return true, if this relation type is an edge label, else false.
     * @see EdgeLabel
     */
    public boolean isEdgeLabel();
}
0true
titan-core_src_main_java_com_thinkaurelius_titan_core_RelationType.java
1,161
/**
 * SQL method {@code toLowerCase()}: returns the string form of the current
 * result lower-cased, or null when the result is null.
 */
public class OSQLMethodToLowerCase extends OAbstractSQLMethod {

  public static final String NAME = "tolowercase";

  public OSQLMethodToLowerCase() {
    super(NAME);
  }

  @Override
  public Object execute(OIdentifiable iCurrentRecord, OCommandContext iContext, Object ioResult, Object[] iMethodParams) {
    if (ioResult == null) {
      return null;
    }
    // NOTE(review): toLowerCase() uses the default locale, so results vary with the
    // JVM's locale (e.g. Turkish dotless i) — confirm whether that is intended.
    return ioResult.toString().toLowerCase();
  }
}
1no label
core_src_main_java_com_orientechnologies_orient_core_sql_method_misc_OSQLMethodToLowerCase.java
507
// Hand the request to the cluster-level create-index service and adapt its
// cluster-state callback onto the action listener.
createIndexService.createIndex(updateRequest, new ClusterStateUpdateListener() {

    @Override
    public void onResponse(ClusterStateUpdateResponse response) {
        // Surface the cluster-state acknowledgement flag as the action's response.
        listener.onResponse(new CreateIndexResponse(response.isAcknowledged()));
    }

    @Override
    public void onFailure(Throwable t) {
        // "Index already exists" is logged at trace (quieter) than other failures;
        // either way the original throwable is propagated to the caller.
        if (t instanceof IndexAlreadyExistsException) {
            logger.trace("[{}] failed to create", t, request.index());
        } else {
            logger.debug("[{}] failed to create", t, request.index());
        }
        listener.onFailure(t);
    }
});
0true
src_main_java_org_elasticsearch_action_admin_indices_create_TransportCreateIndexAction.java
81
{
    @Override
    public Object doWork( Void state ) throws Exception {
        // Read the node's relationships inside its own transaction, then mark
        // the transaction successful; the work produces no result.
        try ( Transaction tx = db.beginTx() )
        {
            node.getRelationships();
            tx.success();
        }
        return null;
    }
};
0true
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_TestCacheUpdateDeadlock.java
354
/**
 * Value holder describing a single-table inheritance mapping: the entity class
 * plus its discriminator column name, type, and length.
 *
 * <p>Equality and hashing are based on {@code className} alone — two instances
 * for the same class compare equal regardless of discriminator settings.
 */
public class SingleTableInheritanceInfo {

    protected String className;
    protected String discriminatorName;
    protected DiscriminatorType discriminatorType;
    protected int discriminatorLength;

    public String getClassName() {
        return className;
    }

    public void setClassName(String className) {
        this.className = className;
    }

    public String getDiscriminatorName() {
        return discriminatorName;
    }

    public void setDiscriminatorName(String discriminatorName) {
        this.discriminatorName = discriminatorName;
    }

    public DiscriminatorType getDiscriminatorType() {
        return discriminatorType;
    }

    public void setDiscriminatorType(DiscriminatorType discriminatorType) {
        this.discriminatorType = discriminatorType;
    }

    public int getDiscriminatorLength() {
        return discriminatorLength;
    }

    public void setDiscriminatorLength(int discriminatorLength) {
        this.discriminatorLength = discriminatorLength;
    }

    @Override
    public int hashCode() {
        // Mirrors equals(): only className participates.
        return 31 + (className == null ? 0 : className.hashCode());
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        SingleTableInheritanceInfo other = (SingleTableInheritanceInfo) obj;
        return className == null ? other.className == null : className.equals(other.className);
    }
}
1no label
common_src_main_java_org_broadleafcommerce_common_extensibility_jpa_convert_inheritance_SingleTableInheritanceInfo.java
719
/**
 * Shared base for Hazelcast collection services (lists/sets): manages the
 * name-to-container map, dispatches item events to listeners, rolls back
 * transactions per partition, and participates in partition migration.
 */
public abstract class CollectionService implements ManagedService, RemoteService,
        EventPublishingService<CollectionEvent, ItemListener>, TransactionalService, MigrationAwareService {

    protected NodeEngine nodeEngine;

    protected CollectionService(NodeEngine nodeEngine) {
        this.nodeEngine = nodeEngine;
    }

    @Override
    public void init(NodeEngine nodeEngine, Properties properties) {
        // No initialization needed; the node engine was injected via the constructor.
    }

    @Override
    public void reset() {
        // Drops all containers (and therefore all collection data) on this member.
        getContainerMap().clear();
    }

    @Override
    public void shutdown(boolean terminate) {
        reset();
    }

    @Override
    public void destroyDistributedObject(String name) {
        // Remove the container and any listeners registered for that collection.
        getContainerMap().remove(name);
        nodeEngine.getEventService().deregisterAllListeners(getServiceName(), name);
    }

    public abstract CollectionContainer getOrCreateContainer(String name, boolean backup);

    public abstract Map<String, ? extends CollectionContainer> getContainerMap();

    public abstract String getServiceName();

    @Override
    public void dispatchEvent(CollectionEvent event, ItemListener listener) {
        // Deserialize the payload and resolve the caller to a cluster member
        // before invoking the user's listener.
        ItemEvent itemEvent = new ItemEvent(event.name, event.eventType, nodeEngine.toObject(event.data),
                nodeEngine.getClusterService().getMember(event.caller));
        if (event.eventType.equals(ItemEventType.ADDED)) {
            listener.itemAdded(itemEvent);
        } else {
            listener.itemRemoved(itemEvent);
        }
    }

    @Override
    public void rollbackTransaction(String transactionId) {
        // Fire a rollback operation at the owning partition of every known
        // collection; each operation targets the partition derived from the name.
        final Set<String> collectionNames = getContainerMap().keySet();
        InternalPartitionService partitionService = nodeEngine.getPartitionService();
        OperationService operationService = nodeEngine.getOperationService();
        for (String name : collectionNames) {
            int partitionId = partitionService.getPartitionId(StringPartitioningStrategy.getPartitionKey(name));
            Operation operation = new CollectionTransactionRollbackOperation(name, transactionId)
                    .setPartitionId(partitionId)
                    .setService(this)
                    .setNodeEngine(nodeEngine);
            operationService.executeOperation(operation);
        }
    }

    @Override
    public void beforeMigration(PartitionMigrationEvent event) {
        // Nothing to prepare before a migration starts.
    }

    /**
     * Collects the containers that must travel with the given replication event:
     * those owned by the event's partition whose configured backup count covers
     * the event's replica index.
     */
    public Map<String, CollectionContainer> getMigrationData(PartitionReplicationEvent event) {
        Map<String, CollectionContainer> migrationData = new HashMap<String, CollectionContainer>();
        InternalPartitionService partitionService = nodeEngine.getPartitionService();
        for (Map.Entry<String, ? extends CollectionContainer> entry : getContainerMap().entrySet()) {
            String name = entry.getKey();
            int partitionId = partitionService.getPartitionId(StringPartitioningStrategy.getPartitionKey(name));
            CollectionContainer container = entry.getValue();
            if (partitionId == event.getPartitionId()
                    && container.getConfig().getTotalBackupCount() >= event.getReplicaIndex()) {
                migrationData.put(name, container);
            }
        }
        return migrationData;
    }

    @Override
    public void commitMigration(PartitionMigrationEvent event) {
        // On commit, the source no longer owns the partition's data.
        if (event.getMigrationEndpoint() == MigrationEndpoint.SOURCE) {
            clearMigrationData(event.getPartitionId());
        }
    }

    @Override
    public void rollbackMigration(PartitionMigrationEvent event) {
        // On rollback, the destination discards the partially received data.
        if (event.getMigrationEndpoint() == MigrationEndpoint.DESTINATION) {
            clearMigrationData(event.getPartitionId());
        }
    }

    @Override
    public void clearPartitionReplica(int partitionId) {
        clearMigrationData(partitionId);
    }

    // Destroys and removes every container whose name hashes to the given partition.
    // Uses an explicit Iterator so removal during traversal is safe.
    private void clearMigrationData(int partitionId) {
        final Set<? extends Map.Entry<String, ? extends CollectionContainer>> entrySet = getContainerMap().entrySet();
        final Iterator<? extends Map.Entry<String, ? extends CollectionContainer>> iterator = entrySet.iterator();
        InternalPartitionService partitionService = nodeEngine.getPartitionService();
        while (iterator.hasNext()) {
            final Map.Entry<String, ? extends CollectionContainer> entry = iterator.next();
            final String name = entry.getKey();
            final CollectionContainer container = entry.getValue();
            int containerPartitionId = partitionService.getPartitionId(StringPartitioningStrategy.getPartitionKey(name));
            if (containerPartitionId == partitionId) {
                container.destroy();
                iterator.remove();
            }
        }
    }

    public void addContainer(String name, CollectionContainer container) {
        final Map map = getContainerMap();
        map.put(name, container);
    }
}
1no label
hazelcast_src_main_java_com_hazelcast_collection_CollectionService.java
3,988
/**
 * Builder for a {@code function_score} query: wraps a query or filter and a
 * list of (optional filter, score function) pairs, plus score/boost combination
 * modes. Serializes itself to the query DSL in {@link #doXContent}.
 */
public class FunctionScoreQueryBuilder extends BaseQueryBuilder implements BoostableQueryBuilder<FunctionScoreQueryBuilder> {

    // Exactly one of queryBuilder/filterBuilder may be set (or neither).
    private final QueryBuilder queryBuilder;

    private final FilterBuilder filterBuilder;

    private Float boost;

    private Float maxBoost;

    private String scoreMode;

    private String boostMode;

    // filters and scoreFunctions are parallel lists; a null filter entry means
    // "apply the function unconditionally".
    private ArrayList<FilterBuilder> filters = new ArrayList<FilterBuilder>();
    private ArrayList<ScoreFunctionBuilder> scoreFunctions = new ArrayList<ScoreFunctionBuilder>();

    public FunctionScoreQueryBuilder(QueryBuilder queryBuilder) {
        this.queryBuilder = queryBuilder;
        this.filterBuilder = null;
    }

    public FunctionScoreQueryBuilder(FilterBuilder filterBuilder) {
        this.filterBuilder = filterBuilder;
        this.queryBuilder = null;
    }

    public FunctionScoreQueryBuilder() {
        this.filterBuilder = null;
        this.queryBuilder = null;
    }

    public FunctionScoreQueryBuilder(ScoreFunctionBuilder scoreFunctionBuilder) {
        queryBuilder = null;
        filterBuilder = null;
        this.filters.add(null);
        this.scoreFunctions.add(scoreFunctionBuilder);
    }

    /** Adds a score function gated by the given filter. */
    public FunctionScoreQueryBuilder add(FilterBuilder filter, ScoreFunctionBuilder scoreFunctionBuilder) {
        this.filters.add(filter);
        this.scoreFunctions.add(scoreFunctionBuilder);
        return this;
    }

    /** Adds an unfiltered score function. */
    public FunctionScoreQueryBuilder add(ScoreFunctionBuilder scoreFunctionBuilder) {
        this.filters.add(null);
        this.scoreFunctions.add(scoreFunctionBuilder);
        return this;
    }

    /** How scores from the individual functions are combined. */
    public FunctionScoreQueryBuilder scoreMode(String scoreMode) {
        this.scoreMode = scoreMode;
        return this;
    }

    /** How the combined function score is merged with the query score. */
    public FunctionScoreQueryBuilder boostMode(String boostMode) {
        this.boostMode = boostMode;
        return this;
    }

    public FunctionScoreQueryBuilder boostMode(CombineFunction combineFunction) {
        this.boostMode = combineFunction.getName();
        return this;
    }

    /** Caps the function score at the given value. */
    public FunctionScoreQueryBuilder maxBoost(float maxBoost) {
        this.maxBoost = maxBoost;
        return this;
    }

    /**
     * Sets the boost for this query. Documents matching this query will (in
     * addition to the normal weightings) have their score multiplied by the
     * boost provided.
     */
    public FunctionScoreQueryBuilder boost(float boost) {
        this.boost = boost;
        return this;
    }

    @Override
    protected void doXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(FunctionScoreQueryParser.NAME);
        if (queryBuilder != null) {
            builder.field("query");
            queryBuilder.toXContent(builder, params);
        } else if (filterBuilder != null) {
            builder.field("filter");
            filterBuilder.toXContent(builder, params);
        }
        // If there is only one function without a filter, we later want to
        // create a FunctionScoreQuery. For this, we only build the score
        // function; this will be translated to a FunctionScoreQuery in the parser.
        if (filters.size() == 1 && filters.get(0) == null) {
            scoreFunctions.get(0).toXContent(builder, params);
        } else {
            // In all other cases we build the format needed for a
            // FiltersFunctionScoreQuery: an array of {filter?, function} objects.
            builder.startArray("functions");
            for (int i = 0; i < filters.size(); i++) {
                builder.startObject();
                if (filters.get(i) != null) {
                    builder.field("filter");
                    filters.get(i).toXContent(builder, params);
                }
                scoreFunctions.get(i).toXContent(builder, params);
                builder.endObject();
            }
            builder.endArray();
        }
        // Optional scalar settings are emitted only when explicitly set.
        if (scoreMode != null) {
            builder.field("score_mode", scoreMode);
        }
        if (boostMode != null) {
            builder.field("boost_mode", boostMode);
        }
        if (maxBoost != null) {
            builder.field("max_boost", maxBoost);
        }
        if (boost != null) {
            builder.field("boost", boost);
        }
        builder.endObject();
    }
}
1no label
src_main_java_org_elasticsearch_index_query_functionscore_FunctionScoreQueryBuilder.java
6,115
// After nodes leave the cluster (or a new master takes over), walk the in-flight
// snapshots in cluster state: fail shards whose node is gone, finish snapshots
// that thereby became complete, and let a new master clean up INIT/SUCCESS
// snapshots abandoned by the old master.
clusterService.submitStateUpdateTask("update snapshot state after node removal", new ClusterStateUpdateTask() {
    @Override
    public ClusterState execute(ClusterState currentState) throws Exception {
        DiscoveryNodes nodes = currentState.nodes();
        MetaData metaData = currentState.metaData();
        MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData());
        SnapshotMetaData snapshots = metaData.custom(SnapshotMetaData.TYPE);
        if (snapshots == null) {
            // No snapshots in flight — nothing to do.
            return currentState;
        }
        boolean changed = false;
        ArrayList<SnapshotMetaData.Entry> entries = newArrayList();
        for (final SnapshotMetaData.Entry snapshot : snapshots.entries()) {
            SnapshotMetaData.Entry updatedSnapshot = snapshot;
            boolean snapshotChanged = false;
            if (snapshot.state() == State.STARTED) {
                // Rebuild the shard map, marking FAILED any incomplete shard whose
                // assigned node is no longer in the cluster.
                // NOTE(review): shards whose status is already completed (or that have
                // no node id) are not copied into the rebuilt map — confirm this is
                // intended, since the rebuilt map is only used when snapshotChanged.
                ImmutableMap.Builder<ShardId, ShardSnapshotStatus> shards = ImmutableMap.builder();
                for (ImmutableMap.Entry<ShardId, ShardSnapshotStatus> shardEntry : snapshot.shards().entrySet()) {
                    ShardSnapshotStatus shardStatus = shardEntry.getValue();
                    if (!shardStatus.state().completed() && shardStatus.nodeId() != null) {
                        if (nodes.nodeExists(shardStatus.nodeId())) {
                            shards.put(shardEntry);
                        } else {
                            // TODO: Restart snapshot on another node?
                            snapshotChanged = true;
                            logger.warn("failing snapshot of shard [{}] on closed node [{}]",
                                    shardEntry.getKey(), shardStatus.nodeId());
                            shards.put(shardEntry.getKey(),
                                    new ShardSnapshotStatus(shardStatus.nodeId(), State.FAILED, "node shutdown"));
                        }
                    }
                }
                if (snapshotChanged) {
                    changed = true;
                    ImmutableMap<ShardId, ShardSnapshotStatus> shardsMap = shards.build();
                    if (!snapshot.state().completed() && completed(shardsMap.values())) {
                        // Every remaining shard is done — finish the snapshot now.
                        updatedSnapshot = new SnapshotMetaData.Entry(snapshot.snapshotId(),
                                snapshot.includeGlobalState(), State.SUCCESS, snapshot.indices(), shardsMap);
                        endSnapshot(updatedSnapshot);
                    } else {
                        updatedSnapshot = new SnapshotMetaData.Entry(snapshot.snapshotId(),
                                snapshot.includeGlobalState(), snapshot.state(), snapshot.indices(), shardsMap);
                    }
                }
                entries.add(updatedSnapshot);
            } else if (snapshot.state() == State.INIT && newMaster) {
                // Clean up the snapshot that failed to start from the old master
                deleteSnapshot(snapshot.snapshotId(), new DeleteSnapshotListener() {
                    @Override
                    public void onResponse() {
                        logger.debug("cleaned up abandoned snapshot {} in INIT state", snapshot.snapshotId());
                    }

                    @Override
                    public void onFailure(Throwable t) {
                        logger.warn("failed to clean up abandoned snapshot {} in INIT state", snapshot.snapshotId());
                    }
                });
            } else if (snapshot.state() == State.SUCCESS && newMaster) {
                // Finalize the snapshot left unfinished by the old master.
                endSnapshot(snapshot);
            }
        }
        if (changed) {
            snapshots = new SnapshotMetaData(entries.toArray(new SnapshotMetaData.Entry[entries.size()]));
            mdBuilder.putCustom(SnapshotMetaData.TYPE, snapshots);
            return ClusterState.builder(currentState).metaData(mdBuilder).build();
        }
        return currentState;
    }

    @Override
    public void onFailure(String source, Throwable t) {
        // Fix: include the throwable — the original swallowed the cause, leaving
        // no stack trace to diagnose why the state update failed.
        logger.warn("failed to update snapshot state after node removal", t);
    }
});
1no label
src_main_java_org_elasticsearch_snapshots_SnapshotsService.java
47
/**
 * Sort direction for result ordering.
 */
public enum Order {

    /**
     * Increasing
     */
    ASC,

    /**
     * Decreasing
     */
    DESC;

    /**
     * Adjusts a {@link Comparable#compareTo(Object)} result to honor this
     * direction: unchanged for {@link #ASC}, negated for {@link #DESC}.
     *
     * @param compare a raw natural-order comparison result
     * @return the comparison result adjusted for this order
     */
    public int modulateNaturalOrder(int compare) {
        if (this == ASC) {
            return compare;
        }
        if (this == DESC) {
            return -compare;
        }
        // Unreachable with the current constants; kept as a safety net.
        throw new AssertionError("Unrecognized order: " + this);
    }

    /**
     * The default order when none is specified
     */
    public static final Order DEFAULT = ASC;
}
0true
titan-core_src_main_java_com_thinkaurelius_titan_core_Order.java
733
/**
 * Immutable (pageIndex, itemIndex) pair — presumably one step of a page path
 * recorded during tree traversal; confirm against OSBTree's usage.
 */
private static final class PagePathItemUnit {
  private final long pageIndex;
  private final int  itemIndex;

  private PagePathItemUnit(long pageIndex, int itemIndex) {
    this.pageIndex = pageIndex;
    this.itemIndex = itemIndex;
  }
}
1no label
core_src_main_java_com_orientechnologies_orient_core_index_sbtree_local_OSBTree.java
360
/**
 * Test mapper that fails unconditionally, used to verify how the map-reduce
 * framework propagates exceptions thrown inside a mapper.
 */
public static class ExceptionThrowingMapper implements Mapper<Integer, Integer, String, Integer> {

    @Override
    public void map(Integer key, Integer value, Context<String, Integer> context) {
        // Always throw; the test asserts on how this surfaces to the caller.
        throw new NullPointerException("BUMM!");
    }
}
0true
hazelcast-client_src_test_java_com_hazelcast_client_mapreduce_ClientMapReduceTest.java
3,017
/**
 * Per-index query-parser cache backed by an in-heap ("resident") Guava cache,
 * mapping parsed query settings to Lucene {@link Query} objects. Size and
 * optional access-expiry come from index settings ("max_size", "expire").
 */
public class ResidentQueryParserCache extends AbstractIndexComponent implements QueryParserCache {

    private final Cache<QueryParserSettings, Query> cache;

    private volatile int maxSize;
    private volatile TimeValue expire;

    @Inject
    public ResidentQueryParserCache(Index index, @IndexSettings Settings indexSettings) {
        super(index, indexSettings);

        // Defaults: at most 100 cached queries, no time-based expiry.
        this.maxSize = componentSettings.getAsInt("max_size", 100);
        this.expire = componentSettings.getAsTime("expire", null);
        logger.debug("using [resident] query cache with max_size [{}], expire [{}]", maxSize, expire);

        CacheBuilder cacheBuilder = CacheBuilder.newBuilder().maximumSize(maxSize);
        if (expire != null) {
            // Evict entries not accessed within the configured window.
            cacheBuilder.expireAfterAccess(expire.nanos(), TimeUnit.NANOSECONDS);
        }

        this.cache = cacheBuilder.build();
    }

    @Override
    public Query get(QueryParserSettings queryString) {
        return cache.getIfPresent(queryString);
    }

    @Override
    public void put(QueryParserSettings queryString, Query query) {
        // Only cache queries whose settings mark them as cacheable.
        if (queryString.isCacheable()) {
            cache.put(queryString, query);
        }
    }

    @Override
    public void clear() {
        cache.invalidateAll();
    }

    @Override
    public void close() throws ElasticsearchException {
        cache.invalidateAll();
    }
}
1no label
src_main_java_org_elasticsearch_index_cache_query_parser_resident_ResidentQueryParserCache.java