gt
stringclasses
1 value
context
stringlengths
2.05k
161k
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.location.suplclient.asn1.supl2.ver2_ulp_components;

// Copyright 2008 Google Inc. All Rights Reserved.
/*
 * This class is AUTOMATICALLY GENERATED. Do NOT EDIT.
 */
//
//
import com.google.location.suplclient.asn1.base.Asn1Integer;
import com.google.location.suplclient.asn1.base.Asn1Object;
import com.google.location.suplclient.asn1.base.Asn1Sequence;
import com.google.location.suplclient.asn1.base.Asn1Tag;
import com.google.location.suplclient.asn1.base.BitStream;
import com.google.location.suplclient.asn1.base.BitStreamReader;
import com.google.location.suplclient.asn1.base.SequenceComponent;
import com.google.common.collect.ImmutableList;
import java.util.Collection;
import javax.annotation.Nullable;

/**
 * ASN.1 SEQUENCE describing one LTE cell: its global cell identity, physical
 * cell id, tracking area code, and optional measurement results
 * (RSRP, RSRQ, timing advance, and a list of neighbour EUTRA measurements).
 * The sequence is extensible (isExtensible() returns true).
 */
public class LteCellInformation extends Asn1Sequence {
  //
  // NOTE(review): (-1, -1) appears to be the generator's sentinel for
  // "no tag" — getPossibleFirstTags() below branches on null. TODO confirm
  // against Asn1Tag.fromClassAndNumber.
  private static final Asn1Tag TAG_LteCellInformation
      = Asn1Tag.fromClassAndNumber(-1, -1);

  public LteCellInformation() {
    super();
  }

  @Override
  @Nullable
  protected Asn1Tag getTag() {
    return TAG_LteCellInformation;
  }

  @Override
  protected boolean isTagImplicit() {
    return true;
  }

  // First tags that may begin an encoding of this type; falls back to the
  // generic SEQUENCE tags when no explicit tag is assigned.
  public static Collection<Asn1Tag> getPossibleFirstTags() {
    if (TAG_LteCellInformation != null) {
      return ImmutableList.of(TAG_LteCellInformation);
    } else {
      return Asn1Sequence.getPossibleFirstTags();
    }
  }

  /**
   * Creates a new LteCellInformation from encoded stream.
   */
  public static LteCellInformation fromPerUnaligned(byte[] encodedBytes) {
    LteCellInformation result = new LteCellInformation();
    result.decodePerUnaligned(new BitStreamReader(encodedBytes));
    return result;
  }

  /**
   * Creates a new LteCellInformation from encoded stream.
   */
  public static LteCellInformation fromPerAligned(byte[] encodedBytes) {
    LteCellInformation result = new LteCellInformation();
    result.decodePerAligned(new BitStreamReader(encodedBytes));
    return result;
  }

  @Override protected boolean isExtensible() {
    return true;
  }

  @Override public boolean containsExtensionValues() {
    // True iff any extension component has been explicitly set.
    for (SequenceComponent extensionComponent : getExtensionComponents()) {
      if (extensionComponent.isExplicitlySet())
        return true;
    }
    return false;
  }

  // --- Component fields: each has a getter, a casting setter, and a
  // --- "set to new instance" helper used by the decoder. ---

  private CellGlobalIdEUTRA cellGlobalIdEUTRA_;
  public CellGlobalIdEUTRA getCellGlobalIdEUTRA() {
    return cellGlobalIdEUTRA_;
  }
  /**
   * @throws ClassCastException if value is not a CellGlobalIdEUTRA
   */
  public void setCellGlobalIdEUTRA(Asn1Object value) {
    this.cellGlobalIdEUTRA_ = (CellGlobalIdEUTRA) value;
  }
  public CellGlobalIdEUTRA setCellGlobalIdEUTRAToNewInstance() {
    cellGlobalIdEUTRA_ = new CellGlobalIdEUTRA();
    return cellGlobalIdEUTRA_;
  }

  private PhysCellId physCellId_;
  public PhysCellId getPhysCellId() {
    return physCellId_;
  }
  /**
   * @throws ClassCastException if value is not a PhysCellId
   */
  public void setPhysCellId(Asn1Object value) {
    this.physCellId_ = (PhysCellId) value;
  }
  public PhysCellId setPhysCellIdToNewInstance() {
    physCellId_ = new PhysCellId();
    return physCellId_;
  }

  private TrackingAreaCode trackingAreaCode_;
  public TrackingAreaCode getTrackingAreaCode() {
    return trackingAreaCode_;
  }
  /**
   * @throws ClassCastException if value is not a TrackingAreaCode
   */
  public void setTrackingAreaCode(Asn1Object value) {
    this.trackingAreaCode_ = (TrackingAreaCode) value;
  }
  public TrackingAreaCode setTrackingAreaCodeToNewInstance() {
    trackingAreaCode_ = new TrackingAreaCode();
    return trackingAreaCode_;
  }

  private RSRP_Range rsrpResult_;
  public RSRP_Range getRsrpResult() {
    return rsrpResult_;
  }
  /**
   * @throws ClassCastException if value is not a RSRP_Range
   */
  public void setRsrpResult(Asn1Object value) {
    this.rsrpResult_ = (RSRP_Range) value;
  }
  public RSRP_Range setRsrpResultToNewInstance() {
    rsrpResult_ = new RSRP_Range();
    return rsrpResult_;
  }

  private RSRQ_Range rsrqResult_;
  public RSRQ_Range getRsrqResult() {
    return rsrqResult_;
  }
  /**
   * @throws ClassCastException if value is not a RSRQ_Range
   */
  public void setRsrqResult(Asn1Object value) {
    this.rsrqResult_ = (RSRQ_Range) value;
  }
  public RSRQ_Range setRsrqResultToNewInstance() {
    rsrqResult_ = new RSRQ_Range();
    return rsrqResult_;
  }

  private LteCellInformation.tAType tA_;
  public LteCellInformation.tAType getTA() {
    return tA_;
  }
  /**
   * @throws ClassCastException if value is not a LteCellInformation.tAType
   */
  public void setTA(Asn1Object value) {
    this.tA_ = (LteCellInformation.tAType) value;
  }
  public LteCellInformation.tAType setTAToNewInstance() {
    tA_ = new LteCellInformation.tAType();
    return tA_;
  }

  private MeasResultListEUTRA measResultListEUTRA_;
  public MeasResultListEUTRA getMeasResultListEUTRA() {
    return measResultListEUTRA_;
  }
  /**
   * @throws ClassCastException if value is not a MeasResultListEUTRA
   */
  public void setMeasResultListEUTRA(Asn1Object value) {
    this.measResultListEUTRA_ = (MeasResultListEUTRA) value;
  }
  public MeasResultListEUTRA setMeasResultListEUTRAToNewInstance() {
    measResultListEUTRA_ = new MeasResultListEUTRA();
    return measResultListEUTRA_;
  }

  // Ordered component descriptors used by the Asn1Sequence encoder/decoder.
  // Context tags 0-2 are mandatory (cellGlobalIdEUTRA, physCellId,
  // trackingAreaCode); tags 3-6 are OPTIONAL (rsrpResult, rsrqResult, tA,
  // measResultListEUTRA). The order here fixes the wire order — do not change.
  @Override public Iterable<? extends SequenceComponent> getComponents() {
    ImmutableList.Builder<SequenceComponent> builder = ImmutableList.builder();

    builder.add(new SequenceComponent() {
          Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 0);

          @Override public boolean isExplicitlySet() {
            return getCellGlobalIdEUTRA() != null;
          }

          @Override public boolean hasDefaultValue() {
            return false;
          }

          @Override public boolean isOptional() {
            return false;
          }

          @Override public Asn1Object getComponentValue() {
            return getCellGlobalIdEUTRA();
          }

          @Override public void setToNewInstance() {
            setCellGlobalIdEUTRAToNewInstance();
          }

          @Override public Collection<Asn1Tag> getPossibleFirstTags() {
            return tag == null ? CellGlobalIdEUTRA.getPossibleFirstTags() : ImmutableList.of(tag);
          }

          @Override
          public Asn1Tag getTag() {
            return tag;
          }

          @Override
          public boolean isImplicitTagging() {
            return true;
          }

          @Override public String toIndentedString(String indent) {
                return "cellGlobalIdEUTRA : "
                    + getCellGlobalIdEUTRA().toIndentedString(indent);
              }
        });

    builder.add(new SequenceComponent() {
          Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 1);

          @Override public boolean isExplicitlySet() {
            return getPhysCellId() != null;
          }

          @Override public boolean hasDefaultValue() {
            return false;
          }

          @Override public boolean isOptional() {
            return false;
          }

          @Override public Asn1Object getComponentValue() {
            return getPhysCellId();
          }

          @Override public void setToNewInstance() {
            setPhysCellIdToNewInstance();
          }

          @Override public Collection<Asn1Tag> getPossibleFirstTags() {
            return tag == null ? PhysCellId.getPossibleFirstTags() : ImmutableList.of(tag);
          }

          @Override
          public Asn1Tag getTag() {
            return tag;
          }

          @Override
          public boolean isImplicitTagging() {
            return true;
          }

          @Override public String toIndentedString(String indent) {
                return "physCellId : "
                    + getPhysCellId().toIndentedString(indent);
              }
        });

    builder.add(new SequenceComponent() {
          Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 2);

          @Override public boolean isExplicitlySet() {
            return getTrackingAreaCode() != null;
          }

          @Override public boolean hasDefaultValue() {
            return false;
          }

          @Override public boolean isOptional() {
            return false;
          }

          @Override public Asn1Object getComponentValue() {
            return getTrackingAreaCode();
          }

          @Override public void setToNewInstance() {
            setTrackingAreaCodeToNewInstance();
          }

          @Override public Collection<Asn1Tag> getPossibleFirstTags() {
            return tag == null ? TrackingAreaCode.getPossibleFirstTags() : ImmutableList.of(tag);
          }

          @Override
          public Asn1Tag getTag() {
            return tag;
          }

          @Override
          public boolean isImplicitTagging() {
            return true;
          }

          @Override public String toIndentedString(String indent) {
                return "trackingAreaCode : "
                    + getTrackingAreaCode().toIndentedString(indent);
              }
        });

    builder.add(new SequenceComponent() {
          Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 3);

          @Override public boolean isExplicitlySet() {
            return getRsrpResult() != null;
          }

          @Override public boolean hasDefaultValue() {
            return false;
          }

          @Override public boolean isOptional() {
            return true;
          }

          @Override public Asn1Object getComponentValue() {
            return getRsrpResult();
          }

          @Override public void setToNewInstance() {
            setRsrpResultToNewInstance();
          }

          @Override public Collection<Asn1Tag> getPossibleFirstTags() {
            return tag == null ? RSRP_Range.getPossibleFirstTags() : ImmutableList.of(tag);
          }

          @Override
          public Asn1Tag getTag() {
            return tag;
          }

          @Override
          public boolean isImplicitTagging() {
            return true;
          }

          @Override public String toIndentedString(String indent) {
                return "rsrpResult : "
                    + getRsrpResult().toIndentedString(indent);
              }
        });

    builder.add(new SequenceComponent() {
          Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 4);

          @Override public boolean isExplicitlySet() {
            return getRsrqResult() != null;
          }

          @Override public boolean hasDefaultValue() {
            return false;
          }

          @Override public boolean isOptional() {
            return true;
          }

          @Override public Asn1Object getComponentValue() {
            return getRsrqResult();
          }

          @Override public void setToNewInstance() {
            setRsrqResultToNewInstance();
          }

          @Override public Collection<Asn1Tag> getPossibleFirstTags() {
            return tag == null ? RSRQ_Range.getPossibleFirstTags() : ImmutableList.of(tag);
          }

          @Override
          public Asn1Tag getTag() {
            return tag;
          }

          @Override
          public boolean isImplicitTagging() {
            return true;
          }

          @Override public String toIndentedString(String indent) {
                return "rsrqResult : "
                    + getRsrqResult().toIndentedString(indent);
              }
        });

    builder.add(new SequenceComponent() {
          Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 5);

          @Override public boolean isExplicitlySet() {
            return getTA() != null;
          }

          @Override public boolean hasDefaultValue() {
            return false;
          }

          @Override public boolean isOptional() {
            return true;
          }

          @Override public Asn1Object getComponentValue() {
            return getTA();
          }

          @Override public void setToNewInstance() {
            setTAToNewInstance();
          }

          @Override public Collection<Asn1Tag> getPossibleFirstTags() {
            return tag == null ? LteCellInformation.tAType.getPossibleFirstTags() : ImmutableList.of(tag);
          }

          @Override
          public Asn1Tag getTag() {
            return tag;
          }

          @Override
          public boolean isImplicitTagging() {
            return true;
          }

          @Override public String toIndentedString(String indent) {
                return "tA : "
                    + getTA().toIndentedString(indent);
              }
        });

    builder.add(new SequenceComponent() {
          Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 6);

          @Override public boolean isExplicitlySet() {
            return getMeasResultListEUTRA() != null;
          }

          @Override public boolean hasDefaultValue() {
            return false;
          }

          @Override public boolean isOptional() {
            return true;
          }

          @Override public Asn1Object getComponentValue() {
            return getMeasResultListEUTRA();
          }

          @Override public void setToNewInstance() {
            setMeasResultListEUTRAToNewInstance();
          }

          @Override public Collection<Asn1Tag> getPossibleFirstTags() {
            return tag == null ? MeasResultListEUTRA.getPossibleFirstTags() : ImmutableList.of(tag);
          }

          @Override
          public Asn1Tag getTag() {
            return tag;
          }

          @Override
          public boolean isImplicitTagging() {
            return true;
          }

          @Override public String toIndentedString(String indent) {
                return "measResultListEUTRA : "
                    + getMeasResultListEUTRA().toIndentedString(indent);
              }
        });

    return builder.build();
  }

  // No extension components are defined for this (extensible) sequence.
  @Override public Iterable<? extends SequenceComponent> getExtensionComponents() {
    ImmutableList.Builder<SequenceComponent> builder = ImmutableList.builder();
    return builder.build();
  }

// Copyright 2008 Google Inc. All Rights Reserved.
/*
 * This class is AUTOMATICALLY GENERATED. Do NOT EDIT.
 */
//

/**
 * Timing advance value, an INTEGER constrained to the range 0..1282.
 */
  public static class tAType extends Asn1Integer {
    //
    private static final Asn1Tag TAG_tAType
        = Asn1Tag.fromClassAndNumber(-1, -1);

    public tAType() {
      super();
      // Constrained range drives the PER encoding width.
      setValueRange(new java.math.BigInteger("0"), new java.math.BigInteger("1282"));
    }

    @Override
    @Nullable
    protected Asn1Tag getTag() {
      return TAG_tAType;
    }

    @Override
    protected boolean isTagImplicit() {
      return true;
    }

    public static Collection<Asn1Tag> getPossibleFirstTags() {
      if (TAG_tAType != null) {
        return ImmutableList.of(TAG_tAType);
      } else {
        return Asn1Integer.getPossibleFirstTags();
      }
    }

    /**
     * Creates a new tAType from encoded stream.
     */
    public static tAType fromPerUnaligned(byte[] encodedBytes) {
      tAType result = new tAType();
      result.decodePerUnaligned(new BitStreamReader(encodedBytes));
      return result;
    }

    /**
     * Creates a new tAType from encoded stream.
     */
    public static tAType fromPerAligned(byte[] encodedBytes) {
      tAType result = new tAType();
      result.decodePerAligned(new BitStreamReader(encodedBytes));
      return result;
    }

    @Override public Iterable<BitStream> encodePerUnaligned() {
      return super.encodePerUnaligned();
    }

    @Override public Iterable<BitStream> encodePerAligned() {
      return super.encodePerAligned();
    }

    @Override public void decodePerUnaligned(BitStreamReader reader) {
      super.decodePerUnaligned(reader);
    }

    @Override public void decodePerAligned(BitStreamReader reader) {
      super.decodePerAligned(reader);
    }

    @Override public String toString() {
      return toIndentedString("");
    }

    public String toIndentedString(String indent) {
      return "tAType = " + getInteger() + ";\n";
    }
  }

  // Outer-type PER codec entry points simply delegate to Asn1Sequence.
  @Override public Iterable<BitStream> encodePerUnaligned() {
    return super.encodePerUnaligned();
  }

  @Override public Iterable<BitStream> encodePerAligned() {
    return super.encodePerAligned();
  }

  @Override public void decodePerUnaligned(BitStreamReader reader) {
    super.decodePerUnaligned(reader);
  }

  @Override public void decodePerAligned(BitStreamReader reader) {
    super.decodePerAligned(reader);
  }

  @Override public String toString() {
    return toIndentedString("");
  }

  public String toIndentedString(String indent) {
    StringBuilder builder = new StringBuilder();
    builder.append("LteCellInformation = {\n");
    final String internalIndent = indent + " ";
    // Print only components that were explicitly set.
    for (SequenceComponent component : getComponents()) {
      if (component.isExplicitlySet()) {
        builder.append(internalIndent)
            .append(component.toIndentedString(internalIndent));
      }
    }
    if (isExtensible()) {
      builder.append(internalIndent).append("...\n");
      for (SequenceComponent component : getExtensionComponents()) {
        if (component.isExplicitlySet()) {
          builder.append(internalIndent)
              .append(component.toIndentedString(internalIndent));
        }
      }
    }
    builder.append(indent).append("};\n");
    return builder.toString();
  }
}
// Copyright 2006 The Bazel Authors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.syntax; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.fail; import com.google.common.collect.Sets; import com.google.devtools.build.lib.events.Location; import com.google.devtools.build.lib.syntax.util.EvaluationTestCase; import com.google.devtools.build.lib.vfs.PathFragment; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** * Tests of Environment. 
*/
@RunWith(JUnit4.class)
public class EnvironmentTest extends EvaluationTestCase {

  // Tests below run against a BUILD-style environment by default.
  @Override
  public Environment newEnvironment() {
    return newBuildEnvironment();
  }

  // Test the API directly
  @Test
  public void testLookupAndUpdate() throws Exception {
    assertNull(lookup("foo"));
    update("foo", "bar");
    assertEquals("bar", lookup("foo"));
  }

  @Test
  public void testHasVariable() throws Exception {
    assertThat(getEnvironment().hasVariable("VERSION")).isFalse();
    update("VERSION", 42);
    assertThat(getEnvironment().hasVariable("VERSION")).isTrue();
  }

  // Rebinding an existing name is allowed while the environment is mutable.
  @Test
  public void testDoubleUpdateSucceeds() throws Exception {
    update("VERSION", 42);
    assertEquals(42, lookup("VERSION"));
    update("VERSION", 43);
    assertEquals(43, lookup("VERSION"));
  }

  // Test assign through interpreter, lookup through API:
  @Test
  public void testAssign() throws Exception {
    assertNull(lookup("foo"));
    eval("foo = 'bar'");
    assertEquals("bar", lookup("foo"));
  }

  // Test update through API, reference through interpreter:
  @Test
  public void testReference() throws Exception {
    setFailFast(false);
    try {
      eval("foo");
      fail();
    } catch (EvalException e) {
      assertThat(e).hasMessage("name 'foo' is not defined");
    }
    update("foo", "bar");
    assertEquals("bar", eval("foo"));
  }

  // Test assign and reference through interpreter:
  @Test
  public void testAssignAndReference() throws Exception {
    setFailFast(false);
    try {
      eval("foo");
      fail();
    } catch (EvalException e) {
      assertThat(e).hasMessage("name 'foo' is not defined");
    }
    eval("foo = 'bar'");
    assertEquals("bar", eval("foo"));
  }

  // Variable names visible in an environment include its own bindings, the
  // globals it inherits, and the builtin symbols.
  // NOTE(review): the expected sets below pin the exact builtin list of
  // DEFAULT_GLOBALS (including the "-" entry) — they must be updated in
  // lockstep with the interpreter's builtins.
  @Test
  public void testGetVariableNames() throws Exception {
    Environment outerEnv;
    Environment innerEnv;
    try (Mutability mut = Mutability.create("outer")) {
      outerEnv = Environment.builder(mut)
          .setGlobals(Environment.DEFAULT_GLOBALS)
          .build()
          .update("foo", "bar")
          .update("wiz", 3);
    }
    try (Mutability mut = Mutability.create("inner")) {
      innerEnv = Environment.builder(mut)
          .setGlobals(outerEnv.getGlobals()).build()
          .update("foo", "bat")
          .update("quux", 42);
    }
    assertEquals(
        Sets.newHashSet(
            "foo", "wiz",
            "False", "None", "True",
            "-", "all", "any", "bool", "dict", "dir", "enumerate", "fail",
            "getattr", "hasattr", "hash", "int", "len", "list", "max", "min",
            "print", "range", "repr", "reversed", "sorted", "str", "tuple", "zip"),
        outerEnv.getVariableNames());
    assertEquals(
        Sets.newHashSet(
            "foo", "wiz", "quux",
            "False", "None", "True",
            "-", "all", "any", "bool", "dict", "dir", "enumerate", "fail",
            "getattr", "hasattr", "hash", "int", "len", "list", "max", "min",
            "print", "range", "repr", "reversed", "sorted", "str", "tuple", "zip"),
        innerEnv.getVariableNames());
  }

  // toString() identifies the environment, not its contents.
  @Test
  public void testToString() throws Exception {
    update("subject", new StringLiteral("Hello, 'world'.", '\''));
    update("from", new StringLiteral("Java", '"'));
    assertThat(getEnvironment().toString()).isEqualTo("<Environment[test]>");
  }

  @Test
  public void testBindToNullThrowsException() throws Exception {
    try {
      update("some_name", null);
      fail();
    } catch (NullPointerException e) {
      assertThat(e).hasMessage("update(value == null)");
    }
  }

  // After the Mutability is closed (frozen), neither existing nor new
  // bindings may be updated.
  @Test
  public void testFrozen() throws Exception {
    Environment env;
    try (Mutability mutability = Mutability.create("testFrozen")) {
      env = Environment.builder(mutability)
          .setGlobals(Environment.DEFAULT_GLOBALS)
          .setEventHandler(Environment.FAIL_FAST_HANDLER)
          .build();
      env.update("x", 1);
      assertEquals(env.lookup("x"), 1);
      env.update("y", 2);
      assertEquals(env.lookup("y"), 2);
      assertEquals(env.lookup("x"), 1);
      env.update("x", 3);
      assertEquals(env.lookup("x"), 3);
    }
    try {
      // This update to an existing variable should fail because the environment was frozen.
      env.update("x", 4);
      throw new Exception("failed to fail"); // not an AssertionError like fail()
    } catch (AssertionError e) {
      assertThat(e).hasMessage("Can't update x to 4 in frozen environment");
    }
    try {
      // This update to a new variable should also fail because the environment was frozen.
      env.update("newvar", 5);
      throw new Exception("failed to fail"); // not an AssertionError like fail()
    } catch (AssertionError e) {
      assertThat(e).hasMessage("Can't update newvar to 5 in frozen environment");
    }
  }

  // Locking semantics: an object may be locked at several locations; it stays
  // locked until every location is unlocked, and freezing clears all locks.
  @Test
  public void testLocked() throws Exception {
    final Mutability mutability = Mutability.create("testLocked");
    class DummyFreezable implements Mutability.Freezable {
      @Override
      public Mutability mutability() {
        return mutability;
      }
    }
    DummyFreezable dummy = new DummyFreezable();
    Location locA = Location.fromPathFragment(new PathFragment("/a"));
    Location locB = Location.fromPathFragment(new PathFragment("/b"));
    Environment env = Environment.builder(mutability).build();

    // Acquire two locks, release two locks, check along the way.
    assertThat(mutability.isLocked(dummy)).isFalse();
    mutability.lock(dummy, locA);
    assertThat(mutability.isLocked(dummy)).isTrue();
    mutability.lock(dummy, locB);
    assertThat(mutability.isLocked(dummy)).isTrue();
    assertThat(mutability.getLockLocations(dummy)).containsExactly(locA, locB);
    mutability.unlock(dummy, locA);
    assertThat(mutability.isLocked(dummy)).isTrue();
    try {
      Mutability.checkMutable(dummy, env);
      fail("Able to mutate locked object");
    } catch (Mutability.MutabilityException e) {
      assertThat(e).hasMessage("trying to mutate a locked object (is it currently being iterated "
          + "over by a for loop or comprehension?)\n"
          + "Object locked at the following location(s): /b:1");
    }
    try {
      mutability.unlock(dummy, locA);
      fail("Able to unlock object with wrong location");
    } catch (AssertionError e) {
      assertThat(e).hasMessage("trying to unlock an object for a location at which "
          + "it was not locked (/a:1)");
    }
    mutability.unlock(dummy, locB);
    assertThat(mutability.isLocked(dummy)).isFalse();
    Mutability.checkMutable(dummy, env);

    // Acquire, then freeze.
    mutability.lock(dummy, locA);
    mutability.freeze();
    assertThat(mutability.isLocked(dummy)).isFalse();
    try {
      Mutability.checkMutable(dummy, env);
      fail("Able to mutate locked object");
    } catch (Mutability.MutabilityException e) {
      assertThat(e).hasMessage("trying to mutate a frozen object");
    }
  }

  // Read-only (setup) variables and forward references to globals are
  // rejected statically, before evaluation.
  @Test
  public void testReadOnly() throws Exception {
    Environment env = newSkylarkEnvironment()
        .setup("special_var", 42)
        .update("global_var", 666);

    // We don't even get a runtime exception trying to modify these,
    // because we get compile-time exceptions even before we reach runtime!
    try {
      BuildFileAST.eval(env, "special_var = 41");
      throw new AssertionError("failed to fail");
    } catch (EvalException e) {
      assertThat(e.getMessage()).contains("Variable special_var is read only");
    }

    try {
      BuildFileAST.eval(env, "def foo(x): x += global_var; global_var = 36; return x", "foo(1)");
      throw new AssertionError("failed to fail");
    } catch (EvalExceptionWithStackTrace e) {
      assertThat(e.getMessage()).contains("Variable 'global_var' is referenced before assignment. "
          + "The variable is defined in the global scope.");
    }
  }
}
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.sqs; import javax.annotation.Generated; import com.amazonaws.services.sqs.model.*; /** * Abstract implementation of {@code AmazonSQSAsync}. Convenient method forms pass through to the corresponding overload * that takes a request object and an {@code AsyncHandler}, which throws an {@code UnsupportedOperationException}. */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class AbstractAmazonSQSAsync extends AbstractAmazonSQS implements AmazonSQSAsync { protected AbstractAmazonSQSAsync() { } @Override public java.util.concurrent.Future<AddPermissionResult> addPermissionAsync(AddPermissionRequest request) { return addPermissionAsync(request, null); } @Override public java.util.concurrent.Future<AddPermissionResult> addPermissionAsync(AddPermissionRequest request, com.amazonaws.handlers.AsyncHandler<AddPermissionRequest, AddPermissionResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } /** * Simplified method form for invoking the AddPermission operation. 
     *
     * @see #addPermissionAsync(AddPermissionRequest)
     */
    @Override
    public java.util.concurrent.Future<AddPermissionResult> addPermissionAsync(String queueUrl, String label, java.util.List<String> aWSAccountIds,
            java.util.List<String> actions) {
        // Builds the request object and delegates to the canonical overload
        // (which throws UnsupportedOperationException unless overridden).
        return addPermissionAsync(new AddPermissionRequest().withQueueUrl(queueUrl).withLabel(label).withAWSAccountIds(aWSAccountIds).withActions(actions));
    }

    /**
     * Simplified method form for invoking the AddPermission operation with an AsyncHandler.
     *
     * @see #addPermissionAsync(AddPermissionRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<AddPermissionResult> addPermissionAsync(String queueUrl, String label, java.util.List<String> aWSAccountIds,
            java.util.List<String> actions, com.amazonaws.handlers.AsyncHandler<AddPermissionRequest, AddPermissionResult> asyncHandler) {
        return addPermissionAsync(new AddPermissionRequest().withQueueUrl(queueUrl).withLabel(label).withAWSAccountIds(aWSAccountIds).withActions(actions),
                asyncHandler);
    }

    @Override
    public java.util.concurrent.Future<ChangeMessageVisibilityResult> changeMessageVisibilityAsync(ChangeMessageVisibilityRequest request) {
        // Convenience form: null handler.
        return changeMessageVisibilityAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<ChangeMessageVisibilityResult> changeMessageVisibilityAsync(ChangeMessageVisibilityRequest request,
            com.amazonaws.handlers.AsyncHandler<ChangeMessageVisibilityRequest, ChangeMessageVisibilityResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    /**
     * Simplified method form for invoking the ChangeMessageVisibility operation.
     *
     * @see #changeMessageVisibilityAsync(ChangeMessageVisibilityRequest)
     */
    @Override
    public java.util.concurrent.Future<ChangeMessageVisibilityResult> changeMessageVisibilityAsync(String queueUrl, String receiptHandle,
            Integer visibilityTimeout) {
        return changeMessageVisibilityAsync(new ChangeMessageVisibilityRequest().withQueueUrl(queueUrl).withReceiptHandle(receiptHandle)
                .withVisibilityTimeout(visibilityTimeout));
    }

    /**
     * Simplified method form for invoking the ChangeMessageVisibility operation with an AsyncHandler.
     *
     * @see #changeMessageVisibilityAsync(ChangeMessageVisibilityRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<ChangeMessageVisibilityResult> changeMessageVisibilityAsync(String queueUrl, String receiptHandle,
            Integer visibilityTimeout, com.amazonaws.handlers.AsyncHandler<ChangeMessageVisibilityRequest, ChangeMessageVisibilityResult> asyncHandler) {
        return changeMessageVisibilityAsync(
                new ChangeMessageVisibilityRequest().withQueueUrl(queueUrl).withReceiptHandle(receiptHandle).withVisibilityTimeout(visibilityTimeout),
                asyncHandler);
    }

    @Override
    public java.util.concurrent.Future<ChangeMessageVisibilityBatchResult> changeMessageVisibilityBatchAsync(ChangeMessageVisibilityBatchRequest request) {
        return changeMessageVisibilityBatchAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<ChangeMessageVisibilityBatchResult> changeMessageVisibilityBatchAsync(ChangeMessageVisibilityBatchRequest request,
            com.amazonaws.handlers.AsyncHandler<ChangeMessageVisibilityBatchRequest, ChangeMessageVisibilityBatchResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    /**
     * Simplified method form for invoking the ChangeMessageVisibilityBatch operation.
     *
     * @see #changeMessageVisibilityBatchAsync(ChangeMessageVisibilityBatchRequest)
     */
    @Override
    public java.util.concurrent.Future<ChangeMessageVisibilityBatchResult> changeMessageVisibilityBatchAsync(String queueUrl,
            java.util.List<ChangeMessageVisibilityBatchRequestEntry> entries) {
        return changeMessageVisibilityBatchAsync(new ChangeMessageVisibilityBatchRequest().withQueueUrl(queueUrl).withEntries(entries));
    }

    /**
     * Simplified method form for invoking the ChangeMessageVisibilityBatch operation with an AsyncHandler.
     *
     * @see #changeMessageVisibilityBatchAsync(ChangeMessageVisibilityBatchRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<ChangeMessageVisibilityBatchResult> changeMessageVisibilityBatchAsync(String queueUrl,
            java.util.List<ChangeMessageVisibilityBatchRequestEntry> entries,
            com.amazonaws.handlers.AsyncHandler<ChangeMessageVisibilityBatchRequest, ChangeMessageVisibilityBatchResult> asyncHandler) {
        return changeMessageVisibilityBatchAsync(new ChangeMessageVisibilityBatchRequest().withQueueUrl(queueUrl).withEntries(entries), asyncHandler);
    }

    @Override
    public java.util.concurrent.Future<CreateQueueResult> createQueueAsync(CreateQueueRequest request) {
        return createQueueAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<CreateQueueResult> createQueueAsync(CreateQueueRequest request,
            com.amazonaws.handlers.AsyncHandler<CreateQueueRequest, CreateQueueResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    /**
     * Simplified method form for invoking the CreateQueue operation.
     *
     * @see #createQueueAsync(CreateQueueRequest)
     */
    @Override
    public java.util.concurrent.Future<CreateQueueResult> createQueueAsync(String queueName) {
        return createQueueAsync(new CreateQueueRequest().withQueueName(queueName));
    }

    /**
     * Simplified method form for invoking the CreateQueue operation with an AsyncHandler.
     *
     * @see #createQueueAsync(CreateQueueRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<CreateQueueResult> createQueueAsync(String queueName,
            com.amazonaws.handlers.AsyncHandler<CreateQueueRequest, CreateQueueResult> asyncHandler) {
        return createQueueAsync(new CreateQueueRequest().withQueueName(queueName), asyncHandler);
    }

    @Override
    public java.util.concurrent.Future<DeleteMessageResult> deleteMessageAsync(DeleteMessageRequest request) {
        return deleteMessageAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DeleteMessageResult> deleteMessageAsync(DeleteMessageRequest request,
            com.amazonaws.handlers.AsyncHandler<DeleteMessageRequest, DeleteMessageResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    /**
     * Simplified method form for invoking the DeleteMessage operation.
     *
     * @see #deleteMessageAsync(DeleteMessageRequest)
     */
    @Override
    public java.util.concurrent.Future<DeleteMessageResult> deleteMessageAsync(String queueUrl, String receiptHandle) {
        return deleteMessageAsync(new DeleteMessageRequest().withQueueUrl(queueUrl).withReceiptHandle(receiptHandle));
    }

    /**
     * Simplified method form for invoking the DeleteMessage operation with an AsyncHandler.
     *
     * @see #deleteMessageAsync(DeleteMessageRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<DeleteMessageResult> deleteMessageAsync(String queueUrl, String receiptHandle,
            com.amazonaws.handlers.AsyncHandler<DeleteMessageRequest, DeleteMessageResult> asyncHandler) {
        return deleteMessageAsync(new DeleteMessageRequest().withQueueUrl(queueUrl).withReceiptHandle(receiptHandle), asyncHandler);
    }

    @Override
    public java.util.concurrent.Future<DeleteMessageBatchResult> deleteMessageBatchAsync(DeleteMessageBatchRequest request) {
        return deleteMessageBatchAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DeleteMessageBatchResult> deleteMessageBatchAsync(DeleteMessageBatchRequest request,
            com.amazonaws.handlers.AsyncHandler<DeleteMessageBatchRequest, DeleteMessageBatchResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    /**
     * Simplified method form for invoking the DeleteMessageBatch operation.
     *
     * @see #deleteMessageBatchAsync(DeleteMessageBatchRequest)
     */
    @Override
    public java.util.concurrent.Future<DeleteMessageBatchResult> deleteMessageBatchAsync(String queueUrl,
            java.util.List<DeleteMessageBatchRequestEntry> entries) {
        return deleteMessageBatchAsync(new DeleteMessageBatchRequest().withQueueUrl(queueUrl).withEntries(entries));
    }

    /**
     * Simplified method form for invoking the DeleteMessageBatch operation with an AsyncHandler.
     *
     * @see #deleteMessageBatchAsync(DeleteMessageBatchRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<DeleteMessageBatchResult> deleteMessageBatchAsync(String queueUrl,
            java.util.List<DeleteMessageBatchRequestEntry> entries,
            com.amazonaws.handlers.AsyncHandler<DeleteMessageBatchRequest, DeleteMessageBatchResult> asyncHandler) {
        return deleteMessageBatchAsync(new DeleteMessageBatchRequest().withQueueUrl(queueUrl).withEntries(entries), asyncHandler);
    }

    @Override
    public java.util.concurrent.Future<DeleteQueueResult> deleteQueueAsync(DeleteQueueRequest request) {
        return deleteQueueAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DeleteQueueResult> deleteQueueAsync(DeleteQueueRequest request,
            com.amazonaws.handlers.AsyncHandler<DeleteQueueRequest, DeleteQueueResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    /**
     * Simplified method form for invoking the DeleteQueue operation.
     *
     * @see #deleteQueueAsync(DeleteQueueRequest)
     */
    @Override
    public java.util.concurrent.Future<DeleteQueueResult> deleteQueueAsync(String queueUrl) {
        return deleteQueueAsync(new DeleteQueueRequest().withQueueUrl(queueUrl));
    }

    /**
     * Simplified method form for invoking the DeleteQueue operation with an AsyncHandler.
* * @see #deleteQueueAsync(DeleteQueueRequest, com.amazonaws.handlers.AsyncHandler) */ @Override public java.util.concurrent.Future<DeleteQueueResult> deleteQueueAsync(String queueUrl, com.amazonaws.handlers.AsyncHandler<DeleteQueueRequest, DeleteQueueResult> asyncHandler) { return deleteQueueAsync(new DeleteQueueRequest().withQueueUrl(queueUrl), asyncHandler); } @Override public java.util.concurrent.Future<GetQueueAttributesResult> getQueueAttributesAsync(GetQueueAttributesRequest request) { return getQueueAttributesAsync(request, null); } @Override public java.util.concurrent.Future<GetQueueAttributesResult> getQueueAttributesAsync(GetQueueAttributesRequest request, com.amazonaws.handlers.AsyncHandler<GetQueueAttributesRequest, GetQueueAttributesResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } /** * Simplified method form for invoking the GetQueueAttributes operation. * * @see #getQueueAttributesAsync(GetQueueAttributesRequest) */ @Override public java.util.concurrent.Future<GetQueueAttributesResult> getQueueAttributesAsync(String queueUrl, java.util.List<String> attributeNames) { return getQueueAttributesAsync(new GetQueueAttributesRequest().withQueueUrl(queueUrl).withAttributeNames(attributeNames)); } /** * Simplified method form for invoking the GetQueueAttributes operation with an AsyncHandler. 
* * @see #getQueueAttributesAsync(GetQueueAttributesRequest, com.amazonaws.handlers.AsyncHandler) */ @Override public java.util.concurrent.Future<GetQueueAttributesResult> getQueueAttributesAsync(String queueUrl, java.util.List<String> attributeNames, com.amazonaws.handlers.AsyncHandler<GetQueueAttributesRequest, GetQueueAttributesResult> asyncHandler) { return getQueueAttributesAsync(new GetQueueAttributesRequest().withQueueUrl(queueUrl).withAttributeNames(attributeNames), asyncHandler); } @Override public java.util.concurrent.Future<GetQueueUrlResult> getQueueUrlAsync(GetQueueUrlRequest request) { return getQueueUrlAsync(request, null); } @Override public java.util.concurrent.Future<GetQueueUrlResult> getQueueUrlAsync(GetQueueUrlRequest request, com.amazonaws.handlers.AsyncHandler<GetQueueUrlRequest, GetQueueUrlResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } /** * Simplified method form for invoking the GetQueueUrl operation. * * @see #getQueueUrlAsync(GetQueueUrlRequest) */ @Override public java.util.concurrent.Future<GetQueueUrlResult> getQueueUrlAsync(String queueName) { return getQueueUrlAsync(new GetQueueUrlRequest().withQueueName(queueName)); } /** * Simplified method form for invoking the GetQueueUrl operation with an AsyncHandler. 
* * @see #getQueueUrlAsync(GetQueueUrlRequest, com.amazonaws.handlers.AsyncHandler) */ @Override public java.util.concurrent.Future<GetQueueUrlResult> getQueueUrlAsync(String queueName, com.amazonaws.handlers.AsyncHandler<GetQueueUrlRequest, GetQueueUrlResult> asyncHandler) { return getQueueUrlAsync(new GetQueueUrlRequest().withQueueName(queueName), asyncHandler); } @Override public java.util.concurrent.Future<ListDeadLetterSourceQueuesResult> listDeadLetterSourceQueuesAsync(ListDeadLetterSourceQueuesRequest request) { return listDeadLetterSourceQueuesAsync(request, null); } @Override public java.util.concurrent.Future<ListDeadLetterSourceQueuesResult> listDeadLetterSourceQueuesAsync(ListDeadLetterSourceQueuesRequest request, com.amazonaws.handlers.AsyncHandler<ListDeadLetterSourceQueuesRequest, ListDeadLetterSourceQueuesResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<ListQueueTagsResult> listQueueTagsAsync(ListQueueTagsRequest request) { return listQueueTagsAsync(request, null); } @Override public java.util.concurrent.Future<ListQueueTagsResult> listQueueTagsAsync(ListQueueTagsRequest request, com.amazonaws.handlers.AsyncHandler<ListQueueTagsRequest, ListQueueTagsResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } /** * Simplified method form for invoking the ListQueueTags operation. * * @see #listQueueTagsAsync(ListQueueTagsRequest) */ @Override public java.util.concurrent.Future<ListQueueTagsResult> listQueueTagsAsync(String queueUrl) { return listQueueTagsAsync(new ListQueueTagsRequest().withQueueUrl(queueUrl)); } /** * Simplified method form for invoking the ListQueueTags operation with an AsyncHandler. 
* * @see #listQueueTagsAsync(ListQueueTagsRequest, com.amazonaws.handlers.AsyncHandler) */ @Override public java.util.concurrent.Future<ListQueueTagsResult> listQueueTagsAsync(String queueUrl, com.amazonaws.handlers.AsyncHandler<ListQueueTagsRequest, ListQueueTagsResult> asyncHandler) { return listQueueTagsAsync(new ListQueueTagsRequest().withQueueUrl(queueUrl), asyncHandler); } @Override public java.util.concurrent.Future<ListQueuesResult> listQueuesAsync(ListQueuesRequest request) { return listQueuesAsync(request, null); } @Override public java.util.concurrent.Future<ListQueuesResult> listQueuesAsync(ListQueuesRequest request, com.amazonaws.handlers.AsyncHandler<ListQueuesRequest, ListQueuesResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } /** * Simplified method form for invoking the ListQueues operation. * * @see #listQueuesAsync(ListQueuesRequest) */ @Override public java.util.concurrent.Future<ListQueuesResult> listQueuesAsync() { return listQueuesAsync(new ListQueuesRequest()); } /** * Simplified method form for invoking the ListQueues operation with an AsyncHandler. * * @see #listQueuesAsync(ListQueuesRequest, com.amazonaws.handlers.AsyncHandler) */ @Override public java.util.concurrent.Future<ListQueuesResult> listQueuesAsync(com.amazonaws.handlers.AsyncHandler<ListQueuesRequest, ListQueuesResult> asyncHandler) { return listQueuesAsync(new ListQueuesRequest(), asyncHandler); } /** * Simplified method form for invoking the ListQueues operation. * * @see #listQueuesAsync(ListQueuesRequest) */ @Override public java.util.concurrent.Future<ListQueuesResult> listQueuesAsync(String queueNamePrefix) { return listQueuesAsync(new ListQueuesRequest().withQueueNamePrefix(queueNamePrefix)); } /** * Simplified method form for invoking the ListQueues operation with an AsyncHandler. 
* * @see #listQueuesAsync(ListQueuesRequest, com.amazonaws.handlers.AsyncHandler) */ @Override public java.util.concurrent.Future<ListQueuesResult> listQueuesAsync(String queueNamePrefix, com.amazonaws.handlers.AsyncHandler<ListQueuesRequest, ListQueuesResult> asyncHandler) { return listQueuesAsync(new ListQueuesRequest().withQueueNamePrefix(queueNamePrefix), asyncHandler); } @Override public java.util.concurrent.Future<PurgeQueueResult> purgeQueueAsync(PurgeQueueRequest request) { return purgeQueueAsync(request, null); } @Override public java.util.concurrent.Future<PurgeQueueResult> purgeQueueAsync(PurgeQueueRequest request, com.amazonaws.handlers.AsyncHandler<PurgeQueueRequest, PurgeQueueResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<ReceiveMessageResult> receiveMessageAsync(ReceiveMessageRequest request) { return receiveMessageAsync(request, null); } @Override public java.util.concurrent.Future<ReceiveMessageResult> receiveMessageAsync(ReceiveMessageRequest request, com.amazonaws.handlers.AsyncHandler<ReceiveMessageRequest, ReceiveMessageResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } /** * Simplified method form for invoking the ReceiveMessage operation. * * @see #receiveMessageAsync(ReceiveMessageRequest) */ @Override public java.util.concurrent.Future<ReceiveMessageResult> receiveMessageAsync(String queueUrl) { return receiveMessageAsync(new ReceiveMessageRequest().withQueueUrl(queueUrl)); } /** * Simplified method form for invoking the ReceiveMessage operation with an AsyncHandler. 
* * @see #receiveMessageAsync(ReceiveMessageRequest, com.amazonaws.handlers.AsyncHandler) */ @Override public java.util.concurrent.Future<ReceiveMessageResult> receiveMessageAsync(String queueUrl, com.amazonaws.handlers.AsyncHandler<ReceiveMessageRequest, ReceiveMessageResult> asyncHandler) { return receiveMessageAsync(new ReceiveMessageRequest().withQueueUrl(queueUrl), asyncHandler); } @Override public java.util.concurrent.Future<RemovePermissionResult> removePermissionAsync(RemovePermissionRequest request) { return removePermissionAsync(request, null); } @Override public java.util.concurrent.Future<RemovePermissionResult> removePermissionAsync(RemovePermissionRequest request, com.amazonaws.handlers.AsyncHandler<RemovePermissionRequest, RemovePermissionResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } /** * Simplified method form for invoking the RemovePermission operation. * * @see #removePermissionAsync(RemovePermissionRequest) */ @Override public java.util.concurrent.Future<RemovePermissionResult> removePermissionAsync(String queueUrl, String label) { return removePermissionAsync(new RemovePermissionRequest().withQueueUrl(queueUrl).withLabel(label)); } /** * Simplified method form for invoking the RemovePermission operation with an AsyncHandler. 
* * @see #removePermissionAsync(RemovePermissionRequest, com.amazonaws.handlers.AsyncHandler) */ @Override public java.util.concurrent.Future<RemovePermissionResult> removePermissionAsync(String queueUrl, String label, com.amazonaws.handlers.AsyncHandler<RemovePermissionRequest, RemovePermissionResult> asyncHandler) { return removePermissionAsync(new RemovePermissionRequest().withQueueUrl(queueUrl).withLabel(label), asyncHandler); } @Override public java.util.concurrent.Future<SendMessageResult> sendMessageAsync(SendMessageRequest request) { return sendMessageAsync(request, null); } @Override public java.util.concurrent.Future<SendMessageResult> sendMessageAsync(SendMessageRequest request, com.amazonaws.handlers.AsyncHandler<SendMessageRequest, SendMessageResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } /** * Simplified method form for invoking the SendMessage operation. * * @see #sendMessageAsync(SendMessageRequest) */ @Override public java.util.concurrent.Future<SendMessageResult> sendMessageAsync(String queueUrl, String messageBody) { return sendMessageAsync(new SendMessageRequest().withQueueUrl(queueUrl).withMessageBody(messageBody)); } /** * Simplified method form for invoking the SendMessage operation with an AsyncHandler. 
* * @see #sendMessageAsync(SendMessageRequest, com.amazonaws.handlers.AsyncHandler) */ @Override public java.util.concurrent.Future<SendMessageResult> sendMessageAsync(String queueUrl, String messageBody, com.amazonaws.handlers.AsyncHandler<SendMessageRequest, SendMessageResult> asyncHandler) { return sendMessageAsync(new SendMessageRequest().withQueueUrl(queueUrl).withMessageBody(messageBody), asyncHandler); } @Override public java.util.concurrent.Future<SendMessageBatchResult> sendMessageBatchAsync(SendMessageBatchRequest request) { return sendMessageBatchAsync(request, null); } @Override public java.util.concurrent.Future<SendMessageBatchResult> sendMessageBatchAsync(SendMessageBatchRequest request, com.amazonaws.handlers.AsyncHandler<SendMessageBatchRequest, SendMessageBatchResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } /** * Simplified method form for invoking the SendMessageBatch operation. * * @see #sendMessageBatchAsync(SendMessageBatchRequest) */ @Override public java.util.concurrent.Future<SendMessageBatchResult> sendMessageBatchAsync(String queueUrl, java.util.List<SendMessageBatchRequestEntry> entries) { return sendMessageBatchAsync(new SendMessageBatchRequest().withQueueUrl(queueUrl).withEntries(entries)); } /** * Simplified method form for invoking the SendMessageBatch operation with an AsyncHandler. 
* * @see #sendMessageBatchAsync(SendMessageBatchRequest, com.amazonaws.handlers.AsyncHandler) */ @Override public java.util.concurrent.Future<SendMessageBatchResult> sendMessageBatchAsync(String queueUrl, java.util.List<SendMessageBatchRequestEntry> entries, com.amazonaws.handlers.AsyncHandler<SendMessageBatchRequest, SendMessageBatchResult> asyncHandler) { return sendMessageBatchAsync(new SendMessageBatchRequest().withQueueUrl(queueUrl).withEntries(entries), asyncHandler); } @Override public java.util.concurrent.Future<SetQueueAttributesResult> setQueueAttributesAsync(SetQueueAttributesRequest request) { return setQueueAttributesAsync(request, null); } @Override public java.util.concurrent.Future<SetQueueAttributesResult> setQueueAttributesAsync(SetQueueAttributesRequest request, com.amazonaws.handlers.AsyncHandler<SetQueueAttributesRequest, SetQueueAttributesResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } /** * Simplified method form for invoking the SetQueueAttributes operation. * * @see #setQueueAttributesAsync(SetQueueAttributesRequest) */ @Override public java.util.concurrent.Future<SetQueueAttributesResult> setQueueAttributesAsync(String queueUrl, java.util.Map<String, String> attributes) { return setQueueAttributesAsync(new SetQueueAttributesRequest().withQueueUrl(queueUrl).withAttributes(attributes)); } /** * Simplified method form for invoking the SetQueueAttributes operation with an AsyncHandler. 
* * @see #setQueueAttributesAsync(SetQueueAttributesRequest, com.amazonaws.handlers.AsyncHandler) */ @Override public java.util.concurrent.Future<SetQueueAttributesResult> setQueueAttributesAsync(String queueUrl, java.util.Map<String, String> attributes, com.amazonaws.handlers.AsyncHandler<SetQueueAttributesRequest, SetQueueAttributesResult> asyncHandler) { return setQueueAttributesAsync(new SetQueueAttributesRequest().withQueueUrl(queueUrl).withAttributes(attributes), asyncHandler); } @Override public java.util.concurrent.Future<TagQueueResult> tagQueueAsync(TagQueueRequest request) { return tagQueueAsync(request, null); } @Override public java.util.concurrent.Future<TagQueueResult> tagQueueAsync(TagQueueRequest request, com.amazonaws.handlers.AsyncHandler<TagQueueRequest, TagQueueResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } /** * Simplified method form for invoking the TagQueue operation. * * @see #tagQueueAsync(TagQueueRequest) */ @Override public java.util.concurrent.Future<TagQueueResult> tagQueueAsync(String queueUrl, java.util.Map<String, String> tags) { return tagQueueAsync(new TagQueueRequest().withQueueUrl(queueUrl).withTags(tags)); } /** * Simplified method form for invoking the TagQueue operation with an AsyncHandler. 
* * @see #tagQueueAsync(TagQueueRequest, com.amazonaws.handlers.AsyncHandler) */ @Override public java.util.concurrent.Future<TagQueueResult> tagQueueAsync(String queueUrl, java.util.Map<String, String> tags, com.amazonaws.handlers.AsyncHandler<TagQueueRequest, TagQueueResult> asyncHandler) { return tagQueueAsync(new TagQueueRequest().withQueueUrl(queueUrl).withTags(tags), asyncHandler); } @Override public java.util.concurrent.Future<UntagQueueResult> untagQueueAsync(UntagQueueRequest request) { return untagQueueAsync(request, null); } @Override public java.util.concurrent.Future<UntagQueueResult> untagQueueAsync(UntagQueueRequest request, com.amazonaws.handlers.AsyncHandler<UntagQueueRequest, UntagQueueResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } /** * Simplified method form for invoking the UntagQueue operation. * * @see #untagQueueAsync(UntagQueueRequest) */ @Override public java.util.concurrent.Future<UntagQueueResult> untagQueueAsync(String queueUrl, java.util.List<String> tagKeys) { return untagQueueAsync(new UntagQueueRequest().withQueueUrl(queueUrl).withTagKeys(tagKeys)); } /** * Simplified method form for invoking the UntagQueue operation with an AsyncHandler. * * @see #untagQueueAsync(UntagQueueRequest, com.amazonaws.handlers.AsyncHandler) */ @Override public java.util.concurrent.Future<UntagQueueResult> untagQueueAsync(String queueUrl, java.util.List<String> tagKeys, com.amazonaws.handlers.AsyncHandler<UntagQueueRequest, UntagQueueResult> asyncHandler) { return untagQueueAsync(new UntagQueueRequest().withQueueUrl(queueUrl).withTagKeys(tagKeys), asyncHandler); } }
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.mahout.clustering.lda.cvb; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.apache.hadoop.fs.Path; import org.apache.mahout.math.Matrix; import org.apache.mahout.math.MatrixSlice; import org.apache.mahout.math.SparseRowMatrix; import org.apache.mahout.math.Vector; import org.apache.mahout.math.VectorIterable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.Callable; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; /** * Multithreaded LDA model trainer class, which primarily operates by running a "map/reduce" * operation, all in memory locally (ie not a hadoop job!) : the "map" operation is to take * the "read-only" {@link TopicModel} and use it to iteratively learn the p(topic|term, doc) * distribution for documents (this can be done in parallel across many documents, as the * "read-only" model is, well, read-only. 
Then the outputs of this are "reduced" onto the
 * "write" model, and these updates are not parallelizable in the same way: individual
 * documents can't be added to the same entries in different threads at the same time, but
 * updates across many topics to the same term from the same document can be done in parallel,
 * so they are.
 *
 * Because computation is done asynchronously, when iteration is done, it's important to call
 * the stop() method, which blocks until work is complete.
 *
 * Setting the read model and the write model to be the same object may not quite work yet,
 * on account of parallelism badness.
 */
public class ModelTrainer {

  private static final Logger log = LoggerFactory.getLogger(ModelTrainer.class);

  private final int numTopics;
  private final int numTerms;
  // read/write models are swapped (and the new write model reset) at the end of stop()
  private TopicModel readModel;
  private TopicModel writeModel;
  private ThreadPoolExecutor threadPool;
  private BlockingQueue<Runnable> workQueue;
  private final int numTrainThreads;
  // true when read and write model are the same object; training then proceeds in
  // synchronous batches (batchTrain) instead of via the asynchronous work queue
  private final boolean isReadWrite;

  /**
   * @param initialReadModel model used (read-only) for inference
   * @param initialWriteModel model accumulating (write-only) the learned counts
   * @param numTrainThreads number of worker threads (also the synchronous batch size)
   * @param numTopics number of topics
   * @param numTerms number of terms in the vocabulary
   */
  public ModelTrainer(TopicModel initialReadModel, TopicModel initialWriteModel,
      int numTrainThreads, int numTopics, int numTerms) {
    this.readModel = initialReadModel;
    this.writeModel = initialWriteModel;
    this.numTrainThreads = numTrainThreads;
    this.numTopics = numTopics;
    this.numTerms = numTerms;
    isReadWrite = initialReadModel == initialWriteModel;
  }

  /**
   * WARNING: this constructor may not lead to good behavior. What should be verified is that
   * the model updating process does not conflict with model reading. It might work, but then
   * again, it might not!
   * @param model to be used for both reading (inference) and accumulating (learning)
   * @param numTrainThreads number of worker threads
   * @param numTopics number of topics
   * @param numTerms number of terms in the vocabulary
   */
  public ModelTrainer(TopicModel model, int numTrainThreads, int numTopics, int numTerms) {
    this(model, model, numTrainThreads, numTopics, numTerms);
  }

  public TopicModel getReadModel() {
    return readModel;
  }

  /** Creates and pre-starts the fixed-size training thread pool and its bounded work queue. */
  public void start() {
    log.info("Starting training threadpool with {} threads", numTrainThreads);
    workQueue = new ArrayBlockingQueue<Runnable>(numTrainThreads * 10);
    threadPool = new ThreadPoolExecutor(numTrainThreads, numTrainThreads, 0, TimeUnit.SECONDS,
        workQueue);
    threadPool.allowCoreThreadTimeOut(false);
    threadPool.prestartAllCoreThreads();
  }

  /** Trains with a single doc-topic inference iteration per document. */
  public void train(VectorIterable matrix, VectorIterable docTopicCounts) {
    train(matrix, docTopicCounts, 1);
  }

  /** Perplexity over the whole corpus (testFraction == 0 means "use every document"). */
  public double calculatePerplexity(VectorIterable matrix, VectorIterable docTopicCounts) {
    return calculatePerplexity(matrix, docTopicCounts, 0);
  }

  /**
   * Computes held-out perplexity, normalized by the L1 norm of the sampled documents.
   *
   * @param testFraction fraction of docs to sample; 0 means all docs. A doc is sampled when
   *        its index is a multiple of 1/testFraction.
   */
  public double calculatePerplexity(VectorIterable matrix, VectorIterable docTopicCounts,
      double testFraction) {
    Iterator<MatrixSlice> docIterator = matrix.iterator();
    Iterator<MatrixSlice> docTopicIterator = docTopicCounts.iterator();
    double perplexity = 0;
    double matrixNorm = 0;
    while (docIterator.hasNext() && docTopicIterator.hasNext()) {
      MatrixSlice docSlice = docIterator.next();
      MatrixSlice topicSlice = docTopicIterator.next();
      int docId = docSlice.index();
      Vector document = docSlice.vector();
      Vector topicDist = topicSlice.vector();
      // int % double is a double remainder; == 0 selects docIds that are exact multiples
      if (testFraction == 0 || docId % (1/testFraction) == 0) {
        trainSync(document, topicDist, false, 10);
        perplexity += readModel.perplexity(document, topicDist);
        matrixNorm += document.norm(1);
      }
    }
    return perplexity / matrixNorm;
  }

  /**
   * Trains over the corpus, asynchronously (separate read/write models) or in synchronous
   * batches of numTrainThreads documents (shared read/write model). Blocks until all work
   * is complete (via stop()).
   */
  public void train(VectorIterable matrix, VectorIterable docTopicCounts, int numDocTopicIters) {
    start();
    Iterator<MatrixSlice> docIterator = matrix.iterator();
    Iterator<MatrixSlice> docTopicIterator = docTopicCounts.iterator();
    long startTime = System.nanoTime();
    int i = 0;
    double[] times = new double[100];
    Map<Vector, Vector> batch = Maps.newHashMap();
    int numTokensInBatch = 0;
    long batchStart = System.nanoTime();
    while (docIterator.hasNext() && docTopicIterator.hasNext()) {
      i++;
      Vector document = docIterator.next().vector();
      Vector topicDist = docTopicIterator.next().vector();
      if (isReadWrite) {
        if (batch.size() < numTrainThreads) {
          batch.put(document, topicDist);
          if (log.isDebugEnabled()) {
            numTokensInBatch += document.getNumNondefaultElements();
          }
        } else {
          batchTrain(batch, true, numDocTopicIters);
          long time = System.nanoTime();
          log.debug("trained {} docs with {} tokens, start time {}, end time {}",
              new Object[] {numTrainThreads, numTokensInBatch, batchStart, time});
          batchStart = time;
          numTokensInBatch = 0;
          // BUGFIX: the trained batch was previously never cleared (the same stale batch
          // was re-trained for every subsequent document) and the current document was
          // silently dropped. Start a fresh batch seeded with this document.
          batch.clear();
          batch.put(document, topicDist);
          if (log.isDebugEnabled()) {
            numTokensInBatch += document.getNumNondefaultElements();
          }
        }
      } else {
        long start = System.nanoTime();
        train(document, topicDist, true, numDocTopicIters);
        if (log.isDebugEnabled()) {
          times[i % times.length] =
              (System.nanoTime() - start) / (1.0e6 * document.getNumNondefaultElements());
          if (i % 100 == 0) {
            long time = System.nanoTime() - startTime;
            log.debug("trained {} documents in {}ms", i, time / 1.0e6);
            if (i % 500 == 0) {
              Arrays.sort(times);
              log.debug("training took median {}ms per token-instance", times[times.length / 2]);
            }
          }
        }
      }
    }
    // BUGFIX: train any partially-filled final batch instead of discarding it.
    if (isReadWrite && !batch.isEmpty()) {
      batchTrain(batch, true, numDocTopicIters);
    }
    stop();
  }

  /**
   * Synchronously trains a batch of documents across the thread pool, then (optionally)
   * folds the per-document topic models into the write model. Retries on interruption.
   */
  public void batchTrain(Map<Vector, Vector> batch, boolean update, int numDocTopicsIters) {
    while (true) {
      try {
        List<TrainerRunnable> runnables = Lists.newArrayList();
        for (Map.Entry<Vector, Vector> entry : batch.entrySet()) {
          runnables.add(new TrainerRunnable(readModel, null, entry.getKey(), entry.getValue(),
              new SparseRowMatrix(numTopics, numTerms, true), numDocTopicsIters));
        }
        threadPool.invokeAll(runnables);
        if (update) {
          for (TrainerRunnable runnable : runnables) {
            writeModel.update(runnable.docTopicModel);
          }
        }
        break;
      } catch (InterruptedException e) {
        // deliberate retry: the batch must be trained before the caller can proceed
        log.warn("Interrupted during batch training, retrying!", e);
      }
    }
  }

  /** Submits one document to the asynchronous work queue, blocking (and retrying) if full. */
  public void train(Vector document, Vector docTopicCounts, boolean update, int numDocTopicIters) {
    while (true) {
      try {
        workQueue.put(new TrainerRunnable(readModel, update ? writeModel : null, document,
            docTopicCounts, new SparseRowMatrix(numTopics, numTerms, true), numDocTopicIters));
        return;
      } catch (InterruptedException e) {
        // deliberate retry: do not drop the document
        log.warn("Interrupted waiting to submit document to work queue: " + document, e);
      }
    }
  }

  /** Trains one document synchronously on the calling thread. */
  public void trainSync(Vector document, Vector docTopicCounts, boolean update,
      int numDocTopicIters) {
    new TrainerRunnable(readModel, update ? writeModel : null, document, docTopicCounts,
        new SparseRowMatrix(numTopics, numTerms, true), numDocTopicIters).run();
  }

  /** Perplexity of a single document after numDocTopicIters inference iterations. */
  public double calculatePerplexity(Vector document, Vector docTopicCounts,
      int numDocTopicIters) {
    TrainerRunnable runner = new TrainerRunnable(readModel, null, document, docTopicCounts,
        new SparseRowMatrix(numTopics, numTerms, true), numDocTopicIters);
    return runner.call();
  }

  /**
   * Shuts down the thread pool, waits for outstanding work, then swaps the read and write
   * models (resetting the new write model) so the freshly-learned model is readable.
   */
  public void stop() {
    long startTime = System.nanoTime();
    log.info("Initiating stopping of training threadpool");
    try {
      threadPool.shutdown();
      if (!threadPool.awaitTermination(60, TimeUnit.SECONDS)) {
        log.warn("Threadpool timed out on await termination - jobs still running!");
      }
      long newTime = System.nanoTime();
      log.info("threadpool took: {}ms", (newTime - startTime) / 1.0e6);
      startTime = newTime;
      writeModel.awaitTermination();
      newTime = System.nanoTime();
      log.info("writeModel.awaitTermination() took {}ms", (newTime - startTime) / 1.0e6);
      TopicModel tmpModel = writeModel;
      writeModel = readModel;
      readModel = tmpModel;
      writeModel.reset();
    } catch (InterruptedException e) {
      // restore interrupt status for callers before bailing out
      Thread.currentThread().interrupt();
      log.error("Interrupted shutting down!", e);
    }
  }

  public void persist(Path outputPath) throws IOException {
    readModel.persist(outputPath, true);
  }

  /**
   * Unit of work: runs numDocTopicIters inference iterations on one document against the
   * read model, then (if a write model was supplied) folds the result into it. As a
   * Callable it additionally returns the document's perplexity.
   */
  private static class TrainerRunnable implements Runnable, Callable<Double> {
    private final TopicModel readModel;
    private final TopicModel writeModel;
    private final Vector document;
    private final Vector docTopics;
    private final Matrix docTopicModel;
    private final int numDocTopicIters;

    private TrainerRunnable(TopicModel readModel, TopicModel writeModel, Vector document,
        Vector docTopics, Matrix docTopicModel, int numDocTopicIters) {
      this.readModel = readModel;
      this.writeModel = writeModel;
      this.document = document;
      this.docTopics = docTopics;
      this.docTopicModel = docTopicModel;
      this.numDocTopicIters = numDocTopicIters;
    }

    @Override
    public void run() {
      for (int i = 0; i < numDocTopicIters; i++) {
        // synchronous read-only call:
        readModel.trainDocTopicModel(document, docTopics, docTopicModel);
      }
      if (writeModel != null) {
        // parallel call which is read-only on the docTopicModel, and write-only on the writeModel
        // this method does not return until all rows of the docTopicModel have been submitted
        // to write work queues
        writeModel.update(docTopicModel);
      }
    }

    @Override
    public Double call() {
      run();
      return readModel.perplexity(document, docTopics);
    }
  }
}
package net.bytebuddy.dynamic.loading; import net.bytebuddy.build.HashCodeAndEqualsPlugin; import net.bytebuddy.description.type.TypeDescription; import java.io.File; import java.lang.instrument.Instrumentation; import java.security.ProtectionDomain; import java.util.Map; /** * A strategy for loading a collection of types. * * @param <T> The least specific type of class loader this strategy can apply to. */ public interface ClassLoadingStrategy<T extends ClassLoader> { /** * A type-safe constant representing the bootstrap class loader which is represented by {@code null} within Java. */ ClassLoader BOOTSTRAP_LOADER = null; /** * An undefined protection domain. */ ProtectionDomain NO_PROTECTION_DOMAIN = null; /** * Loads a given collection of classes given their binary representation. * * @param classLoader The class loader to used for loading the classes. * @param types Byte array representations of the types to be loaded mapped by their descriptions, * where an iteration order defines an order in which they are supposed to be loaded, * if relevant. * @return A collection of the loaded classes which will be initialized in the iteration order of the * returned collection. */ Map<TypeDescription, Class<?>> load(T classLoader, Map<TypeDescription, byte[]> types); /** * This class contains implementations of default class loading strategies. */ enum Default implements Configurable<ClassLoader> { /** * This strategy creates a new {@link net.bytebuddy.dynamic.loading.ByteArrayClassLoader} with the given * class loader as its parent. The byte array class loader is aware of a any dynamically created type and can * natively load the given classes. This allows to load classes with cyclic load-time dependencies since the * byte array class loader is queried on each encountered unknown class. 
Due to the encapsulation of the * classes that were loaded by a byte array class loader, this strategy will lead to the unloading of these * classes once this class loader, its classes or any instances of these classes become unreachable. */ WRAPPER(new WrappingDispatcher(ByteArrayClassLoader.PersistenceHandler.LATENT, WrappingDispatcher.PARENT_FIRST)), /** * The strategy is identical to {@link ClassLoadingStrategy.Default#WRAPPER} but exposes * the byte arrays that represent a class by {@link java.lang.ClassLoader#getResourceAsStream(String)}. For * this purpose, all class files are persisted as byte arrays within the wrapping class loader. */ WRAPPER_PERSISTENT(new WrappingDispatcher(ByteArrayClassLoader.PersistenceHandler.MANIFEST, WrappingDispatcher.PARENT_FIRST)), /** * <p> * The child-first class loading strategy is a modified version of the * {@link ClassLoadingStrategy.Default#WRAPPER} where the dynamic types are given * priority over any types of a parent class loader with the same name. * </p> * <p> * <b>Important</b>: This does <i>not</i> replace a type of the same name, but it makes the type invisible by * the reach of this class loader. * </p> */ CHILD_FIRST(new WrappingDispatcher(ByteArrayClassLoader.PersistenceHandler.LATENT, WrappingDispatcher.CHILD_FIRST)), /** * The strategy is identical to {@link ClassLoadingStrategy.Default#CHILD_FIRST} but * exposes the byte arrays that represent a class by {@link java.lang.ClassLoader#getResourceAsStream(String)}. * For this purpose, all class files are persisted as byte arrays within the wrapping class loader. */ CHILD_FIRST_PERSISTENT(new WrappingDispatcher(ByteArrayClassLoader.PersistenceHandler.MANIFEST, WrappingDispatcher.CHILD_FIRST)), /** * <p> * This strategy does not create a new class loader but injects all classes into the given {@link java.lang.ClassLoader} * by reflective access.
This prevents the loading of classes with cyclic load-time dependencies but avoids the * creation of an additional class loader. The advantage of this strategy is that the loaded classes will have * package-private access to other classes within their package of the class loader into which they are * injected what is not permitted when the wrapper class loader is used. This strategy is implemented using a * {@link net.bytebuddy.dynamic.loading.ClassInjector.UsingReflection}. Note that this strategy usually yields * a better runtime performance. * </p> * <p> * <b>Important</b>: Class injection requires access to JVM internal methods that are sealed by security managers and the * Java Platform module system. Since Java 11, access to these methods is no longer feasible unless those packages * are explicitly opened. * </p> * <p> * <b>Note</b>: This class loader does not define packages for injected classes by default. Therefore, calls to * {@link Class#getPackage()} might return {@code null}. Packages are only defined manually by a class loader prior to * Java 9. * </p> */ INJECTION(new InjectionDispatcher()); /** * The default behavior when attempting to load a type that was already loaded. */ private static final boolean DEFAULT_FORBID_EXISTING = true; /** * The dispatcher to be used when loading a class. */ private final Configurable<ClassLoader> dispatcher; /** * Creates a new default class loading strategy. * * @param dispatcher The dispatcher to be used when loading a class. 
*/ Default(Configurable<ClassLoader> dispatcher) { this.dispatcher = dispatcher; } /** * {@inheritDoc} */ public Map<TypeDescription, Class<?>> load(ClassLoader classLoader, Map<TypeDescription, byte[]> types) { return dispatcher.load(classLoader, types); } /** * {@inheritDoc} */ public Configurable<ClassLoader> with(ProtectionDomain protectionDomain) { return dispatcher.with(protectionDomain); } /** * {@inheritDoc} */ public Configurable<ClassLoader> with(PackageDefinitionStrategy packageDefinitionStrategy) { return dispatcher.with(packageDefinitionStrategy); } /** * {@inheritDoc} */ public Configurable<ClassLoader> allowExistingTypes() { return dispatcher.allowExistingTypes(); } /** * {@inheritDoc} */ public Configurable<ClassLoader> opened() { return dispatcher.opened(); } /** * <p> * A class loading strategy which applies a class loader injection while applying a given {@link java.security.ProtectionDomain} on class injection. * </p> * <p> * <b>Important</b>: Class injection requires access to JVM internal methods that are sealed by security managers and the * Java Platform module system. Since Java 11, access to these methods is no longer feasible unless those packages * are explicitly opened. * </p> */ @HashCodeAndEqualsPlugin.Enhance protected static class InjectionDispatcher implements ClassLoadingStrategy.Configurable<ClassLoader> { /** * The protection domain to apply or {@code null} if no protection domain is set. */ @HashCodeAndEqualsPlugin.ValueHandling(HashCodeAndEqualsPlugin.ValueHandling.Sort.REVERSE_NULLABILITY) private final ProtectionDomain protectionDomain; /** * The package definer to be used for querying information on package information. */ private final PackageDefinitionStrategy packageDefinitionStrategy; /** * Determines if an exception should be thrown when attempting to load a type that already exists. */ private final boolean forbidExisting; /** * Creates a new injection dispatcher. 
*/ protected InjectionDispatcher() { this(NO_PROTECTION_DOMAIN, PackageDefinitionStrategy.NoOp.INSTANCE, DEFAULT_FORBID_EXISTING); } /** * Creates a new injection dispatcher. * * @param protectionDomain The protection domain to apply or {@code null} if no protection domain is set. * @param packageDefinitionStrategy The package definer to be used for querying information on package information. * @param forbidExisting Determines if an exception should be thrown when attempting to load a type that already exists. */ private InjectionDispatcher(ProtectionDomain protectionDomain, PackageDefinitionStrategy packageDefinitionStrategy, boolean forbidExisting) { this.protectionDomain = protectionDomain; this.packageDefinitionStrategy = packageDefinitionStrategy; this.forbidExisting = forbidExisting; } /** * {@inheritDoc} */ public Map<TypeDescription, Class<?>> load(ClassLoader classLoader, Map<TypeDescription, byte[]> types) { return new ClassInjector.UsingReflection(classLoader, protectionDomain, packageDefinitionStrategy, forbidExisting).inject(types); } /** * {@inheritDoc} */ public Configurable<ClassLoader> with(ProtectionDomain protectionDomain) { return new InjectionDispatcher(protectionDomain, packageDefinitionStrategy, forbidExisting); } /** * {@inheritDoc} */ public Configurable<ClassLoader> with(PackageDefinitionStrategy packageDefinitionStrategy) { return new InjectionDispatcher(protectionDomain, packageDefinitionStrategy, forbidExisting); } /** * {@inheritDoc} */ public Configurable<ClassLoader> allowExistingTypes() { return new InjectionDispatcher(protectionDomain, packageDefinitionStrategy, false); } /** * {@inheritDoc} */ public Configurable<ClassLoader> opened() { return this; } } /** * A class loading strategy which creates a wrapping class loader while applying a given * {@link java.security.ProtectionDomain} on class loading. 
*/ @HashCodeAndEqualsPlugin.Enhance protected static class WrappingDispatcher implements ClassLoadingStrategy.Configurable<ClassLoader> { /** * Indicates that a child first loading strategy should be attempted. */ private static final boolean CHILD_FIRST = true; /** * Indicates that a parent first loading strategy should be attempted. */ private static final boolean PARENT_FIRST = false; /** * The protection domain to apply or {@code null} if no protection domain is set. */ @HashCodeAndEqualsPlugin.ValueHandling(HashCodeAndEqualsPlugin.ValueHandling.Sort.REVERSE_NULLABILITY) private final ProtectionDomain protectionDomain; /** * The persistence handler to apply. */ private final ByteArrayClassLoader.PersistenceHandler persistenceHandler; /** * The package definer to be used for querying information on package information. */ private final PackageDefinitionStrategy packageDefinitionStrategy; /** * {@code true} if the created class loader should apply child-first semantics. */ private final boolean childFirst; /** * Determines if an exception should be thrown when attempting to load a type that already exists. */ private final boolean forbidExisting; /** * {@code true} if the class loader should be sealed. */ private final boolean sealed; /** * Creates a new wrapping dispatcher with a default protection domain and a default access control context. * * @param persistenceHandler The persistence handler to apply. * @param childFirst {@code true} if the created class loader should apply child-first semantics. */ protected WrappingDispatcher(ByteArrayClassLoader.PersistenceHandler persistenceHandler, boolean childFirst) { this(NO_PROTECTION_DOMAIN, PackageDefinitionStrategy.Trivial.INSTANCE, persistenceHandler, childFirst, DEFAULT_FORBID_EXISTING, true); } /** * Creates a new protection domain specific class loading wrapper. * * @param protectionDomain The protection domain to apply or {@code null} if no protection domain is set. 
* @param packageDefinitionStrategy The package definer to be used for querying information on package information. * @param persistenceHandler The persistence handler to apply. * @param childFirst {@code true} if the created class loader should apply child-first semantics. * @param forbidExisting Determines if an exception should be thrown when attempting to load a type that already exists. * @param sealed {@code true} if the class loader should be sealed. */ private WrappingDispatcher(ProtectionDomain protectionDomain, PackageDefinitionStrategy packageDefinitionStrategy, ByteArrayClassLoader.PersistenceHandler persistenceHandler, boolean childFirst, boolean forbidExisting, boolean sealed) { this.protectionDomain = protectionDomain; this.packageDefinitionStrategy = packageDefinitionStrategy; this.persistenceHandler = persistenceHandler; this.childFirst = childFirst; this.forbidExisting = forbidExisting; this.sealed = sealed; } /** * {@inheritDoc} */ public Map<TypeDescription, Class<?>> load(ClassLoader classLoader, Map<TypeDescription, byte[]> types) { return childFirst ? 
ByteArrayClassLoader.ChildFirst.load(classLoader, types, protectionDomain, persistenceHandler, packageDefinitionStrategy, forbidExisting, sealed) : ByteArrayClassLoader.load(classLoader, types, protectionDomain, persistenceHandler, packageDefinitionStrategy, forbidExisting, sealed); } /** * {@inheritDoc} */ public Configurable<ClassLoader> with(ProtectionDomain protectionDomain) { return new WrappingDispatcher(protectionDomain, packageDefinitionStrategy, persistenceHandler, childFirst, forbidExisting, sealed); } /** * {@inheritDoc} */ public Configurable<ClassLoader> with(PackageDefinitionStrategy packageDefinitionStrategy) { return new WrappingDispatcher(protectionDomain, packageDefinitionStrategy, persistenceHandler, childFirst, forbidExisting, sealed); } /** * {@inheritDoc} */ public Configurable<ClassLoader> allowExistingTypes() { return new WrappingDispatcher(protectionDomain, packageDefinitionStrategy, persistenceHandler, childFirst, false, sealed); } /** * {@inheritDoc} */ public Configurable<ClassLoader> opened() { return new WrappingDispatcher(protectionDomain, packageDefinitionStrategy, persistenceHandler, childFirst, forbidExisting, false); } } } /** * A {@link ClassLoadingStrategy} that allows configuring the strategy's behavior. * * @param <S> The least specific type of class loader this strategy can apply to. */ interface Configurable<S extends ClassLoader> extends ClassLoadingStrategy<S> { /** * Overrides the implicitly set default {@link java.security.ProtectionDomain} with an explicit one. * * @param protectionDomain The protection domain to apply or {@code null} if no protection domain is set. * @return This class loading strategy with an explicitly set {@link java.security.ProtectionDomain}. */ Configurable<S> with(ProtectionDomain protectionDomain); /** * Defines the supplied package definition strategy to be used for defining packages. * * @param packageDefinitionStrategy The package definer to be used. 
* @return A version of this class loading strategy that applies the supplied package definition strategy. */ Configurable<S> with(PackageDefinitionStrategy packageDefinitionStrategy); /** * Determines if this class loading strategy should not throw an exception when attempting to load a class that * was already loaded. In this case, the already loaded class is used instead of the generated class. * * @return A version of this class loading strategy that does not throw an exception when a class is already loaded. */ Configurable<S> allowExistingTypes(); /** * With an opened class loading strategy, it is assured that types can be added to the class loader, either by * indirect injection using this strategy or by creating a class loader that explicitly supports injection. * * @return A version of this class loading strategy that opens for future injections into a class loader. */ Configurable<S> opened(); } /** * A class loading strategy that uses a {@code java.lang.invoke.MethodHandles$Lookup} instance for defining types. * A lookup instance can define types only in the same class loader and in the same package as the type within which * it was created. The supplied lookup must have package privileges, i.e. it must not be a public lookup. */ @HashCodeAndEqualsPlugin.Enhance class UsingLookup implements ClassLoadingStrategy<ClassLoader> { /** * The class injector to use. */ private final ClassInjector classInjector; /** * Creates a new class loading strategy that uses a lookup type. * * @param classInjector The class injector to use. */ protected UsingLookup(ClassInjector classInjector) { this.classInjector = classInjector; } /** * Creates a new class loading strategy that uses a {@code java.lang.invoke.MethodHandles$Lookup} instance. * * @param lookup The lookup instance to use for defining new types. * @return A suitable class loading strategy. 
*/ public static ClassLoadingStrategy<ClassLoader> of(Object lookup) { return new UsingLookup(ClassInjector.UsingLookup.of(lookup)); } /** * {@inheritDoc} */ public Map<TypeDescription, Class<?>> load(ClassLoader classLoader, Map<TypeDescription, byte[]> types) { return classInjector.inject(types); } } /** * A class loading strategy which allows class injection into the bootstrap class loader if * appropriate. */ @HashCodeAndEqualsPlugin.Enhance class ForBootstrapInjection implements ClassLoadingStrategy<ClassLoader> { /** * The instrumentation to use. */ private final Instrumentation instrumentation; /** * The folder to save jar files in. */ private final File folder; /** * Creates a new injector which is capable of injecting classes into the bootstrap class loader. * * @param instrumentation The instrumentation to use. * @param folder The folder to save jar files in. */ public ForBootstrapInjection(Instrumentation instrumentation, File folder) { this.instrumentation = instrumentation; this.folder = folder; } /** * {@inheritDoc} */ public Map<TypeDescription, Class<?>> load(ClassLoader classLoader, Map<TypeDescription, byte[]> types) { ClassInjector classInjector = classLoader == null ? ClassInjector.UsingInstrumentation.of(folder, ClassInjector.UsingInstrumentation.Target.BOOTSTRAP, instrumentation) : new ClassInjector.UsingReflection(classLoader); return classInjector.inject(types); } } /** * <p> * A class loading strategy that injects a class using {@code sun.misc.Unsafe}. * </p> * <p> * <b>Important</b>: This strategy is no longer available after Java 11. * </p> */ @HashCodeAndEqualsPlugin.Enhance class ForUnsafeInjection implements ClassLoadingStrategy<ClassLoader> { /** * The protection domain to use or {@code null} if no protection domain is set. 
*/ @HashCodeAndEqualsPlugin.ValueHandling(HashCodeAndEqualsPlugin.ValueHandling.Sort.REVERSE_NULLABILITY) private final ProtectionDomain protectionDomain; /** * Creates a new class loading strategy for unsafe injection with a default protection domain. */ public ForUnsafeInjection() { this(NO_PROTECTION_DOMAIN); } /** * Creates a new class loading strategy for unsafe injection. * * @param protectionDomain The protection domain to use or {@code null} if no protection domain is set. */ public ForUnsafeInjection(ProtectionDomain protectionDomain) { this.protectionDomain = protectionDomain; } /** * {@inheritDoc} */ public Map<TypeDescription, Class<?>> load(ClassLoader classLoader, Map<TypeDescription, byte[]> types) { return new ClassInjector.UsingUnsafe(classLoader, protectionDomain).inject(types); } } }
/*
 * Copyright 2013 MovingBlocks
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.terasology.world.block.tiles;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Queues;
import com.google.common.math.IntMath;
import de.matthiasmann.twl.utils.PNGDecoder;
import gnu.trove.map.TObjectIntMap;
import gnu.trove.map.hash.TObjectIntHashMap;
import gnu.trove.procedure.TObjectIntProcedure;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.terasology.asset.Assets;
import org.terasology.assets.ResourceUrn;
import org.terasology.engine.paths.PathManager;
import org.terasology.math.Rect2f;
import org.terasology.math.TeraMath;
import org.terasology.math.geom.Vector2f;
import org.terasology.naming.Name;
import org.terasology.rendering.assets.atlas.Atlas;
import org.terasology.rendering.assets.atlas.AtlasData;
import org.terasology.rendering.assets.material.Material;
import org.terasology.rendering.assets.material.MaterialData;
import org.terasology.rendering.assets.texture.Texture;
import org.terasology.rendering.assets.texture.TextureData;
import org.terasology.rendering.assets.texture.subtexture.SubtextureData;

import javax.imageio.ImageIO;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.BlockingQueue;
import java.util.function.Consumer;

/**
 * Builds and maintains the world texture atlas: packs every registered
 * {@link BlockTile} (plus optional "Normal" and "Height" companion tiles) into
 * square power-of-two atlas textures, generates their mipmaps, and registers
 * the resulting texture, material, and per-module {@link Atlas} assets with
 * the engine.
 *
 * @author Immortius
 */
public class WorldAtlasImpl implements WorldAtlas {
    private static final Logger logger = LoggerFactory.getLogger(WorldAtlasImpl.class);

    private static final int MAX_TILES = 65536;
    // Fill colors for empty atlas cells: "straight up" normal, fully
    // transparent diffuse, and zero height respectively.
    private static final Color UNIT_Z_COLOR = new Color(0.5f, 0.5f, 1.0f, 1.0f);
    private static final Color TRANSPARENT_COLOR = new Color(0.0f, 0.0f, 0.0f, 0.0f);
    private static final Color BLACK_COLOR = new Color(0.0f, 0.0f, 0.0f, 1.0f);

    private int maxAtlasSize = 4096;
    private int atlasSize = 256;
    private int tileSize = 16;

    // Maps a tile urn to its cell index within the atlas.
    private TObjectIntMap<ResourceUrn> tileIndexes = new TObjectIntHashMap<>();
    private List<BlockTile> tiles = Lists.newArrayList();
    private List<BlockTile> tilesNormal = Lists.newArrayList();
    private List<BlockTile> tilesHeight = Lists.newArrayList();

    // Tiles whose source assets changed and require an atlas rebuild.
    private BlockingQueue<BlockTile> reloadQueue = Queues.newLinkedBlockingQueue();

    private Consumer<BlockTile> tileReloadListener = reloadQueue::add;

    /**
     * @param maxAtlasSize The maximum dimensions of the atlas (both width and height, in pixels)
     */
    public WorldAtlasImpl(int maxAtlasSize) {
        this.maxAtlasSize = maxAtlasSize;
        for (ResourceUrn tile : Assets.list(BlockTile.class)) {
            indexTile(tile);
        }
        buildAtlas();
    }

    @Override
    public int getTileSize() {
        return tileSize;
    }

    @Override
    public int getAtlasSize() {
        return atlasSize;
    }

    @Override
    public float getRelativeTileSize() {
        return ((float) getTileSize()) / (float) getAtlasSize();
    }

    @Override
    public int getNumMipmaps() {
        return TeraMath.sizeOfPower(tileSize) + 1;
    }

    @Override
    public Vector2f getTexCoords(BlockTile tile, boolean warnOnError) {
        return getTexCoords(tile.getUrn(), warnOnError);
    }

    /**
     * Obtains the tex coords of a block tile. If it isn't part of the atlas it is added to the atlas.
     *
     * @param uri The uri of the block tile of interest.
     * @param warnOnError Whether a warning should be logged if the asset cannot be found
     * @return The tex coords of the tile in the atlas.
     */
    @Override
    public Vector2f getTexCoords(ResourceUrn uri, boolean warnOnError) {
        return getTexCoords(getTileIndex(uri, warnOnError));
    }

    /**
     * Rebuilds the atlas if any subscribed tile has been reloaded since the
     * last update.
     */
    public void update() {
        if (!reloadQueue.isEmpty()) {
            List<BlockTile> reloadList = Lists.newArrayListWithExpectedSize(reloadQueue.size());
            // Drain to clear the queue; the drained tiles are not used yet
            // because the whole atlas is rebuilt below.
            reloadQueue.drainTo(reloadList);
            // TODO: does this need to be more efficient? could just reload individual block tile locations.
            buildAtlas();
        }
    }

    @Override
    public void dispose() {
        for (BlockTile tile : tiles) {
            tile.unsubscribe(tileReloadListener);
        }
    }

    // Converts a cell index into normalized (u, v) atlas coordinates.
    // Integer division/modulo intentionally give the cell's row/column.
    private Vector2f getTexCoords(int id) {
        int tilesPerDim = atlasSize / tileSize;
        return new Vector2f((id % tilesPerDim) * getRelativeTileSize(), (id / tilesPerDim) * getRelativeTileSize());
    }

    // Resolves a tile urn to its atlas index; falls back to cell 0 (with an
    // optional warning) when the urn was never indexed.
    private int getTileIndex(ResourceUrn uri, boolean warnOnError) {
        if (tileIndexes.containsKey(uri)) {
            return tileIndexes.get(uri);
        }
        if (warnOnError) {
            logger.warn("Tile {} could not be resolved", uri);
        }
        return 0;
    }

    // Loads and validates a tile, assigns it the next free atlas cell, and
    // subscribes to its reload events.  Returns cell 0 on any failure.
    private int indexTile(ResourceUrn uri) {
        if (tiles.size() == MAX_TILES) {
            logger.error("Maximum tiles exceeded");
            return 0;
        }
        Optional<BlockTile> tile = Assets.get(uri, BlockTile.class);
        if (tile.isPresent()) {
            if (checkTile(tile.get())) {
                int index = tiles.size();
                tiles.add(tile.get());
                addNormal(uri);
                addHeightMap(uri);
                tileIndexes.put(uri, index);
                tile.get().subscribe(tileReloadListener);
                return index;
            } else {
                logger.error("Invalid tile {}, must be a square with power-of-two sides.", uri);
                return 0;
            }
        }
        return 0;
    }

    // A valid tile image is square with power-of-two sides.
    private boolean checkTile(BlockTile tile) {
        return tile.getImage().getWidth() == tile.getImage().getHeight()
                && IntMath.isPowerOfTwo(tile.getImage().getWidth());
    }

    // Registers the "<urn>Normal" companion tile, if one exists.
    private void addNormal(ResourceUrn uri) {
        String name = uri.toString() + "Normal";
        Optional<BlockTile> tile = Assets.get(name, BlockTile.class);
        if (tile.isPresent()) {
            tilesNormal.add(tile.get());
        }
    }

    // Registers the "<urn>Height" companion tile, if one exists.
    private void addHeightMap(ResourceUrn uri) {
        String name = uri.toString() + "Height";
        Optional<BlockTile> tile = Assets.get(name, BlockTile.class);
        if (tile.isPresent()) {
            tilesHeight.add(tile.get());
        }
    }

    // Builds the diffuse/normal/height atlas textures with full mipmap chains,
    // the terrain material, and the per-module subtexture atlases.
    private void buildAtlas() {
        calculateAtlasSizes();

        int numMipMaps = getNumMipmaps();
        ByteBuffer[] data = createAtlasMipmaps(numMipMaps, TRANSPARENT_COLOR, tiles, "tiles.png");
        ByteBuffer[] dataNormal = createAtlasMipmaps(numMipMaps, UNIT_Z_COLOR, tilesNormal, "tilesNormal.png");
        ByteBuffer[] dataHeight = createAtlasMipmaps(numMipMaps, BLACK_COLOR, tilesHeight, "tilesHeight.png");

        TextureData terrainTexData = new TextureData(atlasSize, atlasSize, data, Texture.WrapMode.CLAMP, Texture.FilterMode.NEAREST);
        Texture terrainTex = Assets.generateAsset(new ResourceUrn("engine:terrain"), terrainTexData, Texture.class);

        TextureData terrainNormalData = new TextureData(atlasSize, atlasSize, dataNormal, Texture.WrapMode.CLAMP, Texture.FilterMode.NEAREST);
        Assets.generateAsset(new ResourceUrn("engine:terrainNormal"), terrainNormalData, Texture.class);

        TextureData terrainHeightData = new TextureData(atlasSize, atlasSize, dataHeight, Texture.WrapMode.CLAMP, Texture.FilterMode.NEAREST);
        Assets.generateAsset(new ResourceUrn("engine:terrainHeight"), terrainHeightData, Texture.class);

        MaterialData terrainMatData = new MaterialData(Assets.getShader("engine:block").get());
        terrainMatData.setParam("textureAtlas", terrainTex);
        terrainMatData.setParam("colorOffset", new float[]{1, 1, 1});
        terrainMatData.setParam("textured", true);
        Assets.generateAsset(new ResourceUrn("engine:terrain"), terrainMatData, Material.class);

        createTextureAtlas(terrainTex);
    }

    // Groups every indexed tile by its module and publishes one Atlas asset
    // per module, mapping each tile's resource name to its subtexture region.
    private void createTextureAtlas(final Texture texture) {
        final Map<Name, Map<Name, SubtextureData>> textureAtlases = Maps.newHashMap();
        final Vector2f texSize = new Vector2f(getRelativeTileSize(), getRelativeTileSize());
        tileIndexes.forEachEntry(new TObjectIntProcedure<ResourceUrn>() {
            @Override
            public boolean execute(ResourceUrn tileUri, int index) {
                Vector2f coords = getTexCoords(index);
                SubtextureData subtextureData = new SubtextureData(texture, Rect2f.createFromMinAndSize(coords, texSize));

                Map<Name, SubtextureData> textureAtlas = textureAtlases.get(tileUri.getModuleName());
                if (textureAtlas == null) {
                    textureAtlas = Maps.newHashMap();
                    textureAtlases.put(tileUri.getModuleName(), textureAtlas);
                }
                textureAtlas.put(tileUri.getResourceName(), subtextureData);
                return true;
            }
        });

        for (Map.Entry<Name, Map<Name, SubtextureData>> atlas : textureAtlases.entrySet()) {
            AtlasData data = new AtlasData(atlas.getValue());
            Assets.generateAsset(new ResourceUrn(atlas.getKey(), new Name("terrain")), data, Atlas.class);
        }
    }

    // Renders each mip level of the atlas to a PNG-encoded image, decodes it
    // back into an RGBA ByteBuffer for texture upload, and saves level 0 to
    // disk for debugging.
    private ByteBuffer[] createAtlasMipmaps(int numMipMaps, Color initialColor, List<BlockTile> tileImages, String screenshotName) {
        ByteBuffer[] data = new ByteBuffer[numMipMaps];
        for (int i = 0; i < numMipMaps; ++i) {
            BufferedImage image = generateAtlas(i, tileImages, initialColor);
            if (i == 0) {
                try (OutputStream stream = new BufferedOutputStream(Files.newOutputStream(PathManager.getInstance().getScreenshotPath().resolve(screenshotName)))) {
                    ImageIO.write(image, "png", stream);
                } catch (IOException e) {
                    // Fix: include the cause so the failure is diagnosable.
                    logger.warn("Failed to write atlas", e);
                }
            }

            try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
                ImageIO.write(image, "png", bos);
                PNGDecoder decoder = new PNGDecoder(new ByteArrayInputStream(bos.toByteArray()));
                ByteBuffer buf = ByteBuffer.allocateDirect(4 * decoder.getWidth() * decoder.getHeight());
                decoder.decode(buf, decoder.getWidth() * 4, PNGDecoder.Format.RGBA);
                buf.flip();
                data[i] = buf;
            } catch (IOException e) {
                // Fix: include the cause so the failure is diagnosable.
                logger.error("Failed to create atlas texture", e);
            }
        }
        return data;
    }

    // The atlas is configured using the following constraints...
    // 1.   The overall tile size is the size of the largest tile loaded
    // 2.   The atlas will never be larger than 4096*4096 px
    // 3.   The tile size gets adjusted if the tiles won't fit into the atlas using the overall tile size
    //      (the tile size gets halved until all tiles will fit into the atlas)
    // 4.   The size of the atlas is always a power of two - as is the tile size
    private void calculateAtlasSizes() {
        tileSize = 16;
        // Largest loaded tile determines the base tile size (plain loop
        // instead of a stream with a field-mutating side effect).
        for (BlockTile tile : tiles) {
            if (tile.getImage().getWidth() > tileSize) {
                tileSize = tile.getImage().getWidth();
            }
        }

        // Smallest power-of-two grid that holds all tiles.
        atlasSize = 1;
        while (atlasSize * atlasSize < tiles.size()) {
            atlasSize *= 2;
        }
        atlasSize = atlasSize * tileSize;

        // If the atlas would exceed the cap, halve the tile size until
        // everything fits (constraint 3).
        if (atlasSize > maxAtlasSize) {
            atlasSize = maxAtlasSize;
            int maxTiles = (atlasSize / tileSize) * (atlasSize / tileSize);
            while (maxTiles < tiles.size()) {
                tileSize >>= 1;
                maxTiles = (atlasSize / tileSize) * (atlasSize / tileSize);
            }
        }
    }

    // Draws every tile, scaled for the given mip level, into its grid cell on
    // a freshly cleared ARGB image.
    private BufferedImage generateAtlas(int mipMapLevel, List<BlockTile> tileImages, Color clearColor) {
        int size = atlasSize / (1 << mipMapLevel);
        int textureSize = tileSize / (1 << mipMapLevel);
        int tilesPerDim = atlasSize / tileSize;

        BufferedImage result = new BufferedImage(size, size, BufferedImage.TYPE_INT_ARGB);
        Graphics g = result.getGraphics();

        g.setColor(clearColor);
        g.fillRect(0, 0, size, size);

        for (int index = 0; index < tileImages.size(); ++index) {
            int posX = (index) % tilesPerDim;
            int posY = (index) / tilesPerDim;
            BlockTile tile = tileImages.get(index);
            if (tile != null) {
                g.drawImage(tile.getImage().getScaledInstance(textureSize, textureSize, Image.SCALE_SMOOTH), posX * textureSize, posY * textureSize, null);
            }
        }

        return result;
    }
}
/*
 * Zed Attack Proxy (ZAP) and its related class files.
 *
 * ZAP is an HTTP/HTTPS proxy for assessing web application security.
 *
 * Copyright 2013 The ZAP Development Team
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.zaproxy.zap.extension.users;

import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.management.relation.Role;

import org.apache.commons.configuration.Configuration;
import org.apache.log4j.Logger;
import org.parosproxy.paros.Constant;
import org.parosproxy.paros.control.Control;
import org.parosproxy.paros.db.RecordContext;
import org.parosproxy.paros.extension.ExtensionAdaptor;
import org.parosproxy.paros.extension.ExtensionHook;
import org.parosproxy.paros.model.Model;
import org.parosproxy.paros.model.Session;
import org.zaproxy.zap.authentication.AuthenticationMethodType;
import org.zaproxy.zap.control.ExtensionFactory;
import org.zaproxy.zap.extension.authentication.ExtensionAuthentication;
import org.zaproxy.zap.extension.httpsessions.ExtensionHttpSessions;
import org.zaproxy.zap.extension.sessions.ExtensionSessionManagement;
import org.zaproxy.zap.model.Context;
import org.zaproxy.zap.model.ContextDataFactory;
import org.zaproxy.zap.users.User;
import org.zaproxy.zap.view.AbstractContextPropertiesPanel;
import org.zaproxy.zap.view.ContextPanelFactory;

/**
 * The Extension for managing {@link User Users} and related entities.
 * <p>
 * This class also handles the loading of {@link AuthenticationMethodType} implementations in the
 * system using the AddOnLoader ({@link ExtensionFactory#getAddOnLoader()}).
 * </p>
 */
public class ExtensionUserManagement extends ExtensionAdaptor implements ContextPanelFactory,
        ContextDataFactory {

    /** Base configuration key under which Users are exported/imported for a Context. */
    public static final String CONTEXT_CONFIG_USERS = Context.CONTEXT_CONFIG + ".users";

    /** Configuration key of a single encoded user entry. */
    public static final String CONTEXT_CONFIG_USERS_USER = CONTEXT_CONFIG_USERS + ".user";

    /**
     * The extension's order during loading. Make sure we load this extension AFTER the
     * Authentication one.
     */
    public static final int EXTENSION_ORDER = ExtensionAuthentication.EXTENSION_ORDER + 5;

    /** The NAME of the extension. */
    public static final String NAME = "ExtensionUserManagement";

    /** The Constant log. */
    private static final Logger log = Logger.getLogger(ExtensionUserManagement.class);

    /** The user panels, mapped to each context (keyed by context id). */
    private Map<Integer, ContextUsersPanel> userPanelsMap = new HashMap<>();

    /** The context managers, mapped to each context (keyed by context id). */
    private Map<Integer, ContextUserAuthManager> contextManagers = new HashMap<>();

    /** The ZAP API implementor exposing user management operations. */
    private UsersAPI api;

    /** The Constant EXTENSION DEPENDENCIES. */
    private static final List<Class<?>> EXTENSION_DEPENDENCIES;

    static {
        // Prepare a list of Extensions on which this extension depends
        List<Class<?>> dependencies = new ArrayList<>(3);
        dependencies.add(ExtensionHttpSessions.class);
        dependencies.add(ExtensionAuthentication.class);
        dependencies.add(ExtensionSessionManagement.class);
        EXTENSION_DEPENDENCIES = Collections.unmodifiableList(dependencies);
    }

    /** A reference to the http sessions extension, lazily resolved. */
    private ExtensionHttpSessions extensionHttpSessions;

    /**
     * Instantiates a new extension.
     */
    public ExtensionUserManagement() {
        initialize();
    }

    /**
     * Gets the ExtensionHttpSessions, if it's enabled.
     *
     * @return the Http Sessions extension or null, if it's not available
     */
    protected ExtensionHttpSessions getExtensionHttpSessions() {
        if (extensionHttpSessions == null) {
            extensionHttpSessions = (ExtensionHttpSessions) Control.getSingleton().getExtensionLoader()
                    .getExtension(ExtensionHttpSessions.NAME);
            if (extensionHttpSessions == null)
                log.error("Http Sessions Extension should be enabled for the "
                        + ExtensionUserManagement.class.getSimpleName() + " to work.");
        }
        return extensionHttpSessions;
    }

    /**
     * Initialize the extension.
     */
    private void initialize() {
        this.setName(NAME);
        // Added to make sure the ExtensionForcedUser is loaded after this one.
        // See: ExtensionForcedUser#getOrder()
        this.setOrder(EXTENSION_ORDER);
    }

    @Override
    public String getAuthor() {
        return Constant.ZAP_TEAM;
    }

    @Override
    public void hook(ExtensionHook extensionHook) {
        super.hook(extensionHook);

        // Register this as a context data factory
        Model.getSingleton().addContextDataFactory(this);

        if (getView() != null) {
            // Factory for generating Session Context Users panels
            getView().addContextPanelFactory(this);
        }

        // Prepare API
        this.api = new UsersAPI(this);
        extensionHook.addApiImplementor(api);
    }

    @Override
    public List<Class<?>> getDependencies() {
        return EXTENSION_DEPENDENCIES;
    }

    @Override
    public URL getURL() {
        try {
            return new URL(Constant.ZAP_HOMEPAGE);
        } catch (MalformedURLException e) {
            // The homepage constant is expected to always be a valid URL.
            return null;
        }
    }

    @Override
    public AbstractContextPropertiesPanel getContextPanel(Context ctx) {
        return getContextPanel(ctx.getIndex());
    }

    /**
     * Gets the context panel for a given context, creating it on first access.
     *
     * @param contextId the context id
     * @return the context panel
     */
    private ContextUsersPanel getContextPanel(int contextId) {
        ContextUsersPanel panel = this.userPanelsMap.get(contextId);
        if (panel == null) {
            panel = new ContextUsersPanel(this, contextId);
            this.userPanelsMap.put(contextId, panel);
        }
        return panel;
    }

    /**
     * Gets the context user auth manager for a given context, creating it on first access.
     *
     * @param contextId the context id
     * @return the context user auth manager
     */
    public ContextUserAuthManager getContextUserAuthManager(int contextId) {
        ContextUserAuthManager manager = contextManagers.get(contextId);
        if (manager == null) {
            manager = new ContextUserAuthManager(contextId);
            contextManagers.put(contextId, manager);
        }
        return manager;
    }

    /**
     * Gets an unmodifiable view of the users that are currently shown in the UI.
     *
     * @param contextId the context id
     * @return the UI configured users, or null if no panel exists for the given context
     */
    public List<User> getUIConfiguredUsers(int contextId) {
        ContextUsersPanel panel = this.userPanelsMap.get(contextId);
        if (panel != null) {
            return Collections.unmodifiableList(panel.getUsersTableModel().getUsers());
        }
        return null;
    }

    /**
     * Gets the model of the users that are currently shown in the UI.
     *
     * @param contextId the context id
     * @return the users model, if any, or null, if there is no panel for the given model
     */
    public UsersTableModel getUIConfiguredUsersModel(int contextId) {
        ContextUsersPanel panel = this.userPanelsMap.get(contextId);
        if (panel != null) {
            return panel.getUsersTableModel();
        }
        return null;
    }

    @Override
    public void discardContexts() {
        this.contextManagers.clear();
        this.userPanelsMap.clear();
    }

    @Override
    public void discardContext(Context ctx) {
        this.contextManagers.remove(ctx.getIndex());
        this.userPanelsMap.remove(ctx.getIndex());
    }

    @Override
    public void loadContextData(Session session, Context context) {
        try {
            // Users are persisted one-per-record as encoded strings; decode each one back.
            List<String> encodedUsers = session.getContextDataStrings(context.getIndex(),
                    RecordContext.TYPE_USER);
            ContextUserAuthManager usersManager = getContextUserAuthManager(context.getIndex());
            for (String e : encodedUsers) {
                User u = User.decode(context.getIndex(), e);
                usersManager.addUser(u);
            }
        } catch (Exception ex) {
            log.error("Unable to load Users.", ex);
        }
    }

    @Override
    public void persistContextData(Session session, Context context) {
        try {
            List<String> encodedUsers = new ArrayList<>();
            ContextUserAuthManager m = contextManagers.get(context.getIndex());
            if (m != null) {
                for (User u : m.getUsers()) {
                    encodedUsers.add(User.encode(u));
                }
                session.setContextData(context.getIndex(), RecordContext.TYPE_USER, encodedUsers);
            }
        } catch (Exception ex) {
            log.error("Unable to persist Users.", ex);
        }
    }

    /**
     * Removes all the users that are shown in the UI (for the Users context panel) and correspond
     * to a particular shared Context.
     *
     * @param sharedContext the shared context
     */
    public void removeSharedContextUsers(Context sharedContext) {
        this.getContextPanel(sharedContext.getIndex()).getUsersTableModel().removeAllUsers();
    }

    /**
     * Add a new user shown in the UI (for the Users context panel) that corresponds
     * to a particular shared Context.
     *
     * @param sharedContext the shared context
     * @param user the user
     */
    public void addSharedContextUser(Context sharedContext, User user) {
        this.getContextPanel(sharedContext.getIndex()).getUsersTableModel().addUser(user);
    }

    /**
     * Gets the users shown in the UI that correspond to a particular shared Context.
     *
     * @param sharedContext the shared context
     * @return the users of the shared context's panel
     */
    public List<User> getSharedContextUsers(Context sharedContext) {
        return getContextPanel(sharedContext.getIndex()).getUsersTableModel().getUsers();
    }

    /**
     * Removes all the users that correspond to a Context with a given id.
     *
     * @param contextId the context id
     */
    public void removeContextUsers(int contextId) {
        this.getContextUserAuthManager(contextId).removeAllUsers();
    }

    @Override
    public void exportContextData(Context ctx, Configuration config) {
        ContextUserAuthManager m = contextManagers.get(ctx.getIndex());
        if (m != null) {
            for (User u : m.getUsers()) {
                config.addProperty(CONTEXT_CONFIG_USERS_USER, User.encode(u));
            }
        }
    }

    @Override
    public void importContextData(Context ctx, Configuration config) {
        List<Object> list = config.getList(CONTEXT_CONFIG_USERS_USER);
        ContextUserAuthManager m = getContextUserAuthManager(ctx.getIndex());
        for (Object o : list) {
            // Each exported entry is an encoded User string; decode and register it.
            User user = User.decode(ctx.getIndex(), o.toString());
            m.addUser(user);
        }
    }

    /**
     * No database tables used, so all supported
     */
    @Override
    public boolean supportsDb(String type) {
        return true;
    }
}
/*
 * Copyright 2010 Ning, Inc.
 *
 * Ning licenses this file to you under the Apache License, version 2.0
 * (the "License"); you may not use this file except in compliance with the
 * License. You may obtain a copy of the License at:
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package org.asynchttpclient.async;

import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;

import org.asynchttpclient.FluentStringsMap;
import org.testng.annotations.Test;

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;

/**
 * TestNG unit tests for {@link FluentStringsMap}: a multi-valued, case-sensitive,
 * insertion-ordered string map. The tests exercise construction (empty, from Map,
 * copy), add/delete/deleteAll/replace/replaceAll, and null/empty-value handling.
 * Many tests query keys in a different case (e.g. "fOO" vs "foo") to pin down the
 * map's case-sensitive lookup behavior.
 */
public class FluentStringsMapTest {
    // A freshly constructed map exposes no keys.
    @Test
    public void emptyTest() {
        FluentStringsMap map = new FluentStringsMap();

        assertTrue(map.keySet().isEmpty());
    }

    // add() with a single value and with a collection; lookups are case-sensitive.
    @Test
    public void normalTest() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("fOO", "bAr");
        map.add("Baz", Arrays.asList("fOo", "bar"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("fOO", "Baz")));
        assertEquals(map.getFirstValue("fOO"), "bAr");
        assertEquals(map.getJoinedValue("fOO", ", "), "bAr");
        assertEquals(map.get("fOO"), Arrays.asList("bAr"));
        assertNull(map.getFirstValue("foo"));
        assertNull(map.getJoinedValue("foo", ", "));
        assertNull(map.get("foo"));
        assertEquals(map.getFirstValue("Baz"), "fOo");
        assertEquals(map.getJoinedValue("Baz", ", "), "fOo, bar");
        assertEquals(map.get("Baz"), Arrays.asList("fOo", "bar"));
        assertNull(map.getFirstValue("baz"));
        assertNull(map.getJoinedValue("baz", ", "));
        assertNull(map.get("baz"));
    }

    // add() with a null key is ignored; lookups with a null key return null.
    @Test
    public void addNullTest() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("fOO", "bAr");
        map.add(null, Arrays.asList("fOo", "bar"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("fOO")));
        assertEquals(map.getFirstValue("fOO"), "bAr");
        assertEquals(map.getJoinedValue("fOO", ", "), "bAr");
        assertEquals(map.get("fOO"), Arrays.asList("bAr"));
        assertNull(map.getFirstValue("foo"));
        assertNull(map.getJoinedValue("foo", ", "));
        assertNull(map.get("foo"));
        assertNull(map.getFirstValue(null));
        assertNull(map.getJoinedValue("Baz", ", "));
        assertNull(map.get(null));
    }

    // Repeated add() on the same key appends values in order; keys differing
    // only in case ("foo" vs "fOO") remain distinct entries.
    @Test
    public void sameKeyMultipleTimesTest() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("foo", "baz,foo");
        map.add("foo", Arrays.asList("bar"));
        map.add("foo", "bla", "blubb");
        map.add("fOO", "duh");
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "fOO")));
        assertEquals(map.getFirstValue("foo"), "baz,foo");
        assertEquals(map.getJoinedValue("foo", ", "), "baz,foo, bar, bla, blubb");
        assertEquals(map.get("foo"), Arrays.asList("baz,foo", "bar", "bla", "blubb"));
        assertEquals(map.getFirstValue("fOO"), "duh");
        assertEquals(map.getJoinedValue("fOO", ", "), "duh");
        assertEquals(map.get("fOO"), Arrays.asList("duh"));
    }

    // An empty string is a legal value and is stored/returned as-is.
    @Test
    public void emptyValueTest() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("foo", "");
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo")));
        assertEquals(map.getFirstValue("foo"), "");
        assertEquals(map.getJoinedValue("foo", ", "), "");
        assertEquals(map.get("foo"), Arrays.asList(""));
    }

    // A null value still creates one entry; value accessors return null for it.
    @Test
    public void nullValueTest() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("foo", (String) null);
        assertEquals(map.getFirstValue("foo"), null);
        assertEquals(map.getJoinedValue("foo", ", "), null);
        assertEquals(map.get("foo").size(), 1);
    }

    // Constructing from a Map takes a snapshot: later mutation of the source
    // map does not affect the FluentStringsMap.
    @Test
    public void mapConstructorTest() {
        Map<String, Collection<String>> headerMap = new LinkedHashMap<String, Collection<String>>();

        headerMap.put("foo", Arrays.asList("baz,foo"));
        headerMap.put("baz", Arrays.asList("bar"));
        headerMap.put("bar", Arrays.asList("bla", "blubb"));

        FluentStringsMap map = new FluentStringsMap(headerMap);

        headerMap.remove("foo");
        headerMap.remove("bar");
        headerMap.remove("baz");

        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz", "bar")));
        assertEquals(map.getFirstValue("foo"), "baz,foo");
        assertEquals(map.getJoinedValue("foo", ", "), "baz,foo");
        assertEquals(map.get("foo"), Arrays.asList("baz,foo"));
        assertEquals(map.getFirstValue("baz"), "bar");
        assertEquals(map.getJoinedValue("baz", ", "), "bar");
        assertEquals(map.get("baz"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("bar"), "bla");
        assertEquals(map.getJoinedValue("bar", ", "), "bla, blubb");
        assertEquals(map.get("bar"), Arrays.asList("bla", "blubb"));
    }

    // A null source Map yields an empty FluentStringsMap rather than an NPE.
    @Test
    public void mapConstructorNullTest() {
        FluentStringsMap map = new FluentStringsMap((Map<String, Collection<String>>) null);

        assertEquals(map.keySet().size(), 0);
    }

    // The copy constructor also snapshots: deleting from the source afterwards
    // leaves the copy intact.
    @Test
    public void copyConstructorTest() {
        FluentStringsMap srcHeaders = new FluentStringsMap();

        srcHeaders.add("foo", "baz,foo");
        srcHeaders.add("baz", Arrays.asList("bar"));
        srcHeaders.add("bar", "bla", "blubb");

        FluentStringsMap map = new FluentStringsMap(srcHeaders);

        srcHeaders.delete("foo");
        srcHeaders.delete("bar");
        srcHeaders.delete("baz");
        assertTrue(srcHeaders.keySet().isEmpty());

        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz", "bar")));
        assertEquals(map.getFirstValue("foo"), "baz,foo");
        assertEquals(map.getJoinedValue("foo", ", "), "baz,foo");
        assertEquals(map.get("foo"), Arrays.asList("baz,foo"));
        assertEquals(map.getFirstValue("baz"), "bar");
        assertEquals(map.getJoinedValue("baz", ", "), "bar");
        assertEquals(map.get("baz"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("bar"), "bla");
        assertEquals(map.getJoinedValue("bar", ", "), "bla, blubb");
        assertEquals(map.get("bar"), Arrays.asList("bla", "blubb"));
    }

    // Copying a null FluentStringsMap yields an empty map.
    @Test
    public void copyConstructorNullTest() {
        FluentStringsMap map = new FluentStringsMap((FluentStringsMap) null);

        assertEquals(map.keySet().size(), 0);
    }

    // delete() removes the key and all of its values.
    @Test
    public void deleteTest() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("foo", "bar");
        map.add("baz", Arrays.asList("foo", "bar"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));

        map.delete("baz");
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertNull(map.getFirstValue("baz"));
        assertNull(map.getJoinedValue("baz", ", "));
        assertNull(map.get("baz"));
    }

    // delete() with a differently-cased key ("bAz") is a no-op: case-sensitive.
    @Test
    public void deleteTestDifferentCase() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("foo", "bar");
        map.add("baz", Arrays.asList("foo", "bar"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));

        map.delete("bAz");
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
    }

    // delete() on a key never added ("bar") leaves the map unchanged.
    @Test
    public void deleteUndefinedKeyTest() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("foo", "bar");
        map.add("baz", Arrays.asList("foo", "bar"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));

        map.delete("bar");
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
    }

    // delete(null) is tolerated and leaves the map unchanged.
    @Test
    public void deleteNullTest() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("foo", "bar");
        map.add("baz", Arrays.asList("foo", "bar"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));

        map.delete(null);
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
    }

    // deleteAll(varargs) removes the matching keys; unknown keys ("Boo") ignored.
    @Test
    public void deleteAllArrayTest() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("foo", "bar");
        map.add("baz", Arrays.asList("foo", "bar"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));

        map.deleteAll("baz", "Boo");
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertNull(map.getFirstValue("baz"));
        assertNull(map.getJoinedValue("baz", ", "));
        assertNull(map.get("baz"));
    }

    // deleteAll(varargs) is case-sensitive: "Foo" does not match "foo";
    // only the exact key "baz" is removed.
    @Test
    public void deleteAllArrayDifferentCaseTest() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("foo", "bar");
        map.add("baz", Arrays.asList("foo", "bar"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));

        map.deleteAll("Foo", "baz");
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertNull(map.getFirstValue("baz"));
        assertNull(map.getJoinedValue("baz", ", "));
        assertNull(map.get("baz"));
    }

    // deleteAll(Collection) removing every key leaves an empty key set.
    @Test
    public void deleteAllCollectionTest() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("foo", "bar");
        map.add("baz", Arrays.asList("foo", "bar"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));

        map.deleteAll(Arrays.asList("baz", "foo"));
        assertEquals(map.keySet(), Collections.<String> emptyList());
        assertNull(map.getFirstValue("foo"));
        assertNull(map.getJoinedValue("foo", ", "));
        assertNull(map.get("foo"));
        assertNull(map.getFirstValue("baz"));
        assertNull(map.getJoinedValue("baz", ", "));
        assertNull(map.get("baz"));
    }

    // deleteAll(Collection) is case-sensitive: "bAz"/"fOO" match nothing.
    @Test
    public void deleteAllCollectionDifferentCaseTest() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("foo", "bar");
        map.add("baz", Arrays.asList("foo", "bar"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));

        map.deleteAll(Arrays.asList("bAz", "fOO"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
    }

    // deleteAll((String[]) null) is tolerated; map unchanged.
    @Test
    public void deleteAllNullArrayTest() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("foo", "bar");
        map.add("baz", Arrays.asList("foo", "bar"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));

        map.deleteAll((String[]) null);
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
    }

    // deleteAll((Collection<String>) null) is tolerated; map unchanged.
    @Test
    public void deleteAllNullCollectionTest() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("foo", "bar");
        map.add("baz", Arrays.asList("foo", "bar"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));

        map.deleteAll((Collection<String>) null);
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
    }

    // replace(key, varargs) overwrites the key's existing values.
    @Test
    public void replaceArrayTest() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("foo", "bar");
        map.add("baz", Arrays.asList("foo", "bar"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));

        map.replace("foo", "blub", "bla");
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "blub");
        assertEquals(map.getJoinedValue("foo", ", "), "blub, bla");
        assertEquals(map.get("foo"), Arrays.asList("blub", "bla"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
    }

    // replace(key, Collection) behaves like the varargs form.
    @Test
    public void replaceCollectionTest() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("foo", "bar");
        map.add("baz", Arrays.asList("foo", "bar"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));

        map.replace("foo", Arrays.asList("blub", "bla"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "blub");
        assertEquals(map.getJoinedValue("foo", ", "), "blub, bla");
        assertEquals(map.get("foo"), Arrays.asList("blub", "bla"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
    }

    // replace() with a differently-cased key ("Foo") does not touch "foo";
    // it creates a new, separate entry instead.
    @Test
    public void replaceDifferentCaseTest() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("foo", "bar");
        map.add("baz", Arrays.asList("foo", "bar"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));

        map.replace("Foo", Arrays.asList("blub", "bla"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz", "Foo")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
        assertEquals(map.getFirstValue("Foo"), "blub");
        assertEquals(map.getJoinedValue("Foo", ", "), "blub, bla");
        assertEquals(map.get("Foo"), Arrays.asList("blub", "bla"));
    }

    // replace() on a key not yet present adds it (append semantics for new keys).
    @Test
    public void replaceUndefinedTest() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("foo", "bar");
        map.add("baz", Arrays.asList("foo", "bar"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));

        map.replace("bar", Arrays.asList("blub"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz", "bar")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
        assertEquals(map.getFirstValue("bar"), "blub");
        assertEquals(map.getJoinedValue("bar", ", "), "blub");
        assertEquals(map.get("bar"), Arrays.asList("blub"));
    }

    // replace(null, ...) is tolerated and leaves the map unchanged.
    @Test
    public void replaceNullTest() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("foo", "bar");
        map.add("baz", Arrays.asList("foo", "bar"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));

        map.replace(null, Arrays.asList("blub"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
    }

    // Replacing a key's values with null removes the key entirely.
    @Test
    public void replaceValueWithNullTest() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("foo", "bar");
        map.add("baz", Arrays.asList("foo", "bar"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));

        map.replace("baz", (Collection<String>) null);
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertNull(map.getFirstValue("baz"));
        assertNull(map.getJoinedValue("baz", ", "));
        assertNull(map.get("baz"));
    }

    // replaceAll(FluentStringsMap) overwrites only the keys present in the
    // argument; unrelated keys ("foo", "baz") keep their values, and new keys
    // ("Foo") are added.
    @Test
    public void replaceAllMapTest1() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("foo", "bar");
        map.add("bar", "foo, bar", "baz");
        map.add("baz", Arrays.asList("foo", "bar"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "bar", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("bar"), "foo, bar");
        assertEquals(map.getJoinedValue("bar", ", "), "foo, bar, baz");
        assertEquals(map.get("bar"), Arrays.asList("foo, bar", "baz"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));

        map.replaceAll(new FluentStringsMap().add("bar", "baz").add("Foo", "blub", "bla"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "bar", "baz", "Foo")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("bar"), "baz");
        assertEquals(map.getJoinedValue("bar", ", "), "baz");
        assertEquals(map.get("bar"), Arrays.asList("baz"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
        assertEquals(map.getFirstValue("Foo"), "blub");
        assertEquals(map.getJoinedValue("Foo", ", "), "blub, bla");
        assertEquals(map.get("Foo"), Arrays.asList("blub", "bla"));
    }

    // replaceAll(Map): a null value for a key ("foo") deletes that key.
    @Test
    public void replaceAllTest2() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("foo", "bar");
        map.add("bar", "foo, bar", "baz");
        map.add("baz", Arrays.asList("foo", "bar"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "bar", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("bar"), "foo, bar");
        assertEquals(map.getJoinedValue("bar", ", "), "foo, bar, baz");
        assertEquals(map.get("bar"), Arrays.asList("foo, bar", "baz"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));

        LinkedHashMap<String, Collection<String>> newValues = new LinkedHashMap<String, Collection<String>>();

        newValues.put("bar", Arrays.asList("baz"));
        newValues.put("foo", null);
        map.replaceAll(newValues);
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("bar", "baz")));
        assertNull(map.getFirstValue("foo"));
        assertNull(map.getJoinedValue("foo", ", "));
        assertNull(map.get("foo"));
        assertEquals(map.getFirstValue("bar"), "baz");
        assertEquals(map.getJoinedValue("bar", ", "), "baz");
        assertEquals(map.get("bar"), Arrays.asList("baz"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
    }

    // replaceAll((FluentStringsMap) null) is tolerated; map unchanged.
    @Test
    public void replaceAllNullTest1() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("foo", "bar");
        map.add("bar", "foo, bar", "baz");
        map.add("baz", Arrays.asList("foo", "bar"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "bar", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("bar"), "foo, bar");
        assertEquals(map.getJoinedValue("bar", ", "), "foo, bar, baz");
        assertEquals(map.get("bar"), Arrays.asList("foo, bar", "baz"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));

        map.replaceAll((FluentStringsMap) null);
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "bar", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("bar"), "foo, bar");
        assertEquals(map.getJoinedValue("bar", ", "), "foo, bar, baz");
        assertEquals(map.get("bar"), Arrays.asList("foo, bar", "baz"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
    }

    // replaceAll((Map<String, Collection<String>>) null) is tolerated; map unchanged.
    @Test
    public void replaceAllNullTest2() {
        FluentStringsMap map = new FluentStringsMap();

        map.add("foo", "bar");
        map.add("bar", "foo, bar", "baz");
        map.add("baz", Arrays.asList("foo", "bar"));
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "bar", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("bar"), "foo, bar");
        assertEquals(map.getJoinedValue("bar", ", "), "foo, bar, baz");
        assertEquals(map.get("bar"), Arrays.asList("foo, bar", "baz"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));

        map.replaceAll((Map<String, Collection<String>>) null);
        assertEquals(map.keySet(), new LinkedHashSet<String>(Arrays.asList("foo", "bar", "baz")));
        assertEquals(map.getFirstValue("foo"), "bar");
        assertEquals(map.getJoinedValue("foo", ", "), "bar");
        assertEquals(map.get("foo"), Arrays.asList("bar"));
        assertEquals(map.getFirstValue("bar"), "foo, bar");
        assertEquals(map.getJoinedValue("bar", ", "), "foo, bar, baz");
        assertEquals(map.get("bar"), Arrays.asList("foo, bar", "baz"));
        assertEquals(map.getFirstValue("baz"), "foo");
        assertEquals(map.getJoinedValue("baz", ", "), "foo, bar");
        assertEquals(map.get("baz"), Arrays.asList("foo", "bar"));
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.exec.expr.stat;

import org.apache.drill.common.expression.LogicalExpression;
import org.apache.drill.common.expression.LogicalExpressionBase;
import org.apache.drill.common.expression.TypedFieldExpr;
import org.apache.drill.common.expression.visitors.ExprVisitor;
import org.apache.drill.exec.expr.fn.FunctionGenerationHelper;
import org.apache.parquet.column.statistics.BooleanStatistics;
import org.apache.parquet.column.statistics.Statistics;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.function.BiFunction;

import static org.apache.drill.exec.expr.stat.ParquetPredicatesHelper.hasNoNulls;
import static org.apache.drill.exec.expr.stat.ParquetPredicatesHelper.isAllNulls;
import static org.apache.drill.exec.expr.stat.ParquetPredicatesHelper.isNullOrEmpty;

/**
 * IS predicates for parquet filter pushdown.
 * <p>
 * Each instance pairs the filtered expression with a function that evaluates the
 * predicate against the parquet column {@link Statistics} of one row group and
 * answers whether all, some or none of the rows in that row group can match.
 */
public class ParquetIsPredicate<C extends Comparable<C>> extends LogicalExpressionBase
    implements ParquetFilterPredicate<C> {

  /** Expression (typically a column reference) the IS predicate is applied to. */
  private final LogicalExpression expr;

  /** Evaluates this predicate against the column statistics of a single row group. */
  private final BiFunction<Statistics<C>, RangeExprEvaluator<C>, RowsMatch> predicate;

  private ParquetIsPredicate(LogicalExpression expr,
      BiFunction<Statistics<C>, RangeExprEvaluator<C>, RowsMatch> predicate) {
    super(expr.getPosition());
    this.expr = expr;
    this.predicate = predicate;
  }

  @Override
  public Iterator<LogicalExpression> iterator() {
    // The filtered expression is the only child of this node.
    final List<LogicalExpression> args = new ArrayList<>();
    args.add(expr);
    return args.iterator();
  }

  @Override
  public <T, V, E extends Exception> T accept(ExprVisitor<T, V, E> visitor, V value) throws E {
    return visitor.visitUnknown(this, value);
  }

  /**
   * Applies the filter condition against the metadata of the rowgroup.
   *
   * @param evaluator evaluator that resolves {@link #expr} to column statistics
   * @return match status of the row group; {@code RowsMatch.SOME} when no usable
   *         statistics are available, so the data itself must be read to decide
   */
  public RowsMatch matches(RangeExprEvaluator<C> evaluator) {
    Statistics<C> exprStat = expr.accept(evaluator, null);
    return isNullOrEmpty(exprStat) ? RowsMatch.SOME : predicate.apply(exprStat, evaluator);
  }

  /**
   * After applying the filter against the statistics of the rowgroup, if the result is
   * {@code RowsMatch.ALL}, we still must know whether the rowgroup contains null values,
   * because they can change the filter result. If it contains some null values, the
   * {@code RowsMatch.ALL} is downgraded to {@code RowsMatch.SOME}, which says that maybe
   * some values (the null ones) should be discarded.
   */
  private static RowsMatch checkNull(Statistics<?> exprStat) {
    return hasNoNulls(exprStat) ? RowsMatch.ALL : RowsMatch.SOME;
  }

  /**
   * IS NULL predicate.
   */
  private static <C extends Comparable<C>> LogicalExpression createIsNullPredicate(LogicalExpression expr) {
    return new ParquetIsPredicate<C>(expr, (exprStat, evaluator) -> {
      // For arrays we are not able to define the exact number of nulls:
      // [1,2,3] vs [1,2] -> in the second case 3 is absent and thus null,
      // but the statistics show no nulls. Be conservative and read the data.
      if (expr instanceof TypedFieldExpr) {
        TypedFieldExpr typedFieldExpr = (TypedFieldExpr) expr;
        if (typedFieldExpr.getPath().isArray()) {
          return RowsMatch.SOME;
        }
      }
      if (hasNoNulls(exprStat)) {
        return RowsMatch.NONE;
      }
      return isAllNulls(exprStat, evaluator.getRowCount()) ? RowsMatch.ALL : RowsMatch.SOME;
    });
  }

  /**
   * IS NOT NULL predicate.
   */
  private static <C extends Comparable<C>> LogicalExpression createIsNotNullPredicate(LogicalExpression expr) {
    return new ParquetIsPredicate<C>(expr, (exprStat, evaluator) ->
        isAllNulls(exprStat, evaluator.getRowCount()) ? RowsMatch.NONE : checkNull(exprStat)
    );
  }

  /**
   * IS TRUE predicate.
   */
  private static LogicalExpression createIsTruePredicate(LogicalExpression expr) {
    return new ParquetIsPredicate<Boolean>(expr, (exprStat, evaluator) -> {
      if (isAllNulls(exprStat, evaluator.getRowCount())) {
        return RowsMatch.NONE;
      }
      if (!exprStat.hasNonNullValue()) {
        return RowsMatch.SOME; // no usable min/max: cannot decide from metadata alone
      }
      BooleanStatistics boolStat = (BooleanStatistics) exprStat;
      if (!boolStat.getMax()) {
        return RowsMatch.NONE; // max is false -> no true value in the row group
      }
      // min is true -> every non-null value is true; nulls may still demote ALL to SOME
      return boolStat.getMin() ? checkNull(exprStat) : RowsMatch.SOME;
    });
  }

  /**
   * IS FALSE predicate.
   */
  private static LogicalExpression createIsFalsePredicate(LogicalExpression expr) {
    return new ParquetIsPredicate<Boolean>(expr, (exprStat, evaluator) -> {
      if (isAllNulls(exprStat, evaluator.getRowCount())) {
        return RowsMatch.NONE;
      }
      if (!exprStat.hasNonNullValue()) {
        return RowsMatch.SOME;
      }
      BooleanStatistics boolStat = (BooleanStatistics) exprStat;
      if (boolStat.getMin()) {
        return RowsMatch.NONE; // min is true -> no false value in the row group
      }
      // max false -> every non-null value is false; nulls may still demote ALL to SOME
      return boolStat.getMax() ? RowsMatch.SOME : checkNull(exprStat);
    });
  }

  /**
   * IS NOT TRUE predicate: matches rows that are false or null.
   */
  private static LogicalExpression createIsNotTruePredicate(LogicalExpression expr) {
    return new ParquetIsPredicate<Boolean>(expr, (exprStat, evaluator) -> {
      if (isAllNulls(exprStat, evaluator.getRowCount())) {
        return RowsMatch.ALL; // all nulls -> every row is "not true"
      }
      if (!exprStat.hasNonNullValue()) {
        return RowsMatch.SOME;
      }
      BooleanStatistics boolStat = (BooleanStatistics) exprStat;
      if (boolStat.getMin()) {
        // every non-null value is true; only null rows can match
        return hasNoNulls(exprStat) ? RowsMatch.NONE : RowsMatch.SOME;
      }
      return boolStat.getMax() ? RowsMatch.SOME : RowsMatch.ALL;
    });
  }

  /**
   * IS NOT FALSE predicate: matches rows that are true or null.
   */
  private static LogicalExpression createIsNotFalsePredicate(LogicalExpression expr) {
    return new ParquetIsPredicate<Boolean>(expr, (exprStat, evaluator) -> {
      if (isAllNulls(exprStat, evaluator.getRowCount())) {
        return RowsMatch.ALL; // all nulls -> every row is "not false"
      }
      if (!exprStat.hasNonNullValue()) {
        return RowsMatch.SOME;
      }
      BooleanStatistics boolStat = (BooleanStatistics) exprStat;
      if (!boolStat.getMax()) {
        // every non-null value is false; only null rows can match
        return hasNoNulls(exprStat) ? RowsMatch.NONE : RowsMatch.SOME;
      }
      return boolStat.getMin() ? RowsMatch.ALL : RowsMatch.SOME;
    });
  }

  /**
   * Creates the IS predicate matching the given function name.
   *
   * @param function one of the {@code FunctionGenerationHelper.IS_*} function names
   * @param expr expression the predicate applies to
   * @return the predicate, or {@code null} if the function name is not handled
   */
  public static <C extends Comparable<C>> LogicalExpression createIsPredicate(String function, LogicalExpression expr) {
    switch (function) {
      case FunctionGenerationHelper.IS_NULL:
        return ParquetIsPredicate.<C>createIsNullPredicate(expr);
      case FunctionGenerationHelper.IS_NOT_NULL:
        return ParquetIsPredicate.<C>createIsNotNullPredicate(expr);
      case FunctionGenerationHelper.IS_TRUE:
        return createIsTruePredicate(expr);
      case FunctionGenerationHelper.IS_NOT_TRUE:
        return createIsNotTruePredicate(expr);
      case FunctionGenerationHelper.IS_FALSE:
        return createIsFalsePredicate(expr);
      case FunctionGenerationHelper.IS_NOT_FALSE:
        return createIsNotFalsePredicate(expr);
      default:
        logger.warn("Unhandled IS function. Function name: {}", function);
        return null;
    }
  }
}
package com.github.shredder121.gh_event_api.model;

import static org.hamcrest.Matchers.*;

// Deserialization test for the PullRequest model: feeds a captured GitHub
// "pull_request" webhook payload through the shared AbstractSerializationTest
// harness and spot-checks a few deserialized properties.
public class PullRequestSerializationTest extends AbstractSerializationTest<PullRequest> {

    // Verbatim sample pull-request payload (from GitHub's webhook documentation
    // examples). Do not reformat the JSON: tests depend on the exact content.
    static String SAMPLE_PULL_REQUEST = "{\n" +
            " \"url\": \"https://api.github.com/repos/baxterthehacker/public-repo/pulls/1\",\n" +
            " \"id\": 34778301,\n" +
            " \"html_url\": \"https://github.com/baxterthehacker/public-repo/pull/1\",\n" +
            " \"diff_url\": \"https://github.com/baxterthehacker/public-repo/pull/1.diff\",\n" +
            " \"patch_url\": \"https://github.com/baxterthehacker/public-repo/pull/1.patch\",\n" +
            " \"issue_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/issues/1\",\n" +
            " \"number\": 1,\n" +
            " \"state\": \"open\",\n" +
            " \"locked\": false,\n" +
            " \"title\": \"Update the README with new information\",\n" +
            " \"user\": {\n" +
            " \"login\": \"baxterthehacker\",\n" +
            " \"id\": 6752317,\n" +
            " \"avatar_url\": \"https://avatars.githubusercontent.com/u/6752317?v=3\",\n" +
            " \"gravatar_id\": \"\",\n" +
            " \"url\": \"https://api.github.com/users/baxterthehacker\",\n" +
            " \"html_url\": \"https://github.com/baxterthehacker\",\n" +
            " \"followers_url\": \"https://api.github.com/users/baxterthehacker/followers\",\n" +
            " \"following_url\": \"https://api.github.com/users/baxterthehacker/following{/other_user}\",\n" +
            " \"gists_url\": \"https://api.github.com/users/baxterthehacker/gists{/gist_id}\",\n" +
            " \"starred_url\": \"https://api.github.com/users/baxterthehacker/starred{/owner}{/repo}\",\n" +
            " \"subscriptions_url\": \"https://api.github.com/users/baxterthehacker/subscriptions\",\n" +
            " \"organizations_url\": \"https://api.github.com/users/baxterthehacker/orgs\",\n" +
            " \"repos_url\": \"https://api.github.com/users/baxterthehacker/repos\",\n" +
            " \"events_url\": \"https://api.github.com/users/baxterthehacker/events{/privacy}\",\n" +
            " \"received_events_url\": \"https://api.github.com/users/baxterthehacker/received_events\",\n" +
            " \"type\": \"User\",\n" +
            " \"site_admin\": false\n" +
            " },\n" +
            " \"body\": \"This is a pretty simple change that we need to pull into master.\",\n" +
            " \"created_at\": \"2015-05-05T23:40:27Z\",\n" +
            " \"updated_at\": \"2015-05-05T23:40:27Z\",\n" +
            " \"closed_at\": null,\n" +
            " \"merged_at\": null,\n" +
            " \"merge_commit_sha\": null,\n" +
            " \"assignee\": null,\n" +
            " \"milestone\": null,\n" +
            " \"commits_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/pulls/1/commits\",\n" +
            " \"review_comments_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/pulls/1/comments\",\n" +
            " \"review_comment_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/pulls/comments{/number}\",\n" +
            " \"comments_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/issues/1/comments\",\n" +
            " \"statuses_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/statuses/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c\",\n" +
            " \"head\": {\n" +
            " \"label\": \"baxterthehacker:changes\",\n" +
            " \"ref\": \"changes\",\n" +
            " \"sha\": \"0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c\",\n" +
            " \"user\": {\n" +
            " \"login\": \"baxterthehacker\",\n" +
            " \"id\": 6752317,\n" +
            " \"avatar_url\": \"https://avatars.githubusercontent.com/u/6752317?v=3\",\n" +
            " \"gravatar_id\": \"\",\n" +
            " \"url\": \"https://api.github.com/users/baxterthehacker\",\n" +
            " \"html_url\": \"https://github.com/baxterthehacker\",\n" +
            " \"followers_url\": \"https://api.github.com/users/baxterthehacker/followers\",\n" +
            " \"following_url\": \"https://api.github.com/users/baxterthehacker/following{/other_user}\",\n" +
            " \"gists_url\": \"https://api.github.com/users/baxterthehacker/gists{/gist_id}\",\n" +
            " \"starred_url\": \"https://api.github.com/users/baxterthehacker/starred{/owner}{/repo}\",\n" +
            " \"subscriptions_url\": \"https://api.github.com/users/baxterthehacker/subscriptions\",\n" +
            " \"organizations_url\": \"https://api.github.com/users/baxterthehacker/orgs\",\n" +
            " \"repos_url\": \"https://api.github.com/users/baxterthehacker/repos\",\n" +
            " \"events_url\": \"https://api.github.com/users/baxterthehacker/events{/privacy}\",\n" +
            " \"received_events_url\": \"https://api.github.com/users/baxterthehacker/received_events\",\n" +
            " \"type\": \"User\",\n" +
            " \"site_admin\": false\n" +
            " },\n" +
            " \"repo\": {\n" +
            " \"id\": 35129377,\n" +
            " \"name\": \"public-repo\",\n" +
            " \"full_name\": \"baxterthehacker/public-repo\",\n" +
            " \"owner\": {\n" +
            " \"login\": \"baxterthehacker\",\n" +
            " \"id\": 6752317,\n" +
            " \"avatar_url\": \"https://avatars.githubusercontent.com/u/6752317?v=3\",\n" +
            " \"gravatar_id\": \"\",\n" +
            " \"url\": \"https://api.github.com/users/baxterthehacker\",\n" +
            " \"html_url\": \"https://github.com/baxterthehacker\",\n" +
            " \"followers_url\": \"https://api.github.com/users/baxterthehacker/followers\",\n" +
            " \"following_url\": \"https://api.github.com/users/baxterthehacker/following{/other_user}\",\n" +
            " \"gists_url\": \"https://api.github.com/users/baxterthehacker/gists{/gist_id}\",\n" +
            " \"starred_url\": \"https://api.github.com/users/baxterthehacker/starred{/owner}{/repo}\",\n" +
            " \"subscriptions_url\": \"https://api.github.com/users/baxterthehacker/subscriptions\",\n" +
            " \"organizations_url\": \"https://api.github.com/users/baxterthehacker/orgs\",\n" +
            " \"repos_url\": \"https://api.github.com/users/baxterthehacker/repos\",\n" +
            " \"events_url\": \"https://api.github.com/users/baxterthehacker/events{/privacy}\",\n" +
            " \"received_events_url\": \"https://api.github.com/users/baxterthehacker/received_events\",\n" +
            " \"type\": \"User\",\n" +
            " \"site_admin\": false\n" +
            " },\n" +
            " \"private\": false,\n" +
            " \"html_url\": \"https://github.com/baxterthehacker/public-repo\",\n" +
            " \"description\": \"\",\n" +
            " \"fork\": false,\n" +
            " \"url\": \"https://api.github.com/repos/baxterthehacker/public-repo\",\n" +
            " \"forks_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/forks\",\n" +
            " \"keys_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/keys{/key_id}\",\n" +
            " \"collaborators_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/collaborators{/collaborator}\",\n" +
            " \"teams_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/teams\",\n" +
            " \"hooks_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/hooks\",\n" +
            " \"issue_events_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/issues/events{/number}\",\n" +
            " \"events_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/events\",\n" +
            " \"assignees_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/assignees{/user}\",\n" +
            " \"branches_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/branches{/branch}\",\n" +
            " \"tags_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/tags\",\n" +
            " \"blobs_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/git/blobs{/sha}\",\n" +
            " \"git_tags_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/git/tags{/sha}\",\n" +
            " \"git_refs_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/git/refs{/sha}\",\n" +
            " \"trees_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/git/trees{/sha}\",\n" +
            " \"statuses_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/statuses/{sha}\",\n" +
            " \"languages_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/languages\",\n" +
            " \"stargazers_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/stargazers\",\n" +
            " \"contributors_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/contributors\",\n" +
            " \"subscribers_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/subscribers\",\n" +
            " \"subscription_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/subscription\",\n" +
            " \"commits_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/commits{/sha}\",\n" +
            " \"git_commits_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/git/commits{/sha}\",\n" +
            " \"comments_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/comments{/number}\",\n" +
            " \"issue_comment_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/issues/comments{/number}\",\n" +
            " \"contents_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/contents/{+path}\",\n" +
            " \"compare_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/compare/{base}...{head}\",\n" +
            " \"merges_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/merges\",\n" +
            " \"archive_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/{archive_format}{/ref}\",\n" +
            " \"downloads_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/downloads\",\n" +
            " \"issues_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/issues{/number}\",\n" +
            " \"pulls_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/pulls{/number}\",\n" +
            " \"milestones_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/milestones{/number}\",\n" +
            " \"notifications_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/notifications{?since,all,participating}\",\n" +
            " \"labels_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/labels{/name}\",\n" +
            " \"releases_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/releases{/id}\",\n" +
            " \"created_at\": \"2015-05-05T23:40:12Z\",\n" +
            " \"updated_at\": \"2015-05-05T23:40:12Z\",\n" +
            " \"pushed_at\": \"2015-05-05T23:40:26Z\",\n" +
            " \"git_url\": \"git://github.com/baxterthehacker/public-repo.git\",\n" +
            " \"ssh_url\": \"git@github.com:baxterthehacker/public-repo.git\",\n" +
            " \"clone_url\": \"https://github.com/baxterthehacker/public-repo.git\",\n" +
            " \"svn_url\": \"https://github.com/baxterthehacker/public-repo\",\n" +
            " \"homepage\": null,\n" +
            " \"size\": 0,\n" +
            " \"stargazers_count\": 0,\n" +
            " \"watchers_count\": 0,\n" +
            " \"language\": null,\n" +
            " \"has_issues\": true,\n" +
            " \"has_downloads\": true,\n" +
            " \"has_wiki\": true,\n" +
            " \"has_pages\": true,\n" +
            " \"forks_count\": 0,\n" +
            " \"mirror_url\": null,\n" +
            " \"open_issues_count\": 1,\n" +
            " \"forks\": 0,\n" +
            " \"open_issues\": 1,\n" +
            " \"watchers\": 0,\n" +
            " \"default_branch\": \"master\"\n" +
            " }\n" +
            " },\n" +
            " \"base\": {\n" +
            " \"label\": \"baxterthehacker:master\",\n" +
            " \"ref\": \"master\",\n" +
            " \"sha\": \"9049f1265b7d61be4a8904a9a27120d2064dab3b\",\n" +
            " \"user\": {\n" +
            " \"login\": \"baxterthehacker\",\n" +
            " \"id\": 6752317,\n" +
            " \"avatar_url\": \"https://avatars.githubusercontent.com/u/6752317?v=3\",\n" +
            " \"gravatar_id\": \"\",\n" +
            " \"url\": \"https://api.github.com/users/baxterthehacker\",\n" +
            " \"html_url\": \"https://github.com/baxterthehacker\",\n" +
            " \"followers_url\": \"https://api.github.com/users/baxterthehacker/followers\",\n" +
            " \"following_url\": \"https://api.github.com/users/baxterthehacker/following{/other_user}\",\n" +
            " \"gists_url\": \"https://api.github.com/users/baxterthehacker/gists{/gist_id}\",\n" +
            " \"starred_url\": \"https://api.github.com/users/baxterthehacker/starred{/owner}{/repo}\",\n" +
            " \"subscriptions_url\": \"https://api.github.com/users/baxterthehacker/subscriptions\",\n" +
            " \"organizations_url\": \"https://api.github.com/users/baxterthehacker/orgs\",\n" +
            " \"repos_url\": \"https://api.github.com/users/baxterthehacker/repos\",\n" +
            " \"events_url\": \"https://api.github.com/users/baxterthehacker/events{/privacy}\",\n" +
            " \"received_events_url\": \"https://api.github.com/users/baxterthehacker/received_events\",\n" +
            " \"type\": \"User\",\n" +
            " \"site_admin\": false\n" +
            " },\n" +
            " \"repo\": {\n" +
            " \"id\": 35129377,\n" +
            " \"name\": \"public-repo\",\n" +
            " \"full_name\": \"baxterthehacker/public-repo\",\n" +
            " \"owner\": {\n" +
            " \"login\": \"baxterthehacker\",\n" +
            " \"id\": 6752317,\n" +
            " \"avatar_url\": \"https://avatars.githubusercontent.com/u/6752317?v=3\",\n" +
            " \"gravatar_id\": \"\",\n" +
            " \"url\": \"https://api.github.com/users/baxterthehacker\",\n" +
            " \"html_url\": \"https://github.com/baxterthehacker\",\n" +
            " \"followers_url\": \"https://api.github.com/users/baxterthehacker/followers\",\n" +
            " \"following_url\": \"https://api.github.com/users/baxterthehacker/following{/other_user}\",\n" +
            " \"gists_url\": \"https://api.github.com/users/baxterthehacker/gists{/gist_id}\",\n" +
            " \"starred_url\": \"https://api.github.com/users/baxterthehacker/starred{/owner}{/repo}\",\n" +
            " \"subscriptions_url\": \"https://api.github.com/users/baxterthehacker/subscriptions\",\n" +
            " \"organizations_url\": \"https://api.github.com/users/baxterthehacker/orgs\",\n" +
            " \"repos_url\": \"https://api.github.com/users/baxterthehacker/repos\",\n" +
            " \"events_url\": \"https://api.github.com/users/baxterthehacker/events{/privacy}\",\n" +
            " \"received_events_url\": \"https://api.github.com/users/baxterthehacker/received_events\",\n" +
            " \"type\": \"User\",\n" +
            " \"site_admin\": false\n" +
            " },\n" +
            " \"private\": false,\n" +
            " \"html_url\": \"https://github.com/baxterthehacker/public-repo\",\n" +
            " \"description\": \"\",\n" +
            " \"fork\": false,\n" +
            " \"url\": \"https://api.github.com/repos/baxterthehacker/public-repo\",\n" +
            " \"forks_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/forks\",\n" +
            " \"keys_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/keys{/key_id}\",\n" +
            " \"collaborators_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/collaborators{/collaborator}\",\n" +
            " \"teams_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/teams\",\n" +
            " \"hooks_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/hooks\",\n" +
            " \"issue_events_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/issues/events{/number}\",\n" +
            " \"events_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/events\",\n" +
            " \"assignees_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/assignees{/user}\",\n" +
            " \"branches_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/branches{/branch}\",\n" +
            " \"tags_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/tags\",\n" +
            " \"blobs_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/git/blobs{/sha}\",\n" +
            " \"git_tags_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/git/tags{/sha}\",\n" +
            " \"git_refs_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/git/refs{/sha}\",\n" +
            " \"trees_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/git/trees{/sha}\",\n" +
            " \"statuses_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/statuses/{sha}\",\n" +
            " \"languages_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/languages\",\n" +
            " \"stargazers_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/stargazers\",\n" +
            " \"contributors_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/contributors\",\n" +
            " \"subscribers_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/subscribers\",\n" +
            " \"subscription_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/subscription\",\n" +
            " \"commits_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/commits{/sha}\",\n" +
            " \"git_commits_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/git/commits{/sha}\",\n" +
            " \"comments_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/comments{/number}\",\n" +
            " \"issue_comment_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/issues/comments{/number}\",\n" +
            " \"contents_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/contents/{+path}\",\n" +
            " \"compare_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/compare/{base}...{head}\",\n" +
            " \"merges_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/merges\",\n" +
            " \"archive_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/{archive_format}{/ref}\",\n" +
            " \"downloads_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/downloads\",\n" +
            " \"issues_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/issues{/number}\",\n" +
            " \"pulls_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/pulls{/number}\",\n" +
            " \"milestones_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/milestones{/number}\",\n" +
            " \"notifications_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/notifications{?since,all,participating}\",\n" +
            " \"labels_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/labels{/name}\",\n" +
            " \"releases_url\": \"https://api.github.com/repos/baxterthehacker/public-repo/releases{/id}\",\n" +
            " \"created_at\": \"2015-05-05T23:40:12Z\",\n" +
            " \"updated_at\": \"2015-05-05T23:40:12Z\",\n" +
            " \"pushed_at\": \"2015-05-05T23:40:26Z\",\n" +
            " \"git_url\": \"git://github.com/baxterthehacker/public-repo.git\",\n" +
            " \"ssh_url\": \"git@github.com:baxterthehacker/public-repo.git\",\n" +
            " \"clone_url\": \"https://github.com/baxterthehacker/public-repo.git\",\n" +
            " \"svn_url\": \"https://github.com/baxterthehacker/public-repo\",\n" +
            " \"homepage\": null,\n" +
            " \"size\": 0,\n" +
            " \"stargazers_count\": 0,\n" +
            " \"watchers_count\": 0,\n" +
            " \"language\": null,\n" +
            " \"has_issues\": true,\n" +
            " \"has_downloads\": true,\n" +
            " \"has_wiki\": true,\n" +
            " \"has_pages\": true,\n" +
            " \"forks_count\": 0,\n" +
            " \"mirror_url\": null,\n" +
            " \"open_issues_count\": 1,\n" +
            " \"forks\": 0,\n" +
            " \"open_issues\": 1,\n" +
            " \"watchers\": 0,\n" +
            " \"default_branch\": \"master\"\n" +
            " }\n" +
            " },\n" +
            " \"_links\": {\n" +
            " \"self\": {\n" +
            " \"href\": \"https://api.github.com/repos/baxterthehacker/public-repo/pulls/1\"\n" +
            " },\n" +
            " \"html\": {\n" +
            " \"href\": \"https://github.com/baxterthehacker/public-repo/pull/1\"\n" +
            " },\n" +
            " \"issue\": {\n" +
            " \"href\": \"https://api.github.com/repos/baxterthehacker/public-repo/issues/1\"\n" +
            " },\n" +
            " \"comments\": {\n" +
            " \"href\": \"https://api.github.com/repos/baxterthehacker/public-repo/issues/1/comments\"\n" +
            " },\n" +
            " \"review_comments\": {\n" +
            " \"href\": \"https://api.github.com/repos/baxterthehacker/public-repo/pulls/1/comments\"\n" +
            " },\n" +
            " \"review_comment\": {\n" +
            " \"href\": \"https://api.github.com/repos/baxterthehacker/public-repo/pulls/comments{/number}\"\n" +
            " },\n" +
            " \"commits\": {\n" +
            " \"href\": \"https://api.github.com/repos/baxterthehacker/public-repo/pulls/1/commits\"\n" +
            " },\n" +
            " \"statuses\": {\n" +
            " \"href\": \"https://api.github.com/repos/baxterthehacker/public-repo/statuses/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c\"\n" +
            " }\n" +
            " },\n" +
            " \"merged\": false,\n" +
            " \"mergeable\": null,\n" +
            " \"mergeable_state\": \"unknown\",\n" +
            " \"merged_by\": null,\n" +
            " \"comments\": 0,\n" +
            " \"review_comments\": 0,\n" +
            " \"commits\": 1,\n" +
            " \"additions\": 1,\n" +
            " \"deletions\": 1,\n" +
            " \"changed_files\": 1\n" +
            "}";

    public PullRequestSerializationTest() {
        // Tell the harness which model class to deserialize the sample into.
        super(PullRequest.class);
    }

    @Override
    protected String getSample() {
        return SAMPLE_PULL_REQUEST;
    }

    @Override
    protected void checkDeserialized(PullRequest pullRequest) {
        // Spot-check scalar and nested-map properties of the deserialized object.
        errorCollector.checkThat(pullRequest.getNumber(), is(1));
        errorCollector.checkThat(pullRequest.getLinks(), allOf(
                hasKey("self"),
                hasKey("html"),
                hasKey("comments"),
                hasKey("commits")));
    }
}
package hex.tree.xgboost; import hex.DataInfo; import ml.dmlc.xgboost4j.java.DMatrix; import ml.dmlc.xgboost4j.java.XGBoostError; import ml.dmlc.xgboost4j.java.util.BigDenseMatrix; import water.H2O; import water.Key; import water.LocalMR; import water.MrFun; import water.fvec.Chunk; import water.fvec.Frame; import water.fvec.Vec; import water.util.Log; import water.util.VecUtils; import java.util.*; import static water.H2O.technote; import static water.MemoryManager.malloc4; import static water.MemoryManager.malloc4f; import static water.MemoryManager.malloc8; public class XGBoostUtils { /** * Arbitrary chosen initial size of array allocated for XGBoost's purpose. * Used in case of sparse matrices. */ private static final int ALLOCATED_ARRAY_LEN = 1048576; public static String makeFeatureMap(Frame f, DataInfo di) { // set the names for the (expanded) columns String[] coefnames = di.coefNames(); StringBuilder sb = new StringBuilder(); assert(coefnames.length == di.fullN()); int catCols = di._catOffsets[di._catOffsets.length-1]; for (int i = 0; i < di.fullN(); ++i) { sb.append(i).append(" ").append(coefnames[i].replaceAll("\\s*","")).append(" "); if (i < catCols || f.vec(i-catCols).isBinary()) sb.append("i"); else if (f.vec(i-catCols).isInt()) sb.append("int"); else sb.append("q"); sb.append("\n"); } return sb.toString(); } /** * convert an H2O Frame to a sparse DMatrix * @param f H2O Frame * @param onlyLocal if true uses only chunks local to this node * @param response name of the response column * @param weight name of the weight column * @param fold name of the fold assignment column * @return DMatrix * @throws XGBoostError */ public static DMatrix convertFrameToDMatrix(Key<DataInfo> dataInfoKey, Frame f, boolean onlyLocal, String response, String weight, String fold, boolean sparse) throws XGBoostError { int[] chunks; Vec vec = f.anyVec(); if(!onlyLocal) { // All chunks chunks = new int[f.anyVec().nChunks()]; for(int i = 0; i < chunks.length; i++) { chunks[i] = 
i; } } else { chunks = VecUtils.getLocalChunkIds(f.anyVec()); } final Vec weightVector = f.vec(weight); final int[] nRowsByChunk = new int[chunks.length]; final long nRowsL = sumChunksLength(chunks, vec, weightVector, nRowsByChunk); if (nRowsL > Integer.MAX_VALUE) { throw new IllegalArgumentException("XGBoost currently doesn't support datasets with more than " + Integer.MAX_VALUE + " per node. " + "To train a XGBoost model on this dataset add more nodes to your H2O cluster and use distributed training."); } final int nRows = (int) nRowsL; final DataInfo di = dataInfoKey.get(); assert di != null; final DMatrix trainMat; // In the future this 2 arrays might also need to be rewritten into float[][], // but only if we want to handle datasets over 2^31-1 on a single machine. For now I'd leave it as it is. float[] resp = malloc4f(nRows); float[] weights = null; if (weightVector != null) { weights = malloc4f(nRows); } if (sparse) { Log.debug("Treating matrix as sparse."); // 1 0 2 0 // 4 0 0 3 // 3 1 2 0 boolean csc = false; //di._cats == 0; // truly sparse matrix - no categoricals // collect all nonzeros column by column (in parallel), then stitch together into final data structures Vec.Reader w = weight == null ? 
null : weightVector.new Reader(); if (csc) { trainMat = csc(f, chunks, w, f.vec(response).new Reader(), nRows, di, resp, weights); } else { Vec.Reader[] vecs = new Vec.Reader[f.numCols()]; for (int i = 0; i < vecs.length; ++i) { vecs[i] = f.vec(i).new Reader(); } trainMat = csr(f, chunks, vecs, w, f.vec(response).new Reader(), nRows, di, resp, weights); } } else { Log.debug("Treating matrix as dense."); BigDenseMatrix data = null; try { data = allocateDenseMatrix(nRows, di); long actualRows = denseChunk(data, chunks, nRowsByChunk, f, weightVector, f.vec(response), di, resp, weights); assert data.nrow == actualRows; trainMat = new DMatrix(data, Float.NaN); } finally { if (data != null) { data.dispose(); } } } assert trainMat.rowNum() == nRows; trainMat.setLabel(resp); if (weights != null) { trainMat.setWeight(weights); } return trainMat; } // FIXME this and the other method should subtract rows where response is 0 private static int getDataRows(Chunk[] chunks, Frame f, int[] chunksIds, int cols) { double totalRows = 0; if(null != chunks) { for (Chunk ch : chunks) { totalRows += ch.len(); } } else { for(int chunkId : chunksIds) { totalRows += f.anyVec().chunkLen(chunkId); } } return (int) Math.ceil(totalRows * cols / ARRAY_MAX); } /** * Counts a total sum of chunks inside a vector. Only chunks present in chunkIds are considered. * * @param chunkIds Chunk identifier of a vector * @param vec Vector containing given chunk identifiers * @param weightsVector Vector with row weights, possibly null * @return A sum of chunk lengths. Possibly zero, if there are no chunks or the chunks are empty. 
*/
  private static long sumChunksLength(int[] chunkIds, Vec vec, Vec weightsVector, int[] chunkLengths) {
    // Fills chunkLengths (out-parameter) with the per-chunk row counts, subtracting rows whose
    // weight is zero, and returns the total across all requested chunks.
    for (int i = 0; i < chunkIds.length; i++) {
      final int chunk = chunkIds[i];
      chunkLengths[i] = vec.chunkLen(chunk);
      if (weightsVector == null)
        continue;

      Chunk weightVecChunk = weightsVector.chunkForChunkIdx(chunk);
      // Row 0 is checked explicitly because the nextNZ scan below starts from index 0.
      // NOTE(review): if nextNZ(0, true) can itself return 0, row 0 might be decremented
      // twice — confirm the nextNZ contract.
      if (weightVecChunk.atd(0) == 0) chunkLengths[i]--;
      int nzIndex = 0;
      do {
        nzIndex = weightVecChunk.nextNZ(nzIndex, true);
        if (nzIndex < 0 || nzIndex >= weightVecChunk._len) break;
        if (weightVecChunk.atd(nzIndex) == 0) chunkLengths[i]--;
      } while (true);
    }
    long totalChunkLength = 0;
    for (int cl : chunkLengths) {
      totalChunkLength += cl;
    }
    return totalChunkLength;
  }

  /**
   * Copies the response (and, when present, the weight) of row {@code i} into the output arrays
   * at position {@code j}. Rows with zero weight are skipped entirely (j is returned unchanged).
   *
   * @return the next write position (j, or j+1 if the row was accepted)
   */
  private static int setResponseAndWeight(Chunk[] chunks, int respIdx, int weightIdx, float[] resp, float[] weights, int j, int i) {
    if (weightIdx != -1) {
      if(chunks[weightIdx].atd(i) == 0) {
        return j; // zero-weight row: contributes nothing
      }
      weights[j] = (float) chunks[weightIdx].atd(i);
    }
    resp[j++] = (float) chunks[respIdx].atd(i);
    return j;
  }

  /**
   * Vec.Reader-based variant of {@link #setResponseAndWeight(Chunk[], int, int, float[], float[], int, int)}
   * used on the setupLocal (whole-frame) code path; {@code i} is a global row index.
   */
  private static int setResponseAndWeight(Vec.Reader w, float[] resp, float[] weights, Vec.Reader respVec, int j, long i) {
    if (w != null) {
      if(w.at(i) == 0) {
        return j; // zero-weight row: skipped
      }
      weights[j] = (float) w.at(i);
    }
    resp[j++] = (float) respVec.at(i);
    return j;
  }

  /**
   * Collects non-zero entries of the given chunks of frame {@code f} into {@code col}
   * (one SparseItem list per column), skipping rows whose weight reads as zero.
   * The ZeroWeight callback receives a GLOBAL row index here (localWeight == false).
   *
   * @return the updated running non-zero count
   */
  private static int getNzCount(Frame f, int[] chunks, final Vec.Reader w, int nCols, List<SparseItem>[] col, int nzCount) {
    for (int i=0;i<nCols;++i) { //TODO: parallelize over columns
      Vec v = f.vec(i);
      for (Integer c : chunks) {
        Chunk ck = v.chunkForChunkIdx(c);
        int[] nnz = new int[ck.sparseLenZero()];
        int nnzCount = ck.nonzeros(nnz);
        nzCount = getNzCount(new ZeroWeight() {
          @Override
          public boolean zeroWeight(int idx) {
            return w != null && w.at(idx) == 0;
          }
        }, col[i], nzCount, ck, nnz, nnzCount, false);
      }
    }
    return nzCount;
  }

  // Predicate deciding whether a row should be dropped because its weight is zero.
  interface ZeroWeight {
    boolean zeroWeight(int idx);
  }

  /**
   * Chunk-local variant of the non-zero collector; the ZeroWeight callback receives a
   * chunk-LOCAL row index here (localWeight == true).
   * NOTE(review): the callback tests ck.atd(idx) (the data chunk), not a weight chunk —
   * looks like it should consult chunks[weight]; confirm against the intended semantics.
   */
  private static int getNzCount(final Chunk[] chunks, final int weight, int nCols, List<SparseItem>[] col, int nzCount) {
    for (int i=0;i<nCols;++i) { //TODO: parallelize over columns
      final Chunk ck = chunks[i];
      int[] nnz = new int[ck.sparseLenZero()];
      int nnzCount = ck.nonzeros(nnz);
      nzCount = getNzCount(new ZeroWeight() {
        @Override
        public boolean zeroWeight(int idx) {
          return weight != -1 && ck.atd(idx) == 0;
        }
      }, col[i], nzCount, ck, nnz, nnzCount, true);
    }
    return nzCount;
  }

  /**
   * Shared core of the two collectors above: appends each accepted non-zero, non-NA element
   * of chunk {@code ck} (positions given by nnz[0..nnzCount)) to {@code sparseItems}.
   *
   * @param localWeight true when zw expects chunk-local indices, false for global row indices
   * @return the updated running non-zero count
   */
  private static int getNzCount(ZeroWeight zw, List<SparseItem> sparseItems, int nzCount, Chunk ck, int[] nnz,
                                int nnzCount, boolean localWeight) {
    for (int k=0;k<nnzCount;++k) {
      SparseItem item = new SparseItem();
      int localIdx = nnz[k];
      item.pos = (int)ck.start() + localIdx;
      // both 0 and NA are omitted in the sparse DMatrix
      if (zw.zeroWeight(localWeight ? localIdx : item.pos)) continue;
      if (ck.isNA(localIdx)) continue;
      item.val = ck.atd(localIdx);
      sparseItems.add(item);
      nzCount++;
    }
    return nzCount;
  }

  /**
   * convert a set of H2O chunks (representing a part of a vector) to a sparse DMatrix
   * @param response index of the response column within {@code chunks}
   * @param weight index of the weight column within {@code chunks}, or -1 when absent
   * @param fold index of the fold assignment column (currently unused, see FIXME below)
   * @param sparse build a sparse (CSR) DMatrix when true, a dense one otherwise
   * @return DMatrix
   * @throws XGBoostError
   */
  public static DMatrix convertChunksToDMatrix(Key<DataInfo> dataInfoKey,
                                               Chunk[] chunks,
                                               int response, int weight, int fold,
                                               boolean sparse) throws XGBoostError {
    int nRows = chunks[0]._len;
    DMatrix trainMat;
    DataInfo di = dataInfoKey.get();
    // resp/weights are over-allocated to nRows and trimmed to the actual row count below,
    // because zero-weight rows are dropped during conversion.
    float[] resp = malloc4f(nRows);
    float[] weights = null;
    if(-1 != weight) {
      weights = malloc4f(nRows);
    }
    try {
      if (sparse) {
        Log.debug("Treating matrix as sparse.");
        // 1 0 2 0
        // 4 0 0 3
        // 3 1 2 0
        boolean csc = false; //di._cats == 0; // truly sparse matrix - no categoricals
        // collect all nonzeros column by column (in parallel), then stitch together into final data structures
        if (csc) {
          trainMat = csc(chunks, weight, nRows, di, resp, weights);
        } else {
          trainMat = csr(chunks, weight, response, nRows, di, resp, weights);
        }
      } else {
        trainMat = dense(chunks, weight, di, response, resp, weights);
      }
    } catch (NegativeArraySizeException e) {
      // Array allocation overflowed int — the data cannot fit a 32-bit indexed array.
      throw new IllegalArgumentException(technote(11,
          "Data is too large to fit into the 32-bit Java float[] array that needs to be passed to the XGBoost C++ backend. Use H2O GBM instead."));
    }
    int len = (int) trainMat.rowNum();
    resp = Arrays.copyOf(resp, len); // trim to the rows actually written
    trainMat.setLabel(resp);
    if (weight!=-1){
      weights = Arrays.copyOf(weights, len);
      trainMat.setWeight(weights);
    }
//    trainMat.setGroup(null); //fold //FIXME - only needed if CV is internally done in XGBoost
    return trainMat;
  }

  /****************************************************************************************************************
   ************************************** DMatrix creation for dense matrices *************************************
   ****************************************************************************************************************/

  /**
   * Builds a dense DMatrix from a single set of chunks. The intermediate native buffer is
   * always disposed; DMatrix keeps its own copy of the data.
   */
  private static DMatrix dense(Chunk[] chunks, int weight, DataInfo di, int respIdx, float[] resp, float[] weights) throws XGBoostError {
    Log.debug("Treating matrix as dense.");
    BigDenseMatrix data = null;
    try {
      data = allocateDenseMatrix(chunks[0].len(), di);
      long actualRows = denseChunk(data, chunks, weight, respIdx, di, resp, weights);
      assert actualRows == data.nrow;
      return new DMatrix(data, Float.NaN);
    } finally {
      if (data != null) {
        data.dispose(); // free native memory regardless of success
      }
    }
  }

  // Slightly below Integer.MAX_VALUE: JVMs reserve a few header words, so arrays of exactly
  // MAX_VALUE elements may fail to allocate.
  private static final int ARRAY_MAX = Integer.MAX_VALUE - 10;

  /**
   * Fills {@code data} from whole-frame chunks in parallel (one LocalMR task per chunk).
   * {@code nRowsByChunk} gives the expected per-chunk row counts used to compute write offsets.
   *
   * @return the total number of rows actually written
   */
  private static long denseChunk(BigDenseMatrix data,
                                 int[] chunks, int[] nRowsByChunk, Frame f, Vec weightsVec, Vec respVec, DataInfo di,
                                 float[] resp, float[] weights) {
    int[] offsets = new int[nRowsByChunk.length + 1];
    for (int i = 0; i < chunks.length; i++) {
      offsets[i + 1] = nRowsByChunk[i] + offsets[i]; // prefix sums -> per-task row offsets
    }
    WriteDenseChunkFun writeFun = new WriteDenseChunkFun(f, chunks, offsets, weightsVec, respVec, di, data, resp, weights);
    H2O.submitTask(new LocalMR(writeFun, chunks.length)).join();
    return writeFun.getTotalRows();
  }

  /**
   * MrFun task writing one chunk's worth of rows into the shared BigDenseMatrix.
   * Tasks write disjoint regions (delimited by _offsets), so no synchronization is needed.
   */
  private static class WriteDenseChunkFun extends MrFun<WriteDenseChunkFun> {
    private final Frame _f;
    private final int[] _chunks;
    private final int[] _offsets;
    private final Vec _weightsVec;
    private final Vec _respVec;
    private final DataInfo _di;
    private final BigDenseMatrix _data;
    private final float[] _resp;
    private final float[] _weights;

    // OUT: rows actually written per task (zero-weight rows are skipped)
    private int[] _nRowsByChunk;

    private WriteDenseChunkFun(Frame f, int[] chunks, int[] offsets, Vec weightsVec, Vec respVec, DataInfo di,
                               BigDenseMatrix data, float[] resp, float[] weights) {
      _f = f;
      _chunks = chunks;
      _offsets = offsets;
      _weightsVec = weightsVec;
      _respVec = respVec;
      _di = di;
      _data = data;
      _resp = resp;
      _weights = weights;
      _nRowsByChunk = new int[chunks.length];
    }

    @Override
    protected void map(int id) {
      final int chunkIdx = _chunks[id];
      Chunk[] chks = new Chunk[_f.numCols()];
      for (int c = 0; c < chks.length; c++) {
        chks[c] = _f.vec(c).chunkForChunkIdx(chunkIdx);
      }
      Chunk weightsChk = _weightsVec != null ? _weightsVec.chunkForChunkIdx(chunkIdx) : null;
      Chunk respChk = _respVec.chunkForChunkIdx(chunkIdx);
      // NOTE(review): int*int product computed before widening to long — could overflow for
      // very large offset*ncol; confirm the intended limits or cast _offsets[id] to long.
      long idx = _offsets[id] * _data.ncol;
      int actualRows = 0;
      for (int i = 0; i < chks[0]._len; i++) {
        if (weightsChk != null && weightsChk.atd(i) == 0)
          continue; // zero-weight rows are dropped
        idx = writeDenseRow(_di, chks, i, _data, idx);
        _resp[_offsets[id] + actualRows] = (float) respChk.atd(i);
        if (weightsChk != null) {
          _weights[_offsets[id] + actualRows] = (float) weightsChk.atd(i);
        }
        actualRows++;
      }
      assert idx == (long) _offsets[id + 1] * _data.ncol;
      _nRowsByChunk[id] = actualRows;
    }

    private long getTotalRows() {
      long totalRows = 0;
      for (int r : _nRowsByChunk) {
        totalRows += r;
      }
      return totalRows;
    }
  }

  /**
   * Single-threaded dense fill from one set of chunks (the MR-task code path).
   *
   * @return the number of rows actually written (zero-weight rows excluded)
   */
  private static long denseChunk(BigDenseMatrix data, Chunk[] chunks, int weight, int respIdx, DataInfo di, float[] resp, float[] weights) {
    long idx = 0;
    long actualRows = 0;
    int rwRow = 0;
    for (int i = 0; i < chunks[0]._len; i++) {
      if (weight != -1 && chunks[weight].atd(i) == 0) continue; // skip zero-weight rows
      idx = writeDenseRow(di, chunks, i, data, idx);
      actualRows++;
      rwRow = setResponseAndWeight(chunks, respIdx, weight, resp, weights, rwRow, i);
    }
    assert (long) data.nrow * data.ncol == idx;
    return actualRows;
  }

  /**
   * Writes one row into the dense matrix at offset {@code idx}: categorical columns are
   * one-hot expanded, numeric columns copied as floats (NAs become NaN).
   *
   * @return the offset immediately after the written row
   */
  private static long writeDenseRow(DataInfo di, Chunk[] chunks, int rowInChunk,
                                    BigDenseMatrix data, long idx) {
    for (int j = 0; j < di._cats; ++j) {
      int len = di._catOffsets[j+1] - di._catOffsets[j];
      double val = chunks[j].isNA(rowInChunk) ? Double.NaN : chunks[j].at8(rowInChunk);
      int pos = di.getCategoricalId(j, val) - di._catOffsets[j];
      for (int cat = 0; cat < len; cat++)
        data.set(idx + cat, 0f); // native memory => explicit zero-ing is necessary
      data.set(idx + pos, 1f);
      idx += len;
    }
    for (int j = 0; j < di._nums; ++j) {
      float val = chunks[di._cats + j].isNA(rowInChunk) ? Float.NaN : (float) chunks[di._cats + j].atd(rowInChunk);
      data.set(idx++, val);
    }
    return idx;
  }

  /****************************************************************************************************************
   *********************************** DMatrix creation for sparse (CSR) matrices *********************************
   ****************************************************************************************************************/

  // setupLocal (whole-frame) entry point — delegates to the combined overload below.
  private static DMatrix csr(Frame f, int[] chunksIds, Vec.Reader[] vecs, Vec.Reader w, Vec.Reader respReader, // for setupLocal
                             int nRows, DataInfo di, float[] resp, float[] weights) throws XGBoostError {
    return csr(null, -1, -1, f, chunksIds, vecs, w, respReader, nRows, di, resp, weights);
  }

  // MR-task (chunk-local) entry point — delegates to the combined overload below.
  private static DMatrix csr(Chunk[] chunks, int weight, int respIdx, // for MR task
                             int nRows, DataInfo di, float[] resp, float[] weights) throws XGBoostError {
    return csr(chunks, weight, respIdx, null, null, null, null, null, nRows, di, resp, weights);
  }

  /**
   * Builds a CSR DMatrix either from chunk-local data ({@code chunks != null}) or from
   * whole-frame readers. Exactly one of the two parameter groups is expected to be populated.
   */
  private static DMatrix csr(Chunk[] chunks, int weight, int respIdx, // for MR task
                             Frame f, int[] chunksIds, Vec.Reader[] vecs, Vec.Reader w, Vec.Reader respReader, // for setupLocal
                             int nRows, DataInfo di, float[] resp, float[] weights) throws XGBoostError {
    DMatrix trainMat;
    int actualRows = 0;
    // CSR:
    //    long[] rowHeaders = new long[] {0, 2, 4, 7}; //offsets
    //    float[] data = new float[] {1f,2f, 4f,3f, 3f,1f,2f}; //non-zeros across each row
    //    int[] colIndex = new int[] {0, 2, 0, 3, 0, 1, 2}; //col index for each non-zero
    long[][] rowHeaders;
    float[][] data;
    int[][] colIndex;
    final SparseMatrixDimensions sparseMatrixDimensions;
    if(null != chunks) {
      // First pass counts elements so the backing arrays can be allocated exactly-sized.
      sparseMatrixDimensions = calculateCSRMatrixDimensions(chunks, di, weight);
      SparseMatrix sparseMatrix = allocateCSRMatrix(sparseMatrixDimensions);
      data = sparseMatrix._sparseData;
      rowHeaders = sparseMatrix._rowIndices;
      colIndex = sparseMatrix._colIndices;
      actualRows = initializeFromChunks(
          chunks, weight, di, actualRows, rowHeaders, data, colIndex, respIdx, resp, weights);
    } else {
      sparseMatrixDimensions = calculateCSRMatrixDimensions(f, chunksIds, vecs, w, di);
      SparseMatrix sparseMatrix = allocateCSRMatrix(sparseMatrixDimensions);
      data = sparseMatrix._sparseData;
      rowHeaders = sparseMatrix._rowIndices;
      colIndex = sparseMatrix._colIndices;
      actualRows = initalizeFromChunkIds(
          f, chunksIds, vecs, w, di, actualRows, rowHeaders, data, colIndex, respReader, resp, weights);
    }
    long size = sparseMatrixDimensions._nonZeroElementsCount;
    int rowHeadersSize = (int) sparseMatrixDimensions._rowIndicesCount;
    trainMat = new DMatrix(rowHeaders, colIndex, data, DMatrix.SparseType.CSR, di.fullN(), rowHeadersSize, size);
    assert trainMat.rowNum() == actualRows;
    return trainMat;
  }

  /**
   * Fills CSR arrays from whole-frame readers; also fills resp/weights via setResponseAndWeight.
   * NOTE(review): method name is misspelled ("initalize") — kept for call-site compatibility.
   *
   * @return the number of rows written
   */
  private static int initalizeFromChunkIds(Frame f, int[] chunks, Vec.Reader[] vecs, Vec.Reader w, DataInfo di,
                                           int actualRows, long[][] rowHeaders, float[][] data, int[][] colIndex,
                                           Vec.Reader respVec, float[] resp, float[] weights) {
    // CSR:
    //    long[] rowHeaders = new long[] {0, 2, 4, 7}; //offsets
    //    float[] data = new float[] {1f,2f, 4f,3f, 3f,1f,2f}; //non-zeros across each row
    //    int[] colIndex = new int[] {0, 2, 0, 3, 0, 1, 2}; //col index for each non-zero

    // extract predictors
    int nonZeroCount = 0;
    int currentRow = 0;
    int currentCol = 0;
    int rwRow = 0;
    for (Integer chunk : chunks) {
      // espc() gives the global start row of each chunk
      for(long i = f.anyVec().espc()[chunk]; i < f.anyVec().espc()[chunk+1]; i++) {
        if (w != null && w.at(i) == 0) continue; // skip zero-weight rows
        final int startNonZeroCount = nonZeroCount; // NOTE(review): unused local
        // enlarge final data arrays by 2x if needed
        for (int j = 0; j < di._cats; ++j) {
          data[currentRow][currentCol] = 1; //one-hot encoding
          if (vecs[j].isNA(i)) {
            colIndex[currentRow][currentCol++] = di.getCategoricalId(j, Float.NaN);
          } else {
            colIndex[currentRow][currentCol++] = di.getCategoricalId(j, vecs[j].at8(i));
          }
          nonZeroCount++;
        }
        for (int j = 0; j < di._nums; ++j) {
          float val = (float) vecs[di._cats + j].at(i);
          if (val != 0) {
            data[currentRow][currentCol] = val;
            colIndex[currentRow][currentCol++] = di._catOffsets[di._catOffsets.length - 1] + j;
            nonZeroCount++;
          }
        }
        rowHeaders[0][++actualRows] = nonZeroCount; // row-end offset for the CSR header
        rwRow = setResponseAndWeight(w, resp, weights, respVec, rwRow, i);
      }
    }
    return actualRows;
  }

  /**
   * Chunk-local counterpart of initalizeFromChunkIds: fills CSR arrays from one Chunk[] set.
   *
   * @return the number of rows written
   */
  private static int initializeFromChunks(Chunk[] chunks, int weight, DataInfo di, int actualRows, long[][] rowHeaders, float[][] data, int[][] colIndex, int respIdx, float[] resp, float[] weights) {
    int nonZeroCount = 0;
    int currentRow = 0;
    int currentCol = 0;
    int rwRow = 0;

    for (int i = 0; i < chunks[0].len(); i++) {
      if (weight != -1 && chunks[weight].atd(i) == 0) continue; // skip zero-weight rows
      for (int j = 0; j < di._cats; ++j) {
        data[currentRow][currentCol] = 1; //one-hot encoding
        if (chunks[j].isNA(i)) {
          colIndex[currentRow][currentCol++] = di.getCategoricalId(j, Float.NaN);
        } else {
          colIndex[currentRow][currentCol++] = di.getCategoricalId(j, chunks[j].at8(i));
        }
        nonZeroCount++;
      }
      for (int j = 0; j < di._nums; ++j) {
        float val = (float) chunks[di._cats + j].atd(i);
        if (val != 0) {
          data[currentRow][currentCol] = val;
          colIndex[currentRow][currentCol++] = di._catOffsets[di._catOffsets.length - 1] + j;
          nonZeroCount++;
        }
      }
      rowHeaders[0][++actualRows] = nonZeroCount; // row-end offset for the CSR header
      rwRow = setResponseAndWeight(chunks, respIdx, weight, resp, weights, rwRow, i);
    }
    return actualRows;
  }

  // One non-zero matrix element: global row position and value.
  static class SparseItem {
    int pos;
    double val;
  }
/****************************************************************************************************************
 *********************************** DMatrix creation for sparse (CSC) matrices *********************************
 ****************************************************************************************************************/

  // MR-task (chunk-local) entry point — delegates to the combined overload below.
  private static DMatrix csc(Chunk[] chunks, int weight,
                             long nRows, DataInfo di, float[] resp, float[] weights) throws XGBoostError {
    return csc(chunks, weight, null, null, null, null, nRows, di, resp, weights);
  }

  // setupLocal (whole-frame) entry point — delegates to the combined overload below.
  private static DMatrix csc(Frame f, int[] chunksIds, Vec.Reader w, Vec.Reader respReader,
                             long nRows, DataInfo di, float[] resp, float[] weights) throws XGBoostError {
    return csc(null, -1, f, chunksIds, w, respReader, nRows, di, resp, weights);
  }

  /**
   * Builds a CSC (column-compressed) DMatrix; numeric columns only (nCols == di._nums).
   * Exactly one of the two parameter groups (chunks vs frame readers) is expected to be set.
   */
  private static DMatrix csc(Chunk[] chunks, int weight, // for MR tasks
                             Frame f, int[] chunksIds, Vec.Reader w, Vec.Reader respReader, // for setupLocal computation
                             long nRows, DataInfo di, float[] resp, float[] weights) throws XGBoostError {
    DMatrix trainMat;
    // CSC:
    //    long[] colHeaders = new long[] {0, 3, 4, 6, 7}; //offsets
    //    float[] data = new float[] {1f,4f,3f, 1f, 2f,2f, 3f}; //non-zeros down each column
    //    int[] rowIndex = new int[] {0,1,2, 2, 0, 2, 1}; //row index for each non-zero

    int nCols = di._nums;

    List<SparseItem>[] col = new List[nCols]; //TODO: use more efficient storage (no GC)
    // allocate
    for (int i=0;i<nCols;++i) {
      col[i] = new ArrayList<>((int)Math.min(nRows, 10000));
    }

    // collect non-zeros
    int nzCount = 0;
    if(null != chunks) {
      nzCount = getNzCount(chunks, weight, nCols, col, nzCount);
    } else {
      nzCount = getNzCount(f, chunksIds, w, nCols, col, nzCount);
    }

    int currentRow = 0;
    int currentCol = 0;
    int nz = 0;
    long[][] colHeaders = new long[1][nCols + 1];
    float[][] data = new float[getDataRows(chunks, f, chunksIds, di.fullN())][nzCount];
    int[][] rowIndex = new int[1][nzCount];
    int rwRow = 0;
    // fill data for DMatrix
    for (int i=0;i<nCols;++i) { //TODO: parallelize over columns
      List<SparseItem> sparseCol = col[i];
      colHeaders[0][i] = nz;
      enlargeTables(data, rowIndex, sparseCol.size(), currentRow, currentCol);

      for (int j=0;j<sparseCol.size();++j) {
        if(currentCol == ARRAY_MAX) { // spill into the next backing-array row
          currentCol = 0;
          currentRow++;
        }

        SparseItem si = sparseCol.get(j);
        rowIndex[currentRow][currentCol] = si.pos;
        data[currentRow][currentCol] = (float)si.val;
        assert(si.val != 0);
        assert(!Double.isNaN(si.val));
//        assert(weight == -1 || chunks[weight].atd((int)(si.pos - chunks[weight].start())) != 0);
        nz++;
        currentCol++;

        // Do only once
        // NOTE(review): assumes column 0 has an entry for every surviving row — confirm.
        if(0 == i) {
          rwRow = setResponseAndWeight(w, resp, weights, respReader, rwRow, j);
        }
      }
    }
    colHeaders[0][nCols] = nz;

    // Trim the final backing-array rows to the elements actually written.
    // NOTE(review): nz % ARRAY_MAX is 0 when nz is an exact multiple of ARRAY_MAX,
    // which would truncate a full row to empty — confirm intended behavior at that boundary.
    data[data.length - 1] = Arrays.copyOf(data[data.length - 1], nz % ARRAY_MAX);
    rowIndex[rowIndex.length - 1] = Arrays.copyOf(rowIndex[rowIndex.length - 1], nz % ARRAY_MAX);

    int actualRows = countUnique(rowIndex);
    trainMat = new DMatrix(colHeaders, rowIndex, data, DMatrix.SparseType.CSC, actualRows, di.fullN(), nz);
    assert trainMat.rowNum() == actualRows;
    assert trainMat.rowNum() == rwRow;
    return trainMat;
  }

  /**
   * Counts distinct values across a jagged int[][].
   * NOTE(review): the inner loop stops at length - 1 (last element of each row is skipped)
   * and the count starts at 1 — looks intentional only if the last slot is a sentinel; confirm.
   */
  private static int countUnique(int[][] array) {
    if (array.length == 0) {
      return 0;
    }

    BitSet values = new BitSet(ARRAY_MAX);

    int count = 1;
    for (int i = 0; i < array.length; i++) {
      for (int j = 0; j < array[i].length - 1; j++) {
        if (!values.get(array[i][j])) {
          count++;
          values.set(array[i][j]);
        }
      }
    }
    return count;
  }

  // Assumes both matrices are getting filled at the same rate and will require the same amount of space
  // NOTE(review): currentRow/currentCol are modified locally only — the caller's cursors are
  // unaffected; and `cols` is adjusted after currentCol was already reset to 0. Confirm both.
  private static void enlargeTables(float[][] data, int[][] rowIndex, int cols, int currentRow, int currentCol) {
    while (data[currentRow].length < currentCol + cols) {
      if(data[currentRow].length == ARRAY_MAX) {
        currentCol = 0;
        cols -= (data[currentRow].length - currentCol);
        currentRow++;
        data[currentRow] = malloc4f(ALLOCATED_ARRAY_LEN);
        rowIndex[currentRow] = malloc4(ALLOCATED_ARRAY_LEN);
      } else {
        int newLen = (int) Math.min((long) data[currentRow].length << 1L, (long) ARRAY_MAX);
        data[currentRow] = Arrays.copyOf(data[currentRow], newLen);
        rowIndex[currentRow] = Arrays.copyOf(rowIndex[currentRow], newLen);
      }
    }
  }

  /**
   * Creates a {@link SparseMatrix} object with pre-instantiated backing arrays for row-oriented
   * compression schema (CSR). All backing arrays are allocated using MemoryManager. Arrays are
   * split into ARRAY_MAX-sized segments; the last segment is exactly-sized.
   *
   * @param sparseMatrixDimensions Dimensions of a sparse matrix
   * @return An instance of {@link SparseMatrix} with pre-allocated backing arrays.
   */
  private static SparseMatrix allocateCSRMatrix(SparseMatrixDimensions sparseMatrixDimensions) {
    // Number of rows in non-zero elements matrix
    final int dataRowsNumber = (int) (sparseMatrixDimensions._nonZeroElementsCount / ARRAY_MAX);
    final int dataLastRowSize = (int)(sparseMatrixDimensions._nonZeroElementsCount % ARRAY_MAX);
    //Number of rows in matrix with row indices
    final int rowIndicesRowsNumber = (int)(sparseMatrixDimensions._rowIndicesCount / ARRAY_MAX);
    final int rowIndicesLastRowSize = (int)(sparseMatrixDimensions._rowIndicesCount % ARRAY_MAX);
    // Number of rows in matrix with column indices of sparse matrix non-zero elements
    final int colIndicesRowsNumber = (int)(sparseMatrixDimensions._nonZeroElementsCount / ARRAY_MAX);
    final int colIndicesLastRowSize = (int)(sparseMatrixDimensions._nonZeroElementsCount % ARRAY_MAX);

    // Sparse matrix elements (non-zero elements)
    float[][] sparseData = new float[dataLastRowSize == 0 ? dataRowsNumber : dataRowsNumber + 1][];
    for (int sparseDataRow = 0; sparseDataRow < sparseData.length - 1; sparseDataRow++) {
      sparseData[sparseDataRow] = malloc4f(ARRAY_MAX);
    }
    if (dataLastRowSize > 0) {
      sparseData[sparseData.length - 1] = malloc4f(dataLastRowSize);
    }
    // Row indices
    long[][] rowIndices = new long[rowIndicesLastRowSize == 0 ? rowIndicesRowsNumber : rowIndicesRowsNumber + 1][];
    for (int rowIndicesRow = 0; rowIndicesRow < rowIndices.length - 1; rowIndicesRow++) {
      rowIndices[rowIndicesRow] = malloc8(ARRAY_MAX);
    }
    if (rowIndicesLastRowSize > 0) {
      rowIndices[rowIndices.length - 1] = malloc8(rowIndicesLastRowSize);
    }

    // Column indices
    int[][] colIndices = new int[colIndicesLastRowSize == 0 ? colIndicesRowsNumber : colIndicesRowsNumber + 1][];
    for (int colIndicesRow = 0; colIndicesRow < colIndices.length - 1; colIndicesRow++) {
      colIndices[colIndicesRow] = malloc4(ARRAY_MAX);
    }
    if (colIndicesLastRowSize > 0) {
      colIndices[colIndices.length - 1] = malloc4(colIndicesLastRowSize);
    }

    // Wrap backing arrays into a SparseMatrix object and return them
    return new SparseMatrix(sparseData, rowIndices, colIndices);
  }

  /**
   * Counting pass over chunk-local data: number of non-zero elements (categoricals always
   * count as 1 per column) and number of surviving (non-zero-weight) rows.
   * The row count is pre-incremented because the CSR header needs nRows + 1 offsets.
   */
  private static SparseMatrixDimensions calculateCSRMatrixDimensions(Chunk[] chunks, DataInfo di, int weightColIndex){
    long nonZeroElementsCount = 0;
    long rowIndicesCount = 0;

    for (int i = 0; i < chunks[0].len(); i++) {
      // Rows with zero weights are going to be ignored
      if (weightColIndex != -1 && chunks[weightColIndex].atd(i) == 0) continue;

      nonZeroElementsCount += di._cats; // each categorical contributes exactly one one-hot 1

      for (int j = 0; j < di._nums; ++j) {
        float val = (float) chunks[di._cats + j].atd(i);
        if (val != 0) {
          nonZeroElementsCount++;
        }
      }
      rowIndicesCount++;
    }

    return new SparseMatrixDimensions(nonZeroElementsCount, ++rowIndicesCount);
  }

  /**
   * Whole-frame counterpart of the counting pass above, driven by Vec.Readers over global
   * row indices.
   */
  private static SparseMatrixDimensions calculateCSRMatrixDimensions(Frame f, int[] chunks, Vec.Reader[] vecs, Vec.Reader w, DataInfo di) {
    long nonZeroElementsCount = 0;
    long rowIndicesCount = 0;

    for (Integer chunk : chunks) {
      for (long i = f.anyVec().espc()[chunk]; i < f.anyVec().espc()[chunk + 1]; i++) {
        if (w != null && w.at(i) == 0) continue; // zero-weight rows are ignored

        nonZeroElementsCount+= di._cats;

        for (int j = 0; j < di._nums; ++j) {
          float val = (float) vecs[di._cats + j].at(i);
          if (val != 0) {
            nonZeroElementsCount++;
          }
        }
        rowIndicesCount++;
      }
    }

    return new SparseMatrixDimensions(nonZeroElementsCount, ++rowIndicesCount);
  }

  /**
   * Dimensions of a Sparse Matrix
   */
  private static final class SparseMatrixDimensions{
    private final long _nonZeroElementsCount;
    private final long _rowIndicesCount;

    /**
     * Constructs an instance of {@link SparseMatrixDimensions}
     *
     * @param nonZeroElementsCount Number of non-zero elements (number of elements in sparse matrix). Also
     *                             number of column indices.
     * @param rowIndicesCount      Number of indices of elements rows begin with
     */
    public SparseMatrixDimensions(long nonZeroElementsCount, long rowIndicesCount) {
      _nonZeroElementsCount = nonZeroElementsCount;
      _rowIndicesCount = rowIndicesCount;
    }
  }

  /**
   * Sparse Matrix representation for XGBoost
   */
  private static final class SparseMatrix {
    private final float[][] _sparseData;
    private final long[][] _rowIndices;
    private final int[][] _colIndices;

    /**
     * Constructs a {@link SparseMatrix} instance
     *
     * @param sparseData Non-zero data of a sparse matrix
     * @param rowIndices Indices to elements in sparseData rows begin with
     * @param colIndices Column indices of elements in sparseData
     */
    public SparseMatrix(final float[][] sparseData, final long[][] rowIndices, final int[][] colIndices) {
      _sparseData = sparseData;
      _rowIndices = rowIndices;
      _colIndices = colIndices;
    }
  }

  /**
   * Allocates an exactly-sized backing matrix for XGBoost's {@link DMatrix}.
   * The backing array created by this method does not contain any actual data and needs to be filled.
   *
   * @param rowCount Number of rows to allocate data for
   * @param dataInfo An instance of {@link DataInfo}
   * @return An exactly-sized backing matrix for XGBoost's {@link DMatrix} to be filled with data.
   */
  private static BigDenseMatrix allocateDenseMatrix(final int rowCount, final DataInfo dataInfo) {
    return new BigDenseMatrix(rowCount, dataInfo.fullN());
  }

  /**
   * Builds the expanded (one-hot) feature-name list and a parallel flag array marking which
   * expanded columns came from categorical columns (those occupy the first numCatCols slots).
   */
  public static FeatureProperties assembleFeatureNames(final DataInfo di) {
    String[] coefnames = di.coefNames();
    assert (coefnames.length == di.fullN());
    int numCatCols = di._catOffsets[di._catOffsets.length - 1];

    String[] featureNames = new String[di.fullN()];
    boolean[] oneHotEncoded = new boolean[di.fullN()];
    for (int i = 0; i < di.fullN(); ++i) {
      featureNames[i] = coefnames[i];
      if (i < numCatCols) {
        oneHotEncoded[i] = true;
      }
    }
    return new FeatureProperties(featureNames, oneHotEncoded);
  }

  // Expanded feature names + one-hot flags, as produced by assembleFeatureNames.
  static class FeatureProperties {
    public String[] _names;
    public boolean[] _oneHotEncoded;
    public FeatureProperties(String[] names, boolean[] oneHotEncoded) {
      _names = names;
      _oneHotEncoded = oneHotEncoded;
    }
  }

  /**
   * Parses an XGBoost text model dump into per-feature gain/cover/frequency scores.
   * Each split node looks like "...[featureName<threshold] ...gain=...,cover=...";
   * scores for the same feature id are accumulated across all trees.
   */
  static Map<String, FeatureScore> parseFeatureScores(String[] modelDump) {
    Map<String, FeatureScore> featureScore = new HashMap<>();
    for (String tree : modelDump) {
      for (String node : tree.split("\n")) {
        String[] array = node.split("\\[", 2);
        if (array.length < 2)
          continue; // leaf node — no split condition
        String[] content = array[1].split("\\]", 2);
        if (content.length < 2)
          continue;
        String fid = content[0].split("<")[0];
        FeatureScore fs = new FeatureScore();
        String[] keyValues = content[1].split(",");
        for (String keyValue : keyValues) {
          if (keyValue.startsWith(FeatureScore.GAIN_KEY + "=")) {
            fs._gain = Float.parseFloat(keyValue.substring(FeatureScore.GAIN_KEY.length() + 1));
          } else if (keyValue.startsWith(FeatureScore.COVER_KEY + "=")) {
            fs._cover = Float.parseFloat(keyValue.substring(FeatureScore.COVER_KEY.length() + 1));
          }
        }
        fs._frequency = 1;
        if (featureScore.containsKey(fid)) {
          featureScore.get(fid).add(fs);
        } else {
          featureScore.put(fid, fs);
        }
      }
    }
    return featureScore;
  }

  // Accumulated variable-importance components for one feature.
  static class FeatureScore {
    static final String GAIN_KEY = "gain";
    static final String COVER_KEY = "cover";
    int _frequency;
    float _gain;
    float _cover;
    void add(FeatureScore fs) {
      _frequency += fs._frequency;
      _gain += fs._gain;
      _cover += fs._cover;
    }
  }

}
/** * Copyright 2014 Alexey Ragozin * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gridkit.jvmtool; import java.io.IOException; import java.lang.reflect.Array; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.management.Attribute; import javax.management.InstanceNotFoundException; import javax.management.IntrospectionException; import javax.management.MBeanAttributeInfo; import javax.management.MBeanException; import javax.management.MBeanInfo; import javax.management.MBeanOperationInfo; import javax.management.MBeanParameterInfo; import javax.management.MBeanServerConnection; import javax.management.ObjectName; import javax.management.ReflectionException; import javax.management.openmbean.CompositeData; import javax.management.openmbean.TabularData; public class MBeanHelper { private MBeanServerConnection mserver; public MBeanHelper(MBeanServerConnection connection) { this.mserver = connection; } public String get(ObjectName bean, String attr) throws Exception { MBeanInfo mbinfo = mserver.getMBeanInfo(bean); MBeanAttributeInfo ai = attrInfo(mbinfo, attr); if (ai == null) { throw new IllegalArgumentException("No such attribute '" + attr + "'"); } if (!ai.isReadable()) { throw new IllegalArgumentException("Attribute '" + attr + "' is write-only"); } Object v = mserver.getAttribute(bean, attr); String type = ai.getType(); String text = format(v, type); return text; } public void 
set(ObjectName bean, String attr, String value) throws Exception { MBeanInfo mbinfo = mserver.getMBeanInfo(bean); MBeanAttributeInfo ai = attrInfo(mbinfo, attr); if (ai == null) { throw new IllegalArgumentException("No such attribute '" + attr + "'"); } if (!ai.isWritable()) { throw new IllegalArgumentException("Attribute '" + attr + "' is not writeable"); } String type = ai.getType(); Object ov = convert(value, type); mserver.setAttribute(bean, new Attribute(attr, ov)); } public String invoke(ObjectName bean, String operation, String... params) throws InstanceNotFoundException, IntrospectionException, ReflectionException, IOException, MBeanException { MBeanInfo mbinfo = mserver.getMBeanInfo(bean); MBeanOperationInfo op = null; for(MBeanOperationInfo oi: mbinfo.getOperations()) { if (oi.getName().equalsIgnoreCase(operation) && oi.getSignature().length == params.length) { if (op != null) { throw new IllegalArgumentException("Ambiguous " + operation + "/" + params.length + " operatition signature for " + bean); } op = oi; } } if (op == null) { throw new IllegalArgumentException("Operation " + operation + "/" + params.length + " not found for " + bean); } Object[] args = new Object[params.length]; String[] sig = new String[params.length]; for(int i = 0; i != params.length; ++i) { args[i] = convert(params[i], op.getSignature()[i].getType()); sig[i] = op.getSignature()[i].getType(); } return format(mserver.invoke(bean, op.getName(), args, sig), op.getReturnType()); } private String format(Object v, String type) { if (type.equals("void")) { return null; } else if (v instanceof CompositeData[]) { CompositeData[] td = (CompositeData[]) v; if (td.length == 0) { return ""; } List<String> header = new ArrayList<String>(); for(String f: td[0].getCompositeType().keySet()) { if (!header.contains(f)) { header.add(f); } } List<String[]> content = new ArrayList<String[]>(); content.add(header.toArray(new String[0])); for(Object row: td) { content.add(formatRow((CompositeData)row, 
header)); } return formatTable(content, 40, true); } else if (v instanceof TabularData) { TabularData td = (TabularData) v; td.getTabularType().getIndexNames(); List<String> header = new ArrayList<String>(td.getTabularType().getIndexNames()); for(String f: td.getTabularType().getRowType().keySet()) { if (!header.contains(f)) { header.add(f); } } List<String[]> content = new ArrayList<String[]>(); content.add(header.toArray(new String[0])); for(Object row: td.values()) { content.add(formatRow((CompositeData)row, header)); } return formatTable(content, 40, true); } else if (v instanceof CompositeData) { CompositeData cd = (CompositeData)v; List<String[]> content = new ArrayList<String[]>(); for(String field: cd.getCompositeType().keySet()) { String val = formatLine(cd.get(field), cd.getCompositeType().getType(field).getClassName()); content.add(new String[]{field + ": ", val}); } return formatTable(content, 1000, false); } else { return formatLine(v, type); } } private String formatTable(List<String[]> content, int maxCell, boolean table) { int[] width = new int[content.get(0).length]; for(String[] row: content) { for(int i = 0; i != row.length; ++i) { width[i] = Math.min(Math.max(width[i], row[i].length()), maxCell); } } StringBuilder sb = new StringBuilder(); boolean header = table; for(String[] row: content) { for(int i = 0; i != width.length; ++i) { String cell = row[i]; if (cell.length() > width[i]) { cell = cell.substring(0, width[i] - 3) + "..."; } sb.append(cell); for(int s = 0; s != width[i] - cell.length(); ++s) { sb.append(' '); } if (table) { sb.append('|'); } } if (table) { sb.setLength(sb.length() - 1); } sb.append('\n'); if (header) { header = false; for(int n: width) { for(int i = 0; i != n; ++i) { sb.append('-'); } sb.append('+'); } sb.setLength(sb.length() - 1); sb.append('\n'); } } return sb.toString(); } private String formatLine(Object v, String type) { if (v instanceof TabularData) { TabularData td = (TabularData)v; StringBuilder sb = new 
StringBuilder(); for(Object c: td.values()) { sb.append(formatLine(c, td.getTabularType().getRowType().getClassName())); sb.append(","); } if (sb.length() > 0) { sb.setLength(sb.length() - 1); } return sb.toString(); } if (v instanceof CompositeData[]) { CompositeData[] td = (CompositeData[])v; StringBuilder sb = new StringBuilder(); for(Object c: td) { sb.append(formatLine(c, ((CompositeData)c).getCompositeType().getClassName())); sb.append(","); } if (sb.length() > 0) { sb.setLength(sb.length() - 1); } return sb.toString(); } else if (v instanceof CompositeData) { CompositeData cdata = (CompositeData) v; StringBuilder sb = new StringBuilder(); sb.append("{"); for(String attr: cdata.getCompositeType().keySet()) { sb.append(attr).append("="); sb.append(formatLine(cdata.get(attr), cdata.getCompositeType().getType(attr).getClassName())); sb.append(','); } if (sb.length() > 1) { sb.setLength(sb.length() - 1); } sb.append("}"); return sb.toString(); } else if (v instanceof Object[]) { return Arrays.toString((Object[])v); } else if (v instanceof boolean[]) { return Arrays.toString((boolean[])v); } else if (v instanceof byte[]) { return Arrays.toString((byte[])v); } else if (v instanceof char[]) { return Arrays.toString((char[])v); } else if (v instanceof short[]) { return Arrays.toString((short[])v); } else if (v instanceof int[]) { return Arrays.toString((int[])v); } else if (v instanceof long[]) { return Arrays.toString((long[])v); } else if (v instanceof float[]) { return Arrays.toString((float[])v); } else if (v instanceof double[]) { return Arrays.toString((double[])v); } else { return String.valueOf(v); } } private String[] formatRow(CompositeData row, List<String> header) { String[] text = new String[header.size()]; for(int i = 0; i != text.length; ++i) { String attr = header.get(i); text[i] = formatLine(row.get(attr), row.getCompositeType().getType(attr).getClassName()); } return text; } private Object convert(String value, String type) { if 
(type.equals("java.lang.String")) { return value; } if (type.equals("boolean")) { return Boolean.valueOf(value); } else if (type.equals("byte")) { return Byte.valueOf(value); } else if (type.equals("short")) { return Short.valueOf(value); } else if (type.equals("char")) { if (value.length() == 1) { return value.charAt(0); } else { throw new IllegalArgumentException("Cannot convert '" + value + "' to " + type); } } else if (type.equals("int")) { return Integer.valueOf(value); } else if (type.equals("long")) { return Long.valueOf(value); } else if (type.equals("float")) { return Float.valueOf(value); } else if (type.equals("double")) { return Double.valueOf(value); } else if (type.startsWith("[")) { String[] elements = value.split("[,]"); Object array = ARRAY_MAP.get(type); if (array == null) { throw new IllegalArgumentException("Cannot convert '" + value + "' to " + type); } array = Array.newInstance(array.getClass().getComponentType(), elements.length); String etype = array.getClass().getComponentType().getName(); for(int i = 0; i != elements.length; ++i) { Array.set(array, i, convert(elements[i], etype)); } return array; } throw new IllegalArgumentException("Cannot convert '" + value + "' to " + type); } private MBeanAttributeInfo attrInfo(MBeanInfo mbinfo, String attr) { for(MBeanAttributeInfo ai: mbinfo.getAttributes()) { if (ai.getName().equals(attr)) { return ai; } } return null; } public String describe(ObjectName bean) throws Exception { MBeanInfo mbinfo = mserver.getMBeanInfo(bean); StringBuilder sb = new StringBuilder(); sb.append(bean); sb.append('\n'); sb.append(mbinfo.getClassName()); sb.append('\n'); sb.append(" - " + mbinfo.getDescription()); sb.append('\n'); for(MBeanAttributeInfo ai: mbinfo.getAttributes()) { sb.append(" (A) "); sb.append(ai.getName()).append(" : ").append(toPrintableType(ai.getType())).append(""); if (!ai.isReadable()) { sb.append(" - WRITEONLY"); } else if (ai.isWritable()) { sb.append(" - WRITEABLE"); } sb.append('\n'); if 
(!ai.getName().equals(ai.getDescription())) { sb.append(" - " + ai.getDescription()); sb.append('\n'); } } for (MBeanOperationInfo oi: mbinfo.getOperations()) { sb.append(" (O) "); sb.append(oi.getName()).append("("); for(MBeanParameterInfo pi: oi.getSignature()) { String name = pi.getName(); String type = toPrintableType(pi.getType()); sb.append(type).append(' ').append(name).append(", "); } if (oi.getSignature().length > 0) { sb.setLength(sb.length() - 2); } sb.append(") : ").append(toPrintableType(oi.getReturnType())); sb.append('\n'); if (!oi.getName().equals(oi.getDescription())) { sb.append(" - " + oi.getDescription()); sb.append('\n'); } } return sb.toString(); } static Map<String, Object> ARRAY_MAP = new HashMap<String, Object>(); static { ARRAY_MAP.put("[Z", new boolean[0]); ARRAY_MAP.put("[B", new byte[0]); ARRAY_MAP.put("[S", new short[0]); ARRAY_MAP.put("[C", new char[0]); ARRAY_MAP.put("[I", new int[0]); ARRAY_MAP.put("[J", new long[0]); ARRAY_MAP.put("[F", new float[0]); ARRAY_MAP.put("[D", new double[0]); ARRAY_MAP.put("[Ljava.lang.String;", new String[0]); } static Map<String, String> TYPE_MAP = new HashMap<String, String>(); static { TYPE_MAP.put("java.lang.String", "String"); TYPE_MAP.put("javax.management.openmbean.CompositeData", "CompositeData"); TYPE_MAP.put("javax.management.openmbean.TabularData", "TabularData"); TYPE_MAP.put("[Z", "boolean[]"); TYPE_MAP.put("[B", "byte[]"); TYPE_MAP.put("[S", "short[]"); TYPE_MAP.put("[C", "char[]"); TYPE_MAP.put("[I", "int[]"); TYPE_MAP.put("[J", "long[]"); TYPE_MAP.put("[F", "float[]"); TYPE_MAP.put("[D", "double[]"); } static String toPrintableType(String type) { if (TYPE_MAP.containsKey(type)) { return TYPE_MAP.get(type); } else if (type.startsWith("[L")) { return toPrintableType(type.substring(2, type.length() -1)) + "[]"; } else { return type; } } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.api;

import java.io.IOException;

import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Stable;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.io.retry.Idempotent;
import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest;
import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse;
import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest;
import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenRequest;
import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenResponse;
import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest;
import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.api.records.YarnClusterMetrics;
import org.apache.hadoop.yarn.exceptions.YarnException;

/**
 * <p>The protocol between clients and the <code>ResourceManager</code>
 * to submit/abort jobs and to get information on applications, cluster metrics,
 * nodes, queues and ACLs.</p>
 */
@Public
@Stable
public interface ApplicationClientProtocol {
  /**
   * <p>The interface used by clients to obtain a new {@link ApplicationId} for
   * submitting new applications.</p>
   *
   * <p>The <code>ResourceManager</code> responds with a new, monotonically
   * increasing, {@link ApplicationId} which is used by the client to submit
   * a new application.</p>
   *
   * <p>The <code>ResourceManager</code> also responds with details such
   * as maximum resource capabilities in the cluster as specified in
   * {@link GetNewApplicationResponse}.</p>
   *
   * @param request request to get a new <code>ApplicationId</code>
   * @return response containing the new <code>ApplicationId</code> to be used
   * to submit an application
   * @throws YarnException
   * @throws IOException
   * @see #submitApplication(SubmitApplicationRequest)
   */
  @Public
  @Stable
  @Idempotent
  public GetNewApplicationResponse getNewApplication(
      GetNewApplicationRequest request)
  throws YarnException, IOException;

  /**
   * <p>The interface used by clients to submit a new application to the
   * <code>ResourceManager.</code></p>
   *
   * <p>The client is required to provide details such as queue,
   * {@link Resource} required to run the <code>ApplicationMaster</code>,
   * the equivalent of {@link ContainerLaunchContext} for launching
   * the <code>ApplicationMaster</code> etc. via the
   * {@link SubmitApplicationRequest}.</p>
   *
   * <p>Currently the <code>ResourceManager</code> sends an immediate (empty)
   * {@link SubmitApplicationResponse} on accepting the submission and throws
   * an exception if it rejects the submission. However, this call needs to be
   * followed by {@link #getApplicationReport(GetApplicationReportRequest)}
   * to make sure that the application gets properly submitted.</p>
   *
   * <p>During the submission process, it checks whether the application
   * already exists. If the application exists, it will simply return
   * SubmitApplicationResponse.</p>
   *
   * <p>In secure mode, the <code>ResourceManager</code> verifies access to
   * queues etc. before accepting the application submission.</p>
   *
   * @param request request to submit a new application
   * @return (empty) response on accepting the submission
   * @throws YarnException
   * @throws IOException
   * @throws InvalidResourceRequestException
   *           The exception is thrown when a {@link ResourceRequest} is out of
   *           the range of the configured lower and upper resource boundaries.
   * @see #getNewApplication(GetNewApplicationRequest)
   */
  @Public
  @Stable
  @Idempotent
  public SubmitApplicationResponse submitApplication(
      SubmitApplicationRequest request)
  throws YarnException, IOException;

  /**
   * <p>The interface used by clients to request the
   * <code>ResourceManager</code> to abort submitted application.</p>
   *
   * <p>The client, via {@link KillApplicationRequest} provides the
   * {@link ApplicationId} of the application to be aborted.</p>
   *
   * <p>In secure mode, the <code>ResourceManager</code> verifies access to the
   * application, queue etc. before terminating the application.</p>
   *
   * <p>Currently, the <code>ResourceManager</code> returns an empty response
   * on success and throws an exception on rejecting the request.</p>
   *
   * @param request request to abort a submitted application
   * @return <code>ResourceManager</code> returns an empty response
   *         on success and throws an exception on rejecting the request
   * @throws YarnException
   * @throws IOException
   * @see #getQueueUserAcls(GetQueueUserAclsInfoRequest)
   */
  @Public
  @Stable
  @Idempotent
  public KillApplicationResponse forceKillApplication(
      KillApplicationRequest request)
  throws YarnException, IOException;

  /**
   * <p>The interface used by clients to get a report of an Application from
   * the <code>ResourceManager</code>.</p>
   *
   * <p>The client, via {@link GetApplicationReportRequest} provides the
   * {@link ApplicationId} of the application.</p>
   *
   * <p>In secure mode, the <code>ResourceManager</code> verifies access to the
   * application, queue etc. before accepting the request.</p>
   *
   * <p>The <code>ResourceManager</code> responds with a
   * {@link GetApplicationReportResponse} which includes the
   * {@link ApplicationReport} for the application.</p>
   *
   * <p>If the user does not have <code>VIEW_APP</code> access then the
   * following fields in the report will be set to stubbed values:
   * <ul>
   *   <li>host - set to "N/A"</li>
   *   <li>RPC port - set to -1</li>
   *   <li>client token - set to "N/A"</li>
   *   <li>diagnostics - set to "N/A"</li>
   *   <li>tracking URL - set to "N/A"</li>
   *   <li>original tracking URL - set to "N/A"</li>
   *   <li>resource usage report - all values are -1</li>
   * </ul>
   *
   * @param request request for an application report
   * @return application report
   * @throws YarnException
   * @throws IOException
   */
  @Public
  @Stable
  @Idempotent
  public GetApplicationReportResponse getApplicationReport(
      GetApplicationReportRequest request)
  throws YarnException, IOException;

  /**
   * <p>The interface used by clients to get metrics about the cluster from
   * the <code>ResourceManager</code>.</p>
   *
   * <p>The <code>ResourceManager</code> responds with a
   * {@link GetClusterMetricsResponse} which includes the
   * {@link YarnClusterMetrics} with details such as number of current
   * nodes in the cluster.</p>
   *
   * @param request request for cluster metrics
   * @return cluster metrics
   * @throws YarnException
   * @throws IOException
   */
  @Public
  @Stable
  @Idempotent
  public GetClusterMetricsResponse getClusterMetrics(
      GetClusterMetricsRequest request)
  throws YarnException, IOException;

  /**
   * <p>The interface used by clients to get a report of Applications
   * matching the filters defined by {@link GetApplicationsRequest}
   * in the cluster from the <code>ResourceManager</code>.</p>
   *
   * <p>The <code>ResourceManager</code> responds with a
   * {@link GetApplicationsResponse} which includes the
   * {@link ApplicationReport} for the applications.</p>
   *
   * <p>If the user does not have <code>VIEW_APP</code> access for an
   * application then the corresponding report will be filtered as
   * described in {@link #getApplicationReport(GetApplicationReportRequest)}.
   * </p>
   *
   * @param request request for report on applications
   * @return report on applications matching the given application types
   *         defined in the request
   * @throws YarnException
   * @throws IOException
   * @see GetApplicationsRequest
   */
  @Public
  @Stable
  @Idempotent
  public GetApplicationsResponse getApplications(
      GetApplicationsRequest request)
  throws YarnException, IOException;

  /**
   * <p>The interface used by clients to get a report of all nodes
   * in the cluster from the <code>ResourceManager</code>.</p>
   *
   * <p>The <code>ResourceManager</code> responds with a
   * {@link GetClusterNodesResponse} which includes the
   * {@link NodeReport} for all the nodes in the cluster.</p>
   *
   * @param request request for report on all nodes
   * @return report on all nodes
   * @throws YarnException
   * @throws IOException
   */
  @Public
  @Stable
  @Idempotent
  public GetClusterNodesResponse getClusterNodes(
      GetClusterNodesRequest request)
  throws YarnException, IOException;

  /**
   * <p>The interface used by clients to get information about <em>queues</em>
   * from the <code>ResourceManager</code>.</p>
   *
   * <p>The client, via {@link GetQueueInfoRequest}, can ask for details such
   * as used/total resources, child queues, running applications etc.</p>
   *
   * <p>In secure mode, the <code>ResourceManager</code> verifies access before
   * providing the information.</p>
   *
   * @param request request to get queue information
   * @return queue information
   * @throws YarnException
   * @throws IOException
   */
  @Public
  @Stable
  @Idempotent
  public GetQueueInfoResponse getQueueInfo(
      GetQueueInfoRequest request)
  throws YarnException, IOException;

  /**
   * <p>The interface used by clients to get information about <em>queue
   * acls</em> for <em>current user</em> from the <code>ResourceManager</code>.
   * </p>
   *
   * <p>The <code>ResourceManager</code> responds with queue acls for all
   * existing queues.</p>
   *
   * @param request request to get queue acls for <em>current user</em>
   * @return queue acls for <em>current user</em>
   * @throws YarnException
   * @throws IOException
   */
  @Public
  @Stable
  @Idempotent
  public GetQueueUserAclsInfoResponse getQueueUserAcls(
      GetQueueUserAclsInfoRequest request)
  throws YarnException, IOException;

  /**
   * <p>The interface used by clients to get delegation token, enabling the
   * containers to be able to talk to the service using those tokens.</p>
   *
   * <p>The <code>ResourceManager</code> responds with the delegation
   * {@link Token} that can be used by the client to speak to this
   * service.</p>
   *
   * @param request request to get a delegation token for the client.
   * @return delegation token that can be used to talk to this service
   * @throws YarnException
   * @throws IOException
   */
  @Public
  @Stable
  @Idempotent
  public GetDelegationTokenResponse getDelegationToken(
      GetDelegationTokenRequest request)
  throws YarnException, IOException;

  /**
   * Renew an existing delegation {@link Token}.
   *
   * @param request the delegation token to be renewed.
   * @return the new expiry time for the delegation token.
   * @throws YarnException
   * @throws IOException
   */
  @Private
  @Unstable
  @Idempotent
  public RenewDelegationTokenResponse renewDelegationToken(
      RenewDelegationTokenRequest request)
  throws YarnException, IOException;

  /**
   * Cancel an existing delegation {@link Token}.
   *
   * @param request the delegation token to be cancelled.
   * @return an empty response.
   * @throws YarnException
   * @throws IOException
   */
  @Private
  @Unstable
  @Idempotent
  public CancelDelegationTokenResponse cancelDelegationToken(
      CancelDelegationTokenRequest request)
  throws YarnException, IOException;

  /**
   * Move an application to a new queue.
   *
   * @param request the application ID and the target queue
   * @return an empty response
   * @throws YarnException
   * @throws IOException
   */
  @Public
  @Unstable
  @Idempotent
  public MoveApplicationAcrossQueuesResponse moveApplicationAcrossQueues(
      MoveApplicationAcrossQueuesRequest request)
  throws YarnException, IOException;
}
package org.opentosca.yamlconverter.main.UI;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;

import org.opentosca.yamlconverter.main.Parser;
import org.opentosca.yamlconverter.main.utils.CSARUtil;
import org.opentosca.yamlconverter.main.utils.ConstraintUtils;
import org.opentosca.yamlconverter.main.utils.FileUtil;
import org.opentosca.yamlconverter.yamlmodel.yaml.element.Input;

/**
 * A simple User Interface for Console.
 */
public class ConsoleUI {

	/**
	 * The file Util.
	 */
	private static final FileUtil fileutil = new FileUtil();

	/**
	 * Whether the cow says it or not.
	 */
	private static final boolean COW = true;

	/**
	 * Single shared reader for console input.
	 *
	 * BUGFIX: the old code created a fresh {@code BufferedReader} around
	 * {@code System.in} for every prompt; each reader may buffer ahead, so
	 * input typed for a later prompt could be silently swallowed by an
	 * earlier, discarded reader.
	 */
	private static final BufferedReader CONSOLE = new BufferedReader(new InputStreamReader(System.in));

	public static void main(String[] args) {
		cowsay("Hi! This is the TOSCA YAML 2 XML Cowverter! Let's start!");

		final String yaml = readYaml(args);

		// parse it
		// TODO: make use of the interface, i.e. IToscaYamlParser parser = new Parser();
		// TODO: but before that: put methods to interface if necessary
		final Parser parser = new Parser();
		parser.parse(yaml);

		collectInputValues(parser);

		// give results
		// XML
		final String xml = parser.getXML();
		cowsay("I have some results for you!");
		System.out.println("Here is your XML-file:");
		System.out.println(xml);
		saveIfRequested("\nIf you want to save this XML, enter a filename, else just hit ENTER...", xml);

		// XSD
		final String xsd = parser.getXSD();
		if (xsd != null && !xsd.isEmpty()) {
			System.out.println("\nAlso I have an XSD-file for you. Save it as types.xsd!");
			System.out.println(xsd);
			saveIfRequested("\nIf you want to save this XSD, enter a filename, else just hit ENTER...", xsd);
		}

		// CSAR
		final String csarfilename = promptString("\nIf you want to save the results as a CSAR-File, enter a filename, else just hit ENTER...");
		if (csarfilename != null && !csarfilename.isEmpty()) {
			try {
				CSARUtil.createCSAR(parser.getServiceTemplate(), xml, xsd, csarfilename);
			} catch (final IOException e) {
				System.out.println("ERROR: File has not been saved, because of an IOException. Muh..");
			}
		}

		cowsay("Wuhuu! I'm finished with converting. I hope you're happy now! Good Bye!");
		System.out.println("\n\n exiting...");
	}

	/**
	 * Determines the YAML file to convert (from {@code args[0]} or interactively)
	 * and reads it, re-prompting until a file could be read.
	 *
	 * BUGFIX: the old loop re-used a failing {@code args[0]} on every iteration,
	 * looping forever and re-printing the "unknown parameter" warnings each time.
	 * Now the command-line name is tried once, then the user is prompted.
	 *
	 * @param args the command line arguments; only {@code args[0]} is used
	 * @return the YAML file content
	 */
	private static String readYaml(String[] args) {
		boolean useArgs = args.length > 0;
		// warn once about ignored extra arguments
		for (int i = 1; i < args.length; i++) {
			System.out.println("Warning: unknown parameter " + args[i]);
		}
		while (true) {
			final String filename = useArgs ? args[0] : promptString("Where can I find a YAML-file?");
			useArgs = false;
			if (filename == null || filename.isEmpty()) {
				System.out.println("ERROR: Filename cannot be empty! Muh..");
				continue;
			}
			try {
				return fileutil.readYamlResource(filename);
			} catch (final URISyntaxException e) {
				System.out.println("ERROR: Filename not valid! Muh..");
			} catch (final IOException e) {
				System.out.println("ERROR: File could not be read! Muh..");
			} catch (final NullPointerException e) {
				System.out.println("ERROR: File could not be found! Muh..");
			} catch (final RuntimeException e) {
				System.out.println("ERROR: Error while reading files! Muh...");
			}
		}
	}

	/**
	 * Asks the user for every input variable required by the parsed template,
	 * validates each answer against its constraints and hands the collected
	 * values to the parser. Does nothing when no inputs are required.
	 *
	 * Note: an empty answer leaves the variable unset, mirroring the original
	 * behaviour.
	 *
	 * @param parser the parser holding the input requirements
	 */
	private static void collectInputValues(final Parser parser) {
		// TODO: Why can't we just call parser.getInputRequirementsText()? They use the same map...?!
		final Map<String, Input> reqMap = parser.getInputRequirements();
		final Map<String, String> reqText = parser.getInputRequirementsText();
		if (reqMap == null || reqMap.isEmpty()) {
			return;
		}
		// ask for inputs
		cowsay("I need some variables you have to define!");
		final Map<String, String> inputValues = new HashMap<String, String>();
		for (final Entry<String, Input> requirement : reqMap.entrySet()) {
			final String key = requirement.getKey();
			String userinput = promptString("Variable " + key + " (" + reqText.get(key) + "):");
			boolean valid = false;
			while (!valid) {
				valid = true;
				if (userinput != null && !userinput.isEmpty()) {
					valid = ConstraintUtils.matchesConstraints(userinput, requirement.getValue());
					if (valid) {
						inputValues.put(key, userinput);
					} else {
						userinput = promptString("ERROR: User Input did not fulfill the constraints. Try again.\nVariable " + key + " ("
								+ reqText.get(key) + "):");
					}
				}
			}
		}
		parser.setInputValues(inputValues);
	}

	/**
	 * Prompts for a filename and, if one is given, writes {@code content} to it.
	 *
	 * @param prompt the prompt shown to the user
	 * @param content the text to save
	 */
	private static void saveIfRequested(String prompt, String content) {
		final String filename = promptString(prompt);
		if (filename != null && !filename.isEmpty()) {
			try {
				FileUtil.saveStringAsFile(filename, content);
			} catch (final IOException e) {
				System.out.println("ERROR: File has not been saved, because of an IOException. Muh..");
			}
		}
	}

	/**
	 * Uses Systems I/O to prompt the user for a lineinput.
	 *
	 * @param promptString The description for the input.
	 * @return the returned line.
	 */
	private static String promptString(String promptString) {
		System.out.println(promptString);
		try {
			return CONSOLE.readLine();
		} catch (final IOException e) {
			// this suggests there is no console available.
			System.exit(0);
			return null; // unreachable
		}
	}

	/**
	 * Uses System.out to print a message in cowsay, if cowsay is enabled. Else it justs prints the message.
	 *
	 * @param message the message to print
	 */
	private static void cowsay(String message) {
		if (!COW) {
			System.out.println("\n" + message + "\n");
			return;
		}
		// speech bubble borders sized to the message (StringBuilder instead of
		// repeated String concatenation in a loop)
		final StringBuilder top = new StringBuilder(" ");
		final StringBuilder bottom = new StringBuilder(" ");
		for (int i = 0; i < message.length() + 2; i++) {
			top.append('_');
			bottom.append('-');
		}
		final StringBuilder cowsay = new StringBuilder();
		cowsay.append(top).append('\n');
		cowsay.append("< ").append(message).append(" >").append('\n');
		cowsay.append(bottom).append('\n');
		cowsay.append("        \\   ^__^\n");
		cowsay.append("         \\  (oo)\\_______\n");
		cowsay.append("            (__)\\       )\\/\\\n");
		cowsay.append("                ||----w |\n");
		cowsay.append("                ||     ||\n");
		System.out.println(cowsay.toString());
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache;

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.Callable;
import javax.cache.Cache;
import javax.cache.CacheException;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteDataStreamer;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.query.FieldsQueryCursor;
import org.apache.ignite.cache.query.QueryCursor;
import org.apache.ignite.cache.query.SqlFieldsQuery;
import org.apache.ignite.cache.query.SqlQuery;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.configuration.NearCacheConfiguration;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.IgniteKernal;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.persistence.CacheDataRow;
import org.apache.ignite.internal.processors.query.GridQueryCancel;
import org.apache.ignite.internal.processors.query.GridQueryIndexing;
import org.apache.ignite.internal.processors.query.GridQueryProcessor;
import org.apache.ignite.internal.processors.query.GridQueryRowCacheCleaner;
import org.apache.ignite.internal.processors.query.GridQueryTypeDescriptor;
import org.apache.ignite.internal.processors.query.GridRunningQueryInfo;
import org.apache.ignite.internal.processors.query.QueryField;
import org.apache.ignite.internal.processors.query.QueryIndexDescriptorImpl;
import org.apache.ignite.internal.processors.query.SqlClientContext;
import org.apache.ignite.internal.processors.query.schema.SchemaIndexCacheVisitor;
import org.apache.ignite.internal.util.GridSpinBusyLock;
import org.apache.ignite.internal.util.lang.GridCloseableIterator;
import org.apache.ignite.lang.IgniteBiTuple;
import org.apache.ignite.lang.IgniteFuture;
import org.apache.ignite.spi.indexing.IndexingQueryFilter;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.jetbrains.annotations.Nullable;

/**
 * Test checks whether cache initialization error on client side
 * doesn't cause hangs and doesn't impact other caches.
 */
public class IgniteClientCacheInitializationFailTest extends GridCommonAbstractTest {
    /** Failed cache name. */
    private static final String CACHE_NAME = "cache";

    /** Atomic cache name. */
    private static final String ATOMIC_CACHE_NAME = "atomic-cache";

    /** Tx cache name. */
    private static final String TX_CACHE_NAME = "tx-cache";

    /** Near atomic cache name. */
    private static final String NEAR_ATOMIC_CACHE_NAME = "near-atomic-cache";

    /** Near tx cache name. */
    private static final String NEAR_TX_CACHE_NAME = "near-tx-cache";

    /** Failed caches. Registration of these caches is made to fail on client nodes (see {@link FailedIndexing}). */
    private static final Set<String> FAILED_CACHES;

    static {
        Set<String> set = new HashSet<>();

        set.add(ATOMIC_CACHE_NAME);
        set.add(TX_CACHE_NAME);
        set.add(NEAR_ATOMIC_CACHE_NAME);
        set.add(NEAR_TX_CACHE_NAME);

        FAILED_CACHES = Collections.unmodifiableSet(set);
    }

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        // One server node and one client node; the failing indexing SPI is installed on the client only.
        startGrid("server");
        startGrid("client");
    }

    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        stopAllGrids();
    }

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(gridName);

        if (gridName.contains("server")) {
            CacheConfiguration<Integer, String> ccfg1 = new CacheConfiguration<>();

            ccfg1.setIndexedTypes(Integer.class, String.class);
            ccfg1.setName(ATOMIC_CACHE_NAME);
            ccfg1.setAtomicityMode(CacheAtomicityMode.ATOMIC);

            CacheConfiguration<Integer, String> ccfg2 = new CacheConfiguration<>();

            ccfg2.setIndexedTypes(Integer.class, String.class);
            ccfg2.setName(TX_CACHE_NAME);
            ccfg2.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL);

            cfg.setCacheConfiguration(ccfg1, ccfg2);
        }
        else {
            // Client node: replace the query indexing implementation with one that fails on cache registration.
            GridQueryProcessor.idxCls = FailedIndexing.class;

            cfg.setClientMode(true);
        }

        return cfg;
    }

    /**
     * @throws Exception If failed.
     */
    public void testAtomicCacheInitialization() throws Exception {
        checkCacheInitialization(ATOMIC_CACHE_NAME);
    }

    /**
     * @throws Exception If failed.
     */
    public void testTransactionalCacheInitialization() throws Exception {
        checkCacheInitialization(TX_CACHE_NAME);
    }

    /**
     * @throws Exception If failed.
     */
    public void testAtomicNearCacheInitialization() throws Exception {
        checkCacheInitialization(NEAR_ATOMIC_CACHE_NAME);
    }

    /**
     * @throws Exception If failed.
     */
    public void testTransactionalNearCacheInitialization() throws Exception {
        checkCacheInitialization(NEAR_TX_CACHE_NAME);
    }

    /**
     * Verifies that the failing cache throws on the client, is not left half-initialized,
     * and that other caches keep working before and after the failure.
     *
     * @param cacheName Cache name.
     * @throws Exception If failed.
     */
    private void checkCacheInitialization(final String cacheName) throws Exception {
        Ignite client = grid("client");

        checkFailedCache(client, cacheName);

        checkFineCache(client, CACHE_NAME + 1);

        // The failed cache must not remain registered on the client after the initialization error.
        assertNull(((IgniteKernal)client).context().cache().cache(cacheName));

        checkFineCache(client, CACHE_NAME + 2);
    }

    /**
     * Checks that an unrelated cache works normally on the client.
     *
     * @param client Client.
     * @param cacheName Cache name.
     */
    private void checkFineCache(Ignite client, String cacheName) {
        IgniteCache<Integer, String> cache = client.getOrCreateCache(cacheName);

        cache.put(1, "1");

        assertEquals("1", cache.get(1));
    }

    /**
     * Checks that using the failing cache from the client throws {@link CacheException}.
     *
     * @param client Client.
     * @param cacheName Cache name.
     */
    @SuppressWarnings({"ThrowableNotThrown", "ThrowableResultOfMethodCallIgnored"})
    private void checkFailedCache(final Ignite client, final String cacheName) {
        GridTestUtils.assertThrows(log, new Callable<Object>() {
            @Override public Object call() throws Exception {
                IgniteCache<Integer, String> cache;

                // Start cache with near enabled.
                if (NEAR_ATOMIC_CACHE_NAME.equals(cacheName) || NEAR_TX_CACHE_NAME.equals(cacheName)) {
                    CacheConfiguration<Integer, String> ccfg = new CacheConfiguration<Integer, String>(cacheName)
                        .setNearConfiguration(new NearCacheConfiguration<Integer, String>());

                    if (NEAR_TX_CACHE_NAME.equals(cacheName))
                        ccfg.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL);

                    cache = client.getOrCreateCache(ccfg);
                }
                else
                    cache = client.cache(cacheName);

                cache.put(1, "1");

                assertEquals("1", cache.get(1));

                return null;
            }
        }, CacheException.class, null);
    }

    /**
     * To fail on cache start. All methods are no-ops except {@link #registerCache}, which throws
     * for the caches listed in {@link #FAILED_CACHES} when running on a client node.
     */
    private static class FailedIndexing implements GridQueryIndexing {
        /** {@inheritDoc} */
        @Override public void start(GridKernalContext ctx, GridSpinBusyLock busyLock) throws IgniteCheckedException {
            // No-op
        }

        /** {@inheritDoc} */
        @Override public void stop() throws IgniteCheckedException {
            // No-op
        }

        /** {@inheritDoc} */
        @Override public <K, V> QueryCursor<Cache.Entry<K, V>> queryDistributedSql(String schemaName, String cacheName,
            SqlQuery qry, boolean keepBinary) throws IgniteCheckedException {
            return null;
        }

        /** {@inheritDoc} */
        @Override public List<FieldsQueryCursor<List<?>>> querySqlFields(String schemaName, SqlFieldsQuery qry,
            SqlClientContext cliCtx, boolean keepBinary, boolean failOnMultipleStmts, GridQueryCancel cancel) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public List<Long> streamBatchedUpdateQuery(String schemaName, String qry, List<Object[]> params,
            SqlClientContext cliCtx) throws IgniteCheckedException {
            return Collections.emptyList();
        }

        /** {@inheritDoc} */
        @Override public long streamUpdateQuery(String schemaName, String qry, @Nullable Object[] params,
            IgniteDataStreamer<?, ?> streamer) throws IgniteCheckedException {
            return 0;
        }

        /** {@inheritDoc} */
        @Override public <K, V> QueryCursor<Cache.Entry<K, V>> queryLocalSql(String schemaName, String cacheName,
            SqlQuery qry, IndexingQueryFilter filter, boolean keepBinary) throws IgniteCheckedException {
            return null;
        }

        /** {@inheritDoc} */
        @Override public FieldsQueryCursor<List<?>> queryLocalSqlFields(String schemaName, SqlFieldsQuery qry,
            boolean keepBinary, IndexingQueryFilter filter, GridQueryCancel cancel) throws IgniteCheckedException {
            return null;
        }

        /** {@inheritDoc} */
        @Override public <K, V> GridCloseableIterator<IgniteBiTuple<K, V>> queryLocalText(String spaceName,
            String cacheName, String qry, String typeName, IndexingQueryFilter filter) throws IgniteCheckedException {
            return null;
        }

        /** {@inheritDoc} */
        @Override public void dynamicIndexCreate(String spaceName, String tblName, QueryIndexDescriptorImpl idxDesc,
            boolean ifNotExists, SchemaIndexCacheVisitor cacheVisitor) throws IgniteCheckedException {
            // No-op
        }

        /** {@inheritDoc} */
        @Override public void dynamicIndexDrop(String spaceName, String idxName,
            boolean ifExists) throws IgniteCheckedException {
            // No-op
        }

        /** {@inheritDoc} */
        @Override public void dynamicAddColumn(String schemaName, String tblName, List<QueryField> cols,
            boolean ifTblExists, boolean ifColNotExists) throws IgniteCheckedException {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public void dynamicDropColumn(String schemaName, String tblName, List<String> cols,
            boolean ifTblExists, boolean ifColExists) throws IgniteCheckedException {
            // No-op
        }

        /** {@inheritDoc} */
        @Override public void registerCache(String cacheName, String schemaName,
            GridCacheContext<?, ?> cctx) throws IgniteCheckedException {
            // The deliberate failure injected by this test: reject the configured caches on client nodes only.
            if (FAILED_CACHES.contains(cctx.name()) && cctx.kernalContext().clientNode())
                throw new IgniteCheckedException("Test query exception " + cctx.name() + " " + new Random().nextInt());
        }

        /** {@inheritDoc} */
        @Override public void unregisterCache(GridCacheContext cctx, boolean rmvIdx) throws IgniteCheckedException {
            // No-op
        }

        /** {@inheritDoc} */
        @Override public boolean registerType(GridCacheContext cctx,
            GridQueryTypeDescriptor desc) throws IgniteCheckedException {
            return false;
        }

        /** {@inheritDoc} */
        @Override public void store(GridCacheContext cctx, GridQueryTypeDescriptor type, CacheDataRow row,
            CacheDataRow prevRow, boolean prevRowAvailable) {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public void remove(GridCacheContext cctx, GridQueryTypeDescriptor type, CacheDataRow val) {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public void rebuildIndexesFromHash(String cacheName) throws IgniteCheckedException {
            // No-op
        }

        /** {@inheritDoc} */
        @Override public void markForRebuildFromHash(String cacheName) {
            // No-op
        }

        /** {@inheritDoc} */
        @Override public IndexingQueryFilter backupFilter(AffinityTopologyVersion topVer, int[] parts) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public void onDisconnected(IgniteFuture<?> reconnectFut) {
            // No-op
        }

        /** {@inheritDoc} */
        @Override public PreparedStatement prepareNativeStatement(String space, String sql) throws SQLException {
            return null;
        }

        /** {@inheritDoc} */
        @Override public Collection<GridRunningQueryInfo> runningQueries(long duration) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public void cancelQueries(Collection<Long> queries) {
            // No-op
        }

        /** {@inheritDoc} */
        @Override public void cancelAllQueries() {
            // No-op
        }

        /** {@inheritDoc} */
        @Override public String schema(String cacheName) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public void checkStatementStreamable(PreparedStatement nativeStmt) {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public GridQueryRowCacheCleaner rowCacheCleaner(int cacheGroupId) {
            return null;
        }
    }
}
/* * Copyright (c) 2012 - 2015 Ngewi Fet <ngewif@gmail.com> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gnucash.android.db; import android.content.Context; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteOpenHelper; import android.util.Log; import android.widget.Toast; import com.crashlytics.android.Crashlytics; import org.gnucash.android.app.GnuCashApplication; import org.gnucash.android.model.Commodity; import org.xml.sax.SAXException; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import javax.xml.parsers.ParserConfigurationException; import static org.gnucash.android.db.DatabaseSchema.AccountEntry; import static org.gnucash.android.db.DatabaseSchema.BudgetAmountEntry; import static org.gnucash.android.db.DatabaseSchema.BudgetEntry; import static org.gnucash.android.db.DatabaseSchema.CommodityEntry; import static org.gnucash.android.db.DatabaseSchema.CommonColumns; import static org.gnucash.android.db.DatabaseSchema.PriceEntry; import static org.gnucash.android.db.DatabaseSchema.RecurrenceEntry; import static org.gnucash.android.db.DatabaseSchema.ScheduledActionEntry; import static org.gnucash.android.db.DatabaseSchema.SplitEntry; import static org.gnucash.android.db.DatabaseSchema.TransactionEntry; /** * Helper class for managing the SQLite database. 
 * Creates the database and handles upgrades
 * @author Ngewi Fet <ngewif@gmail.com>
 *
 */
public class DatabaseHelper extends SQLiteOpenHelper {

    /**
     * Tag for logging
     */
    public static final String LOG_TAG = DatabaseHelper.class.getName();

    /**
     * SQL statement to create the accounts table in the database
     */
    private static final String ACCOUNTS_TABLE_CREATE = "create table " + AccountEntry.TABLE_NAME + " ("
            + AccountEntry._ID + " integer primary key autoincrement, "
            + AccountEntry.COLUMN_UID + " varchar(255) not null UNIQUE, "
            + AccountEntry.COLUMN_NAME + " varchar(255) not null, "
            + AccountEntry.COLUMN_TYPE + " varchar(255) not null, "
            + AccountEntry.COLUMN_CURRENCY + " varchar(255) not null, "
            + AccountEntry.COLUMN_COMMODITY_UID + " varchar(255) not null, "
            + AccountEntry.COLUMN_DESCRIPTION + " varchar(255), "
            + AccountEntry.COLUMN_COLOR_CODE + " varchar(255), "
            + AccountEntry.COLUMN_FAVORITE + " tinyint default 0, "
            + AccountEntry.COLUMN_HIDDEN + " tinyint default 0, "
            + AccountEntry.COLUMN_FULL_NAME + " varchar(255), "
            + AccountEntry.COLUMN_PLACEHOLDER + " tinyint default 0, "
            + AccountEntry.COLUMN_PARENT_ACCOUNT_UID + " varchar(255), "
            + AccountEntry.COLUMN_DEFAULT_TRANSFER_ACCOUNT_UID + " varchar(255), "
            + AccountEntry.COLUMN_CREATED_AT + " TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, "
            + AccountEntry.COLUMN_MODIFIED_AT + " TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, "
//            + "FOREIGN KEY (" 	+ AccountEntry.COLUMN_DEFAULT_TRANSFER_ACCOUNT_UID + ") REFERENCES " + AccountEntry.TABLE_NAME + " (" + AccountEntry.COLUMN_UID + ") ON DELETE SET NULL, "
            + "FOREIGN KEY (" 	+ AccountEntry.COLUMN_COMMODITY_UID + ") REFERENCES " + CommodityEntry.TABLE_NAME + " (" + CommodityEntry.COLUMN_UID + ") "
            + ");" + createUpdatedAtTrigger(AccountEntry.TABLE_NAME);

    /**
     * SQL statement to create the transactions table in the database
     */
    private static final String TRANSACTIONS_TABLE_CREATE = "create table " + TransactionEntry.TABLE_NAME + " ("
            + TransactionEntry._ID + " integer primary key autoincrement, "
            + TransactionEntry.COLUMN_UID + " varchar(255) not null UNIQUE, "
            + TransactionEntry.COLUMN_DESCRIPTION + " varchar(255), "
            + TransactionEntry.COLUMN_NOTES + " text, "
            + TransactionEntry.COLUMN_TIMESTAMP + " integer not null, "
            + TransactionEntry.COLUMN_EXPORTED + " tinyint default 0, "
            + TransactionEntry.COLUMN_TEMPLATE + " tinyint default 0, "
            + TransactionEntry.COLUMN_CURRENCY + " varchar(255) not null, "
            + TransactionEntry.COLUMN_COMMODITY_UID + " varchar(255) not null, "
            + TransactionEntry.COLUMN_SCHEDX_ACTION_UID + " varchar(255), "
            + TransactionEntry.COLUMN_CREATED_AT + " TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, "
            + TransactionEntry.COLUMN_MODIFIED_AT + " TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, "
            + "FOREIGN KEY (" 	+ TransactionEntry.COLUMN_SCHEDX_ACTION_UID + ") REFERENCES " + ScheduledActionEntry.TABLE_NAME + " (" + ScheduledActionEntry.COLUMN_UID + ") ON DELETE SET NULL, "
            + "FOREIGN KEY (" 	+ TransactionEntry.COLUMN_COMMODITY_UID + ") REFERENCES " + CommodityEntry.TABLE_NAME + " (" + CommodityEntry.COLUMN_UID + ") "
            + ");" + createUpdatedAtTrigger(TransactionEntry.TABLE_NAME);

    /**
     * SQL statement to create the transaction splits table
     */
    private static final String SPLITS_TABLE_CREATE = "CREATE TABLE " + SplitEntry.TABLE_NAME + " ("
            + SplitEntry._ID + " integer primary key autoincrement, "
            + SplitEntry.COLUMN_UID + " varchar(255) not null UNIQUE, "
            + SplitEntry.COLUMN_MEMO + " text, "
            + SplitEntry.COLUMN_TYPE + " varchar(255) not null, "
            + SplitEntry.COLUMN_VALUE_NUM + " integer not null, "
            + SplitEntry.COLUMN_VALUE_DENOM + " integer not null, "
            + SplitEntry.COLUMN_QUANTITY_NUM + " integer not null, "
            + SplitEntry.COLUMN_QUANTITY_DENOM + " integer not null, "
            + SplitEntry.COLUMN_ACCOUNT_UID + " varchar(255) not null, "
            + SplitEntry.COLUMN_TRANSACTION_UID + " varchar(255) not null, "
            + SplitEntry.COLUMN_RECONCILE_STATE + " varchar(1) not null default 'n', "
            + SplitEntry.COLUMN_RECONCILE_DATE + " timestamp not null default current_timestamp, "
            + SplitEntry.COLUMN_CREATED_AT + " TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, "
            + SplitEntry.COLUMN_MODIFIED_AT + " TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, "
            + "FOREIGN KEY (" 	+ SplitEntry.COLUMN_ACCOUNT_UID + ") REFERENCES " + AccountEntry.TABLE_NAME + " (" + AccountEntry.COLUMN_UID + ") ON DELETE CASCADE, "
            + "FOREIGN KEY (" 	+ SplitEntry.COLUMN_TRANSACTION_UID + ") REFERENCES " + TransactionEntry.TABLE_NAME + " (" + TransactionEntry.COLUMN_UID + ") ON DELETE CASCADE "
            + ");" + createUpdatedAtTrigger(SplitEntry.TABLE_NAME);

    /**
     * SQL statement to create the scheduled actions table
     */
    public static final String SCHEDULED_ACTIONS_TABLE_CREATE = "CREATE TABLE " + ScheduledActionEntry.TABLE_NAME + " ("
            + ScheduledActionEntry._ID + " integer primary key autoincrement, "
            + ScheduledActionEntry.COLUMN_UID + " varchar(255) not null UNIQUE, "
            + ScheduledActionEntry.COLUMN_ACTION_UID + " varchar(255) not null, "
            + ScheduledActionEntry.COLUMN_TYPE + " varchar(255) not null, "
            + ScheduledActionEntry.COLUMN_RECURRENCE_UID + " varchar(255) not null, "
            + ScheduledActionEntry.COLUMN_TEMPLATE_ACCT_UID + " varchar(255) not null, "
            + ScheduledActionEntry.COLUMN_LAST_RUN + " integer default 0, "
            + ScheduledActionEntry.COLUMN_START_TIME + " integer not null, "
            + ScheduledActionEntry.COLUMN_END_TIME + " integer default 0, "
            + ScheduledActionEntry.COLUMN_TAG + " text, "
            + ScheduledActionEntry.COLUMN_ENABLED + " tinyint default 1, " //enabled by default
            + ScheduledActionEntry.COLUMN_AUTO_CREATE + " tinyint default 1, "
            + ScheduledActionEntry.COLUMN_AUTO_NOTIFY + " tinyint default 0, "
            + ScheduledActionEntry.COLUMN_ADVANCE_CREATION + " integer default 0, "
            + ScheduledActionEntry.COLUMN_ADVANCE_NOTIFY + " integer default 0, "
            + ScheduledActionEntry.COLUMN_TOTAL_FREQUENCY + " integer default 0, "
            + ScheduledActionEntry.COLUMN_EXECUTION_COUNT + " integer default 0, "
            + ScheduledActionEntry.COLUMN_CREATED_AT + " TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, "
            + ScheduledActionEntry.COLUMN_MODIFIED_AT + " TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, "
            + "FOREIGN KEY (" 	+ ScheduledActionEntry.COLUMN_RECURRENCE_UID + ") REFERENCES " + RecurrenceEntry.TABLE_NAME + " (" + RecurrenceEntry.COLUMN_UID + ") "
            + ");" + createUpdatedAtTrigger(ScheduledActionEntry.TABLE_NAME);

    /**
     * SQL statement to create the commodities table
     */
    // NOTE(review): the namespace default is concatenated unquoted, producing
    // "default ISO4217" rather than "default 'ISO4217'" — confirm SQLite accepts
    // this as intended before changing it.
    public static final String COMMODITIES_TABLE_CREATE = "CREATE TABLE " + DatabaseSchema.CommodityEntry.TABLE_NAME + " ("
            + CommodityEntry._ID + " integer primary key autoincrement, "
            + CommodityEntry.COLUMN_UID + " varchar(255) not null UNIQUE, "
            + CommodityEntry.COLUMN_NAMESPACE + " varchar(255) not null default " + Commodity.Namespace.ISO4217.name() + ", "
            + CommodityEntry.COLUMN_FULLNAME + " varchar(255) not null, "
            + CommodityEntry.COLUMN_MNEMONIC + " varchar(255) not null, "
            + CommodityEntry.COLUMN_LOCAL_SYMBOL + " varchar(255) not null default '', "
            + CommodityEntry.COLUMN_CUSIP + " varchar(255), "
            + CommodityEntry.COLUMN_SMALLEST_FRACTION + " integer not null, "
            + CommodityEntry.COLUMN_QUOTE_FLAG + " integer not null, "
            + CommodityEntry.COLUMN_CREATED_AT + " TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, "
            + CommodityEntry.COLUMN_MODIFIED_AT + " TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP "
            + ");" + createUpdatedAtTrigger(CommodityEntry.TABLE_NAME);

    /**
     * SQL statement to create the commodity prices table
     */
    private static final String PRICES_TABLE_CREATE = "CREATE TABLE " + PriceEntry.TABLE_NAME + " ("
            + PriceEntry._ID + " integer primary key autoincrement, "
            + PriceEntry.COLUMN_UID + " varchar(255) not null UNIQUE, "
            + PriceEntry.COLUMN_COMMODITY_UID + " varchar(255) not null, "
            + PriceEntry.COLUMN_CURRENCY_UID + " varchar(255) not null, "
            + PriceEntry.COLUMN_TYPE + " varchar(255), "
            + PriceEntry.COLUMN_DATE + " TIMESTAMP not null, "
            + PriceEntry.COLUMN_SOURCE + " text, "
            + PriceEntry.COLUMN_VALUE_NUM + " integer not null, "
            + PriceEntry.COLUMN_VALUE_DENOM + " integer not null, "
            + PriceEntry.COLUMN_CREATED_AT + " TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, "
            + PriceEntry.COLUMN_MODIFIED_AT + " TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, "
            // only one price entry is kept per commodity/currency pair
            + "UNIQUE (" + PriceEntry.COLUMN_COMMODITY_UID + ", " + PriceEntry.COLUMN_CURRENCY_UID + ") ON CONFLICT REPLACE, "
            + "FOREIGN KEY (" 	+ PriceEntry.COLUMN_COMMODITY_UID + ") REFERENCES " + CommodityEntry.TABLE_NAME + " (" + CommodityEntry.COLUMN_UID + ") ON DELETE CASCADE, "
            + "FOREIGN KEY (" 	+ PriceEntry.COLUMN_CURRENCY_UID + ") REFERENCES " + CommodityEntry.TABLE_NAME + " (" + CommodityEntry.COLUMN_UID + ") ON DELETE CASCADE "
            + ");" + createUpdatedAtTrigger(PriceEntry.TABLE_NAME);

    /**
     * SQL statement to create the budgets table
     */
    private static final String BUDGETS_TABLE_CREATE = "CREATE TABLE " + BudgetEntry.TABLE_NAME + " ("
            + BudgetEntry._ID + " integer primary key autoincrement, "
            + BudgetEntry.COLUMN_UID + " varchar(255) not null UNIQUE, "
            + BudgetEntry.COLUMN_NAME + " varchar(255) not null, "
            + BudgetEntry.COLUMN_DESCRIPTION + " varchar(255), "
            + BudgetEntry.COLUMN_RECURRENCE_UID + " varchar(255) not null, "
            + BudgetEntry.COLUMN_NUM_PERIODS + " integer, "
            + BudgetEntry.COLUMN_CREATED_AT + " TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, "
            + BudgetEntry.COLUMN_MODIFIED_AT + " TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, "
            + "FOREIGN KEY (" 	+ BudgetEntry.COLUMN_RECURRENCE_UID + ") REFERENCES " + RecurrenceEntry.TABLE_NAME + " (" + RecurrenceEntry.COLUMN_UID + ") "
            + ");" + createUpdatedAtTrigger(BudgetEntry.TABLE_NAME);

    /**
     * SQL statement to create the budget amounts table
     */
    private static final String BUDGET_AMOUNTS_TABLE_CREATE = "CREATE TABLE " + BudgetAmountEntry.TABLE_NAME + " ("
            + BudgetAmountEntry._ID + " integer primary key autoincrement, "
            + BudgetAmountEntry.COLUMN_UID + " varchar(255) not null UNIQUE, "
            + BudgetAmountEntry.COLUMN_BUDGET_UID + " varchar(255) not null, "
            + BudgetAmountEntry.COLUMN_ACCOUNT_UID + " varchar(255) not null, "
            + BudgetAmountEntry.COLUMN_AMOUNT_NUM + " integer not null, "
            + BudgetAmountEntry.COLUMN_AMOUNT_DENOM + " integer not null, "
            + BudgetAmountEntry.COLUMN_PERIOD_NUM + " integer not null, "
            + BudgetAmountEntry.COLUMN_CREATED_AT + " TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, "
            + BudgetAmountEntry.COLUMN_MODIFIED_AT + " TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, "
            + "FOREIGN KEY (" 	+ BudgetAmountEntry.COLUMN_ACCOUNT_UID + ") REFERENCES " + AccountEntry.TABLE_NAME + " (" + AccountEntry.COLUMN_UID + ") ON DELETE CASCADE, "
            + "FOREIGN KEY (" 	+ BudgetAmountEntry.COLUMN_BUDGET_UID + ") REFERENCES " + BudgetEntry.TABLE_NAME + " (" + BudgetEntry.COLUMN_UID + ") ON DELETE CASCADE "
            + ");" + createUpdatedAtTrigger(BudgetAmountEntry.TABLE_NAME);

    /**
     * SQL statement to create the recurrences table
     */
    private static final String RECURRENCE_TABLE_CREATE = "CREATE TABLE " + RecurrenceEntry.TABLE_NAME + " ("
            + RecurrenceEntry._ID + " integer primary key autoincrement, "
            + RecurrenceEntry.COLUMN_UID + " varchar(255) not null UNIQUE, "
            + RecurrenceEntry.COLUMN_MULTIPLIER + " integer not null default 1, "
            + RecurrenceEntry.COLUMN_PERIOD_TYPE + " varchar(255) not null, "
            + RecurrenceEntry.COLUMN_BYDAY + " varchar(255), "
            + RecurrenceEntry.COLUMN_PERIOD_START + " timestamp not null, "
            + RecurrenceEntry.COLUMN_PERIOD_END + " timestamp, "
            + RecurrenceEntry.COLUMN_CREATED_AT + " TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, "
            + RecurrenceEntry.COLUMN_MODIFIED_AT + " TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP); "
            + createUpdatedAtTrigger(RecurrenceEntry.TABLE_NAME);

    /**
     * Constructor
     * @param context Application context
     * @param databaseName Name of the database
     */
    public DatabaseHelper(Context context, String databaseName){
        super(context, databaseName, null, DatabaseSchema.DATABASE_VERSION);
    }

    /**
     * Creates an update trigger to update the updated_at column for all records in the database.
     * This has to be run per table, and is currently appended to the create table statement.
* @param tableName Name of table on which to create trigger * @return SQL statement for creating trigger */ static String createUpdatedAtTrigger(String tableName){ return "CREATE TRIGGER update_time_trigger " + " AFTER UPDATE ON " + tableName + " FOR EACH ROW" + " BEGIN " + "UPDATE " + tableName + " SET " + CommonColumns.COLUMN_MODIFIED_AT + " = CURRENT_TIMESTAMP" + " WHERE OLD." + CommonColumns.COLUMN_UID + " = NEW." + CommonColumns.COLUMN_UID + ";" + " END;"; } @Override public void onCreate(SQLiteDatabase db) { createDatabaseTables(db); } @Override public void onOpen(SQLiteDatabase db) { super.onOpen(db); db.execSQL("PRAGMA foreign_keys=ON"); } @Override public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion){ Log.i(LOG_TAG, "Upgrading database from version " + oldVersion + " to " + newVersion); Toast.makeText(GnuCashApplication.getAppContext(), "Upgrading GnuCash database", Toast.LENGTH_SHORT).show(); /* * NOTE: In order to modify the database, create a new static method in the MigrationHelper class * called upgradeDbToVersion<#>, e.g. int upgradeDbToVersion10(SQLiteDatabase) in order to upgrade to version 10. * The upgrade method should return the new (upgraded) database version as the return value. * Then all you need to do is increment the DatabaseSchema.DATABASE_VERSION to the appropriate number to trigger an upgrade. 
*/ if (oldVersion > newVersion) { throw new IllegalArgumentException("Database downgrades are not supported at the moment"); } while(oldVersion < newVersion){ try { Method method = MigrationHelper.class.getDeclaredMethod("upgradeDbToVersion" + (oldVersion+1), SQLiteDatabase.class); Object result = method.invoke(null, db); oldVersion = Integer.parseInt(result.toString()); } catch (NoSuchMethodException e) { String msg = String.format("Database upgrade method upgradeToVersion%d(SQLiteDatabase) definition not found ", newVersion); Log.e(LOG_TAG, msg, e); Crashlytics.log(msg); Crashlytics.logException(e); throw new RuntimeException(e); } catch (IllegalAccessException e) { String msg = String.format("Database upgrade to version %d failed. The upgrade method is inaccessible ", newVersion); Log.e(LOG_TAG, msg, e); Crashlytics.log(msg); Crashlytics.logException(e); throw new RuntimeException(e); } catch (InvocationTargetException e){ Crashlytics.logException(e.getTargetException()); throw new RuntimeException(e.getTargetException()); } } } /** * Creates the tables in the database and import default commodities into the database * @param db Database instance */ private void createDatabaseTables(SQLiteDatabase db) { Log.i(LOG_TAG, "Creating database tables"); db.execSQL(ACCOUNTS_TABLE_CREATE); db.execSQL(TRANSACTIONS_TABLE_CREATE); db.execSQL(SPLITS_TABLE_CREATE); db.execSQL(SCHEDULED_ACTIONS_TABLE_CREATE); db.execSQL(COMMODITIES_TABLE_CREATE); db.execSQL(PRICES_TABLE_CREATE); db.execSQL(RECURRENCE_TABLE_CREATE); db.execSQL(BUDGETS_TABLE_CREATE); db.execSQL(BUDGET_AMOUNTS_TABLE_CREATE); String createAccountUidIndex = "CREATE UNIQUE INDEX '" + AccountEntry.INDEX_UID + "' ON " + AccountEntry.TABLE_NAME + "(" + AccountEntry.COLUMN_UID + ")"; String createTransactionUidIndex = "CREATE UNIQUE INDEX '" + TransactionEntry.INDEX_UID + "' ON " + TransactionEntry.TABLE_NAME + "(" + TransactionEntry.COLUMN_UID + ")"; String createSplitUidIndex = "CREATE UNIQUE INDEX '" + 
SplitEntry.INDEX_UID + "' ON " + SplitEntry.TABLE_NAME + "(" + SplitEntry.COLUMN_UID + ")"; String createScheduledEventUidIndex = "CREATE UNIQUE INDEX '" + ScheduledActionEntry.INDEX_UID + "' ON " + ScheduledActionEntry.TABLE_NAME + "(" + ScheduledActionEntry.COLUMN_UID + ")"; String createCommodityUidIndex = "CREATE UNIQUE INDEX '" + CommodityEntry.INDEX_UID + "' ON " + CommodityEntry.TABLE_NAME + "(" + CommodityEntry.COLUMN_UID + ")"; String createPriceUidIndex = "CREATE UNIQUE INDEX '" + PriceEntry.INDEX_UID + "' ON " + PriceEntry.TABLE_NAME + "(" + PriceEntry.COLUMN_UID + ")"; String createBudgetUidIndex = "CREATE UNIQUE INDEX '" + BudgetEntry.INDEX_UID + "' ON " + BudgetEntry.TABLE_NAME + "(" + BudgetEntry.COLUMN_UID + ")"; String createBudgetAmountUidIndex = "CREATE UNIQUE INDEX '" + BudgetAmountEntry.INDEX_UID + "' ON " + BudgetAmountEntry.TABLE_NAME + "(" + BudgetAmountEntry.COLUMN_UID + ")"; String createRecurrenceUidIndex = "CREATE UNIQUE INDEX '" + RecurrenceEntry.INDEX_UID + "' ON " + RecurrenceEntry.TABLE_NAME + "(" + RecurrenceEntry.COLUMN_UID + ")"; db.execSQL(createAccountUidIndex); db.execSQL(createTransactionUidIndex); db.execSQL(createSplitUidIndex); db.execSQL(createScheduledEventUidIndex); db.execSQL(createCommodityUidIndex); db.execSQL(createPriceUidIndex); db.execSQL(createBudgetUidIndex); db.execSQL(createRecurrenceUidIndex); db.execSQL(createBudgetAmountUidIndex); try { MigrationHelper.importCommodities(db); } catch (SAXException | ParserConfigurationException | IOException e) { Log.e(LOG_TAG, "Error loading currencies into the database"); e.printStackTrace(); throw new RuntimeException(e); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.streams.kstream.internals; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.common.serialization.StringSerializer; import org.apache.kafka.common.utils.Bytes; import org.apache.kafka.common.utils.Utils; import org.apache.kafka.streams.StreamsBuilder; import org.apache.kafka.streams.Topology; import org.apache.kafka.streams.TopologyDescription; import org.apache.kafka.streams.TopologyTestDriver; import org.apache.kafka.streams.TopologyTestDriverWrapper; import org.apache.kafka.streams.TopologyWrapper; import org.apache.kafka.streams.kstream.Consumed; import org.apache.kafka.streams.kstream.KTable; import org.apache.kafka.streams.kstream.Materialized; import org.apache.kafka.streams.kstream.Predicate; import org.apache.kafka.streams.kstream.Produced; import org.apache.kafka.streams.kstream.ValueJoiner; import org.apache.kafka.streams.kstream.ValueMapper; import org.apache.kafka.streams.kstream.ValueMapperWithKey; import org.apache.kafka.streams.kstream.ValueTransformerWithKeySupplier; import org.apache.kafka.streams.processor.internals.InternalTopologyBuilder; import org.apache.kafka.streams.processor.internals.SinkNode; import 
org.apache.kafka.streams.processor.internals.SourceNode;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.test.ConsumerRecordFactory;
import org.apache.kafka.test.MockAggregator;
import org.apache.kafka.test.MockInitializer;
import org.apache.kafka.test.MockMapper;
import org.apache.kafka.test.MockProcessor;
import org.apache.kafka.test.MockProcessorSupplier;
import org.apache.kafka.test.MockReducer;
import org.apache.kafka.test.MockValueJoiner;
import org.apache.kafka.test.StreamsTestUtils;
import org.junit.Before;
import org.junit.Test;

import java.lang.reflect.Field;
import java.util.List;
import java.util.Properties;

import static org.easymock.EasyMock.mock;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;

// Unit tests for the KTableImpl DSL implementation: record flow through
// mapValues/filter/join chains, lazy state-store materialization, and
// null-argument validation of the public KTable API.
public class KTableImplTest {

    private final Consumed<String, String> consumed = Consumed.with(Serdes.String(), Serdes.String());
    private final Produced<String, String> produced = Produced.with(Serdes.String(), Serdes.String());
    private final Properties props = StreamsTestUtils.getStreamsConfig(Serdes.String(), Serdes.String());
    private final ConsumerRecordFactory<String, String> recordFactory = new ConsumerRecordFactory<>(new StringSerializer(), new StringSerializer());
    private StreamsBuilder builder;
    private KTable<String, String> table;

    @Before
    public void setUp() {
        builder = new StreamsBuilder();
        table = builder.table("test");
    }

    // Verifies the values observed by processors attached at each stage of a
    // table -> mapValues -> filter chain, plus a round-trip through topic2.
    @Test
    public void testKTable() {
        final StreamsBuilder builder = new StreamsBuilder();

        final String topic1 = "topic1";
        final String topic2 = "topic2";

        final KTable<String, String> table1 = builder.table(topic1, consumed);

        final MockProcessorSupplier<String, Object> supplier = new MockProcessorSupplier<>();
        table1.toStream().process(supplier);

        final KTable<String, Integer> table2 = table1.mapValues(new ValueMapper<String, Integer>() {
            @Override
            public Integer apply(final String value) {
                return new Integer(value);
            }
        });
        table2.toStream().process(supplier);

        final KTable<String, Integer> table3 = table2.filter(new Predicate<String, Integer>() {
            @Override
            public boolean test(final String key, final Integer value) {
                return (value % 2) == 0;
            }
        });
        table3.toStream().process(supplier);

        table1.toStream().to(topic2, produced);
        final KTable<String, String> table4 = builder.table(topic2, consumed);
        table4.toStream().process(supplier);

        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            driver.pipeInput(recordFactory.create(topic1, "A", "01"));
            driver.pipeInput(recordFactory.create(topic1, "B", "02"));
            driver.pipeInput(recordFactory.create(topic1, "C", "03"));
            driver.pipeInput(recordFactory.create(topic1, "D", "04"));
        }

        final List<MockProcessor<String, Object>> processors = supplier.capturedProcessors(4);
        assertEquals(Utils.mkList("A:01", "B:02", "C:03", "D:04"), processors.get(0).processed);
        assertEquals(Utils.mkList("A:1", "B:2", "C:3", "D:4"), processors.get(1).processed);
        // filter emits null for records that fail the predicate (tombstone semantics)
        assertEquals(Utils.mkList("A:null", "B:2", "C:null", "D:4"), processors.get(2).processed);
        assertEquals(Utils.mkList("A:01", "B:02", "C:03", "D:04"), processors.get(3).processed);
    }

    // Verifies value getters of each derived table reflect upserts and deletes
    // applied to the source topic.
    @Test
    public void testValueGetter() {
        final StreamsBuilder builder = new StreamsBuilder();

        final String topic1 = "topic1";
        final String topic2 = "topic2";

        final KTableImpl<String, String, String> table1 =
                (KTableImpl<String, String, String>) builder.table(topic1, consumed);
        final KTableImpl<String, String, Integer> table2 =
                (KTableImpl<String, String, Integer>) table1.mapValues(
                        new ValueMapper<String, Integer>() {
                            @Override
                            public Integer apply(final String value) {
                                return new Integer(value);
                            }
                        });
        final KTableImpl<String, Integer, Integer> table3 =
                (KTableImpl<String, Integer, Integer>) table2.filter(
                        new Predicate<String, Integer>() {
                            @Override
                            public boolean test(final String key, final Integer value) {
                                return (value % 2) == 0;
                            }
                        });
        table1.toStream().to(topic2, produced);
        final KTableImpl<String, String, String> table4 =
                (KTableImpl<String, String, String>) builder.table(topic2, consumed);

        final Topology topology = builder.build();

        final KTableValueGetterSupplier<String, String> getterSupplier1 = table1.valueGetterSupplier();
        final KTableValueGetterSupplier<String, Integer> getterSupplier2 = table2.valueGetterSupplier();
        final KTableValueGetterSupplier<String, Integer> getterSupplier3 = table3.valueGetterSupplier();
        final KTableValueGetterSupplier<String, String> getterSupplier4 = table4.valueGetterSupplier();

        final InternalTopologyBuilder topologyBuilder = TopologyWrapper.getInternalTopologyBuilder(topology);
        // Getters read through state stores; connect each table's processor to its stores.
        topologyBuilder.connectProcessorAndStateStores(table1.name, getterSupplier1.storeNames());
        topologyBuilder.connectProcessorAndStateStores(table2.name, getterSupplier2.storeNames());
        topologyBuilder.connectProcessorAndStateStores(table3.name, getterSupplier3.storeNames());
        topologyBuilder.connectProcessorAndStateStores(table4.name, getterSupplier4.storeNames());

        try (final TopologyTestDriverWrapper driver = new TopologyTestDriverWrapper(topology, props)) {
            assertEquals(2, driver.getAllStateStores().size());

            final KTableValueGetter<String, String> getter1 = getterSupplier1.get();
            final KTableValueGetter<String, Integer> getter2 = getterSupplier2.get();
            final KTableValueGetter<String, Integer> getter3 = getterSupplier3.get();
            final KTableValueGetter<String, String> getter4 = getterSupplier4.get();

            getter1.init(driver.setCurrentNodeForProcessorContext(table1.name));
            getter2.init(driver.setCurrentNodeForProcessorContext(table2.name));
            getter3.init(driver.setCurrentNodeForProcessorContext(table3.name));
            getter4.init(driver.setCurrentNodeForProcessorContext(table4.name));

            driver.pipeInput(recordFactory.create(topic1, "A", "01"));
            driver.pipeInput(recordFactory.create(topic1, "B", "01"));
            driver.pipeInput(recordFactory.create(topic1, "C", "01"));

            assertEquals("01", getter1.get("A"));
            assertEquals("01", getter1.get("B"));
            assertEquals("01", getter1.get("C"));

            assertEquals(new Integer(1), getter2.get("A"));
            assertEquals(new Integer(1), getter2.get("B"));
            assertEquals(new Integer(1), getter2.get("C"));

            // odd values are filtered out of table3
            assertNull(getter3.get("A"));
            assertNull(getter3.get("B"));
            assertNull(getter3.get("C"));

            assertEquals("01", getter4.get("A"));
            assertEquals("01", getter4.get("B"));
            assertEquals("01", getter4.get("C"));

            driver.pipeInput(recordFactory.create(topic1, "A", "02"));
            driver.pipeInput(recordFactory.create(topic1, "B", "02"));

            assertEquals("02", getter1.get("A"));
            assertEquals("02", getter1.get("B"));
            assertEquals("01", getter1.get("C"));

            assertEquals(new Integer(2), getter2.get("A"));
            assertEquals(new Integer(2), getter2.get("B"));
            assertEquals(new Integer(1), getter2.get("C"));

            assertEquals(new Integer(2), getter3.get("A"));
            assertEquals(new Integer(2), getter3.get("B"));
            assertNull(getter3.get("C"));

            assertEquals("02", getter4.get("A"));
            assertEquals("02", getter4.get("B"));
            assertEquals("01", getter4.get("C"));

            driver.pipeInput(recordFactory.create(topic1, "A", "03"));

            assertEquals("03", getter1.get("A"));
            assertEquals("02", getter1.get("B"));
            assertEquals("01", getter1.get("C"));

            assertEquals(new Integer(3), getter2.get("A"));
            assertEquals(new Integer(2), getter2.get("B"));
            assertEquals(new Integer(1), getter2.get("C"));

            assertNull(getter3.get("A"));
            assertEquals(new Integer(2), getter3.get("B"));
            assertNull(getter3.get("C"));

            assertEquals("03", getter4.get("A"));
            assertEquals("02", getter4.get("B"));
            assertEquals("01", getter4.get("C"));

            // null value acts as a delete/tombstone for key "A"
            driver.pipeInput(recordFactory.create(topic1, "A", (String) null));

            assertNull(getter1.get("A"));
            assertEquals("02", getter1.get("B"));
            assertEquals("01", getter1.get("C"));

            assertNull(getter2.get("A"));
            assertEquals(new Integer(2), getter2.get("B"));
            assertEquals(new Integer(1), getter2.get("C"));

            assertNull(getter3.get("A"));
            assertEquals(new Integer(2), getter3.get("B"));
            assertNull(getter3.get("C"));

            assertNull(getter4.get("A"));
            assertEquals("02", getter4.get("B"));
            assertEquals("01", getter4.get("C"));
        }
    }

    // Derived tables (mapValues/filter) should not materialize extra stores:
    // only the two source tables get state stores.
    @Test
    public void testStateStoreLazyEval() {
        final String topic1 = "topic1";
        final String topic2 = "topic2";
        final StreamsBuilder builder = new StreamsBuilder();

        final KTableImpl<String, String, String> table1 =
                (KTableImpl<String, String, String>) builder.table(topic1, consumed);
        builder.table(topic2, consumed);

        final KTableImpl<String, String, Integer> table1Mapped =
                (KTableImpl<String, String, Integer>) table1.mapValues(
                        new ValueMapper<String, Integer>() {
                            @Override
                            public Integer apply(final String value) {
                                return new Integer(value);
                            }
                        });
        table1Mapped.filter(
                new Predicate<String, Integer>() {
                    @Override
                    public boolean test(final String key, final Integer value) {
                        return (value % 2) == 0;
                    }
                });

        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            assertEquals(2, driver.getAllStateStores().size());
        }
    }

    // A join over derived tables should still only materialize the two source stores.
    @Test
    public void testStateStore() {
        final String topic1 = "topic1";
        final String topic2 = "topic2";
        final StreamsBuilder builder = new StreamsBuilder();

        final KTableImpl<String, String, String> table1 =
                (KTableImpl<String, String, String>) builder.table(topic1, consumed);
        final KTableImpl<String, String, String> table2 =
                (KTableImpl<String, String, String>) builder.table(topic2, consumed);

        final KTableImpl<String, String, Integer> table1Mapped =
                (KTableImpl<String, String, Integer>) table1.mapValues(
                        new ValueMapper<String, Integer>() {
                            @Override
                            public Integer apply(final String value) {
                                return new Integer(value);
                            }
                        });
        final KTableImpl<String, Integer, Integer> table1MappedFiltered =
                (KTableImpl<String, Integer, Integer>) table1Mapped.filter(
                        new Predicate<String, Integer>() {
                            @Override
                            public boolean test(final String key, final Integer value) {
                                return (value % 2) == 0;
                            }
                        });
        table2.join(table1MappedFiltered,
                new ValueJoiner<String, Integer, String>() {
                    @Override
                    public String apply(final String v1, final Integer v2) {
                        return v1 + v2;
                    }
                });

        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            assertEquals(2, driver.getAllStateStores().size());
        }
    }

    // Helper: asserts that a processor node with the given (generated) name exists
    // somewhere in the topology description.
    private void assertTopologyContainsProcessor(final Topology topology, final String processorName) {
        for (final TopologyDescription.Subtopology subtopology: topology.describe().subtopologies()) {
            for (final TopologyDescription.Node node: subtopology.nodes()) {
                if (node.name().equals(processorName)) {
                    return;
                }
            }
        }
        throw new AssertionError("No processor named '" + processorName + "'"
                + "found in the provided Topology:\n" + topology.describe());
    }

    // groupBy forces a repartition; verify the internal sink/source nodes exist and
    // that their (de)serializers wrap Changed values. Node names are the generated
    // KSTREAM-* names, so this test is coupled to topology name generation.
    @Test
    public void shouldCreateSourceAndSinkNodesForRepartitioningTopic() throws NoSuchFieldException, IllegalAccessException {
        final String topic1 = "topic1";
        final String storeName1 = "storeName1";

        final StreamsBuilder builder = new StreamsBuilder();

        final KTableImpl<String, String, String> table1 =
                (KTableImpl<String, String, String>) builder.table(
                        topic1,
                        consumed,
                        Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as(storeName1)
                                .withKeySerde(Serdes.String())
                                .withValueSerde(Serdes.String())
                );

        table1.groupBy(MockMapper.<String, String>noOpKeyValueMapper())
                .aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, MockAggregator.TOSTRING_REMOVER,
                        Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as("mock-result1"));

        table1.groupBy(MockMapper.<String, String>noOpKeyValueMapper())
                .reduce(MockReducer.STRING_ADDER, MockReducer.STRING_REMOVER,
                        Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as("mock-result2"));

        final Topology topology = builder.build();
        try (final TopologyTestDriverWrapper driver = new TopologyTestDriverWrapper(topology, props)) {
            assertEquals(3, driver.getAllStateStores().size());

            assertTopologyContainsProcessor(topology, "KSTREAM-SINK-0000000003");
            assertTopologyContainsProcessor(topology, "KSTREAM-SOURCE-0000000004");
            assertTopologyContainsProcessor(topology, "KSTREAM-SINK-0000000007");
            assertTopologyContainsProcessor(topology, "KSTREAM-SOURCE-0000000008");

            // Private serializer fields are only reachable via reflection.
            final Field valSerializerField = ((SinkNode) driver.getProcessor("KSTREAM-SINK-0000000003")).getClass().getDeclaredField("valSerializer");
            final Field valDeserializerField = ((SourceNode) driver.getProcessor("KSTREAM-SOURCE-0000000004")).getClass().getDeclaredField("valDeserializer");
            valSerializerField.setAccessible(true);
            valDeserializerField.setAccessible(true);

            assertNotNull(((ChangedSerializer) valSerializerField.get(driver.getProcessor("KSTREAM-SINK-0000000003"))).inner());
            assertNotNull(((ChangedDeserializer) valDeserializerField.get(driver.getProcessor("KSTREAM-SOURCE-0000000004"))).inner());
            assertNotNull(((ChangedSerializer) valSerializerField.get(driver.getProcessor("KSTREAM-SINK-0000000007"))).inner());
            assertNotNull(((ChangedDeserializer) valDeserializerField.get(driver.getProcessor("KSTREAM-SOURCE-0000000008"))).inner());
        }
    }

    // Null-argument validation of the public KTable API.

    @Test(expected = NullPointerException.class)
    public void shouldNotAllowNullSelectorOnToStream() {
        table.toStream(null);
    }

    @Test(expected = NullPointerException.class)
    public void shouldNotAllowNullPredicateOnFilter() {
        table.filter(null);
    }

    @Test(expected = NullPointerException.class)
    public void shouldNotAllowNullPredicateOnFilterNot() {
        table.filterNot(null);
    }

    @Test(expected = NullPointerException.class)
    public void shouldNotAllowNullMapperOnMapValues() {
        table.mapValues((ValueMapper) null);
    }

    @Test(expected = NullPointerException.class)
    public void shouldNotAllowNullMapperOnMapValueWithKey() {
        table.mapValues((ValueMapperWithKey) null);
    }

    @Test(expected = NullPointerException.class)
    public void shouldNotAllowNullSelectorOnGroupBy() {
        table.groupBy(null);
    }

    @Test(expected = NullPointerException.class)
    public void shouldNotAllowNullOtherTableOnJoin() {
        table.join(null, MockValueJoiner.TOSTRING_JOINER);
    }

    @Test
    public void shouldAllowNullStoreInJoin() {
        table.join(table, MockValueJoiner.TOSTRING_JOINER);
    }

    @Test(expected = NullPointerException.class)
    public void shouldNotAllowNullJoinerJoin() {
        table.join(table, null);
    }
@Test(expected = NullPointerException.class) public void shouldNotAllowNullOtherTableOnOuterJoin() { table.outerJoin(null, MockValueJoiner.TOSTRING_JOINER); } @Test(expected = NullPointerException.class) public void shouldNotAllowNullJoinerOnOuterJoin() { table.outerJoin(table, null); } @Test(expected = NullPointerException.class) public void shouldNotAllowNullJoinerOnLeftJoin() { table.leftJoin(table, null); } @Test(expected = NullPointerException.class) public void shouldNotAllowNullOtherTableOnLeftJoin() { table.leftJoin(null, MockValueJoiner.TOSTRING_JOINER); } @Test(expected = NullPointerException.class) public void shouldThrowNullPointerOnFilterWhenMaterializedIsNull() { table.filter(new Predicate<String, String>() { @Override public boolean test(final String key, final String value) { return false; } }, (Materialized) null); } @Test(expected = NullPointerException.class) public void shouldThrowNullPointerOnFilterNotWhenMaterializedIsNull() { table.filterNot(new Predicate<String, String>() { @Override public boolean test(final String key, final String value) { return false; } }, (Materialized) null); } @Test(expected = NullPointerException.class) public void shouldThrowNullPointerOnJoinWhenMaterializedIsNull() { table.join(table, MockValueJoiner.TOSTRING_JOINER, (Materialized) null); } @Test(expected = NullPointerException.class) public void shouldThrowNullPointerOnLeftJoinWhenMaterializedIsNull() { table.leftJoin(table, MockValueJoiner.TOSTRING_JOINER, (Materialized) null); } @Test(expected = NullPointerException.class) public void shouldThrowNullPointerOnOuterJoinWhenMaterializedIsNull() { table.outerJoin(table, MockValueJoiner.TOSTRING_JOINER, (Materialized) null); } @Test(expected = NullPointerException.class) public void shouldThrowNullPointerOnTransformValuesWithKeyWhenTransformerSupplierIsNull() { table.transformValues((ValueTransformerWithKeySupplier) null); } @SuppressWarnings("unchecked") @Test(expected = NullPointerException.class) public void 
shouldThrowNullPointerOnTransformValuesWithKeyWhenMaterializedIsNull() { final ValueTransformerWithKeySupplier<String, String, ?> valueTransformerSupplier = mock(ValueTransformerWithKeySupplier.class); table.transformValues(valueTransformerSupplier, (Materialized) null); } @SuppressWarnings("unchecked") @Test(expected = NullPointerException.class) public void shouldThrowNullPointerOnTransformValuesWithKeyWhenStoreNamesNull() { final ValueTransformerWithKeySupplier<String, String, ?> valueTransformerSupplier = mock(ValueTransformerWithKeySupplier.class); table.transformValues(valueTransformerSupplier, (String[]) null); } }
/* * Copyright 2005 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools; import java.io.Externalizable; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.Map.Entry; import org.drools.common.AgendaGroupFactory; import org.drools.common.ArrayAgendaGroupFactory; import org.drools.common.PriorityQueueAgendaGroupFactory; import org.drools.process.core.Context; import org.drools.process.core.ParameterDefinition; import org.drools.process.core.Process; import org.drools.process.core.WorkDefinition; import org.drools.process.core.datatype.DataType; import org.drools.process.core.impl.ParameterDefinitionImpl; import org.drools.process.core.impl.WorkDefinitionExtensionImpl; import org.drools.process.instance.ProcessInstanceFactory; import org.drools.process.instance.ProcessInstanceFactoryRegistry; import org.drools.process.instance.ProcessInstanceManager; import org.drools.process.instance.impl.ContextInstanceFactory; import org.drools.process.instance.impl.ContextInstanceFactoryRegistry; import org.drools.spi.ConflictResolver; import org.drools.spi.ConsequenceExceptionHandler; import org.drools.util.ChainedProperties; import org.drools.util.ConfFileUtils; import org.drools.workflow.core.Node; import org.drools.workflow.instance.impl.NodeInstanceFactory; import 
org.drools.workflow.instance.impl.NodeInstanceFactoryRegistry; import org.mvel.MVEL; /** * RuleBaseConfiguration * * A class to store RuleBase related configuration. It must be used at rule base instantiation time * or not used at all. * This class will automatically load default values from system properties, so if you want to set * a default configuration value for all your new rule bases, you can simply set the property as * a System property. * * After RuleBase is created, it makes the configuration immutable and there is no way to make it * mutable again. This is to avoid inconsistent behavior inside rulebase. * * NOTE: This API is under review and may change in the future. */ /** * drools.maintainTms = <true|false> * drools.sequential = <true|false> * drools.sequential.agenda = <sequential|dynamic> * drools.removeIdentities = <true|false> * drools.shareAlphaNodes = <true|false> * drools.shareBetaNodes = <true|false> * drools.alphaMemory <true/false> * drools.alphaNodeHashingThreshold = <1...n> * drools.compositeKeyDepth =<1..3> * drools.indexLeftBetaMemory = <true/false> * drools.indexRightBetaMemory = <true/false> * drools.assertBehaviour = <identity|equality> * drools.logicalOverride = <discard|preserve> * drools.executorService = <qualified class name> * drools.conflictResolver = <qualified class name> * drools.consequenceExceptionHandler = <qualified class name> * drools.ruleBaseUpdateHandler = <qualified class name> * drools.sessionClock = <qualified class name> * drools.useStaticObjenesis = <false|true> * */ public class RuleBaseConfiguration implements Externalizable { private static final long serialVersionUID = 400L; private ChainedProperties chainedProperties; private boolean immutable; private boolean sequential; private SequentialAgenda sequentialAgenda; private boolean maintainTms; private boolean removeIdentities; private boolean shareAlphaNodes; private boolean shareBetaNodes; private int alphaNodeHashingThreshold; private int 
compositeKeyDepth; private boolean indexLeftBetaMemory; private boolean indexRightBetaMemory; private AssertBehaviour assertBehaviour; private LogicalOverride logicalOverride; private String executorService; private ConsequenceExceptionHandler consequenceExceptionHandler; private String ruleBaseUpdateHandler; // if "true", rulebase builder will try to split // the rulebase into multiple partitions that can be evaluated // in parallel by using multiple internal threads private boolean partitionsEnabled; private ConflictResolver conflictResolver; private static final String STAR = "*"; private ContextInstanceFactoryRegistry processContextInstanceFactoryRegistry; private Map<String, WorkDefinition> workDefinitions; private boolean advancedProcessRuleIntegration; private ProcessInstanceFactoryRegistry processInstanceFactoryRegistry; private NodeInstanceFactoryRegistry processNodeInstanceFactoryRegistry; private ProcessInstanceManager processInstanceManager; private transient ClassLoader classLoader; public void writeExternal(ObjectOutput out) throws IOException { out.writeObject( chainedProperties ); out.writeBoolean( immutable ); out.writeBoolean( sequential ); out.writeObject( sequentialAgenda ); out.writeBoolean( maintainTms ); out.writeBoolean( removeIdentities ); out.writeBoolean( shareAlphaNodes ); out.writeBoolean( shareBetaNodes ); out.writeInt( alphaNodeHashingThreshold ); out.writeInt( compositeKeyDepth ); out.writeBoolean( indexLeftBetaMemory ); out.writeBoolean( indexRightBetaMemory ); out.writeObject( assertBehaviour ); out.writeObject( logicalOverride ); out.writeObject( executorService ); out.writeObject( consequenceExceptionHandler ); out.writeObject( ruleBaseUpdateHandler ); out.writeObject( conflictResolver ); out.writeObject( processNodeInstanceFactoryRegistry ); out.writeBoolean( advancedProcessRuleIntegration ); out.writeBoolean( partitionsEnabled ); } public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { 
chainedProperties = (ChainedProperties) in.readObject(); immutable = in.readBoolean(); sequential = in.readBoolean(); sequentialAgenda = (SequentialAgenda) in.readObject(); maintainTms = in.readBoolean(); removeIdentities = in.readBoolean(); shareAlphaNodes = in.readBoolean(); shareBetaNodes = in.readBoolean(); alphaNodeHashingThreshold = in.readInt(); compositeKeyDepth = in.readInt(); indexLeftBetaMemory = in.readBoolean(); indexRightBetaMemory = in.readBoolean(); assertBehaviour = (AssertBehaviour) in.readObject(); logicalOverride = (LogicalOverride) in.readObject(); executorService = (String) in.readObject(); consequenceExceptionHandler = (ConsequenceExceptionHandler) in.readObject(); ruleBaseUpdateHandler = (String) in.readObject(); conflictResolver = (ConflictResolver) in.readObject(); processNodeInstanceFactoryRegistry = (NodeInstanceFactoryRegistry) in.readObject(); advancedProcessRuleIntegration = in.readBoolean(); partitionsEnabled = in.readBoolean(); } /** * Creates a new rulebase configuration using the provided properties * as configuration options. Also, if a Thread.currentThread().getContextClassLoader() * returns a non-null class loader, it will be used as the parent classloader * for this rulebase class loaders, otherwise, the RuleBaseConfiguration.class.getClassLoader() * class loader will be used. * * @param properties */ public RuleBaseConfiguration(Properties properties) { init( null, properties ); } /** * Creates a new rulebase with a default parent class loader set according * to the following algorithm: * * If a Thread.currentThread().getContextClassLoader() returns a non-null class loader, * it will be used as the parent class loader for this rulebase class loaders, otherwise, * the RuleBaseConfiguration.class.getClassLoader() class loader will be used. 
* * @param properties */ public RuleBaseConfiguration() { init( null, null ); } /** * A constructor that sets the parent classloader to be used * while dealing with this rule base * * @param classLoader */ public RuleBaseConfiguration(ClassLoader classLoader) { init( classLoader, null ); } /** * A constructor that sets the classloader to be used as the parent classloader * of this rule base classloaders, and the properties to be used * as base configuration options * * @param classLoder * @param properties */ public RuleBaseConfiguration(ClassLoader classLoader, Properties properties) { init( classLoader, properties ); } private void init(ClassLoader classLoader, Properties properties) { this.immutable = false; if ( classLoader != null ) { this.classLoader = classLoader; } else if ( Thread.currentThread().getContextClassLoader() != null ) { this.classLoader = Thread.currentThread().getContextClassLoader(); } else { this.classLoader = this.getClass().getClassLoader(); } this.chainedProperties = new ChainedProperties( "rulebase.conf" ); if ( properties != null ) { this.chainedProperties.addProperties( properties ); } setSequentialAgenda( SequentialAgenda.determineSequentialAgenda( this.chainedProperties.getProperty( "drools.sequential.agenda", "sequential" ) ) ); setSequential( Boolean.valueOf( this.chainedProperties.getProperty( "drools.sequential", "false" ) ).booleanValue() ); setMaintainTms( Boolean.valueOf( this.chainedProperties.getProperty( "drools.maintainTms", "true" ) ).booleanValue() ); setRemoveIdentities( Boolean.valueOf( this.chainedProperties.getProperty( "drools.removeIdentities", "false" ) ).booleanValue() ); setShareAlphaNodes( Boolean.valueOf( this.chainedProperties.getProperty( "drools.shareAlphaNodes", "true" ) ).booleanValue() ); setShareBetaNodes( Boolean.valueOf( this.chainedProperties.getProperty( "drools.shareBetaNodes", "true" ) ).booleanValue() ); setAlphaNodeHashingThreshold( Integer.parseInt( this.chainedProperties.getProperty( 
"drools.alphaNodeHashingThreshold", "3" ) ) ); setCompositeKeyDepth( Integer.parseInt( this.chainedProperties.getProperty( "drools.compositeKeyDepth", "3" ) ) ); setIndexLeftBetaMemory( Boolean.valueOf( this.chainedProperties.getProperty( "drools.indexLeftBetaMemory", "true" ) ).booleanValue() ); setIndexRightBetaMemory( Boolean.valueOf( this.chainedProperties.getProperty( "drools.indexRightBetaMemory", "true" ) ).booleanValue() ); setAssertBehaviour( AssertBehaviour.determineAssertBehaviour( this.chainedProperties.getProperty( "drools.assertBehaviour", "identity" ) ) ); setLogicalOverride( LogicalOverride.determineLogicalOverride( this.chainedProperties.getProperty( "drools.logicalOverride", "discard" ) ) ); setExecutorService( this.chainedProperties.getProperty( "drools.executorService", "org.drools.concurrent.DefaultExecutorService" ) ); setConsequenceExceptionHandler( RuleBaseConfiguration.determineConsequenceExceptionHandler( this.chainedProperties.getProperty( "drools.consequenceExceptionHandler", "org.drools.base.DefaultConsequenceExceptionHandler" ) ) ); setRuleBaseUpdateHandler( this.chainedProperties.getProperty( "drools.ruleBaseUpdateHandler", "org.drools.base.FireAllRulesRuleBaseUpdateListener" ) ); setConflictResolver( RuleBaseConfiguration.determineConflictResolver( this.chainedProperties.getProperty( "drools.conflictResolver", "org.drools.conflict.DepthConflictResolver" ) ) ); setAdvancedProcessRuleIntegration( Boolean.valueOf( this.chainedProperties.getProperty( "drools.advancedProcessRuleIntegration", "false" ) ).booleanValue() ); setPartitionsEnabled( Boolean.valueOf( this.chainedProperties.getProperty( "drools.enablePartitioning", "false" ) ).booleanValue() ); } /** * Makes the configuration object immutable. Once it becomes immutable, * there is no way to make it mutable again. * This is done to keep consistency. 
*/ public void makeImmutable() { this.immutable = true; } /** * Returns true if this configuration object is immutable or false otherwise. * @return */ public boolean isImmutable() { return this.immutable; } private void checkCanChange() { if ( this.immutable ) { throw new UnsupportedOperationException( "Can't set a property after configuration becomes immutable" ); } } public void setSequential(boolean sequential) { this.sequential = sequential; } public boolean isSequential() { return this.sequential; } public boolean isMaintainTms() { return this.maintainTms; } public void setMaintainTms(final boolean maintainTms) { checkCanChange(); // throws an exception if a change isn't possible; this.maintainTms = maintainTms; } public boolean isRemoveIdentities() { return this.removeIdentities; } public void setRemoveIdentities(final boolean removeIdentities) { checkCanChange(); // throws an exception if a change isn't possible; this.removeIdentities = removeIdentities; } public boolean isShareAlphaNodes() { return this.shareAlphaNodes; } public void setShareAlphaNodes(final boolean shareAlphaNodes) { checkCanChange(); // throws an exception if a change isn't possible; this.shareAlphaNodes = shareAlphaNodes; } public boolean isShareBetaNodes() { return this.shareBetaNodes; } public void setShareBetaNodes(final boolean shareBetaNodes) { checkCanChange(); // throws an exception if a change isn't possible; this.shareBetaNodes = shareBetaNodes; } public int getAlphaNodeHashingThreshold() { return this.alphaNodeHashingThreshold; } public void setAlphaNodeHashingThreshold(final int alphaNodeHashingThreshold) { checkCanChange(); // throws an exception if a change isn't possible; this.alphaNodeHashingThreshold = alphaNodeHashingThreshold; } public AssertBehaviour getAssertBehaviour() { return this.assertBehaviour; } public void setAssertBehaviour(final AssertBehaviour assertBehaviour) { checkCanChange(); // throws an exception if a change isn't possible; this.assertBehaviour = 
assertBehaviour; } public int getCompositeKeyDepth() { return this.compositeKeyDepth; } public void setCompositeKeyDepth(final int compositeKeyDepth) { if ( !this.immutable ) { if ( compositeKeyDepth > 3 ) { throw new UnsupportedOperationException( "compositeKeyDepth cannot be greater than 3" ); } this.compositeKeyDepth = compositeKeyDepth; } else { throw new UnsupportedOperationException( "Can't set a property after configuration becomes immutable" ); } } public boolean isIndexLeftBetaMemory() { return this.indexLeftBetaMemory; } public void setIndexLeftBetaMemory(final boolean indexLeftBetaMemory) { checkCanChange(); // throws an exception if a change isn't possible; this.indexLeftBetaMemory = indexLeftBetaMemory; } public boolean isIndexRightBetaMemory() { return this.indexRightBetaMemory; } public void setIndexRightBetaMemory(final boolean indexRightBetaMemory) { checkCanChange(); // throws an exception if a change isn't possible; this.indexRightBetaMemory = indexRightBetaMemory; } public LogicalOverride getLogicalOverride() { return this.logicalOverride; } public void setLogicalOverride(final LogicalOverride logicalOverride) { checkCanChange(); // throws an exception if a change isn't possible; this.logicalOverride = logicalOverride; } public String getExecutorService() { return executorService; } public void setExecutorService(String executorService) { checkCanChange(); // throws an exception if a change isn't possible; this.executorService = executorService; } public ConsequenceExceptionHandler getConsequenceExceptionHandler() { return consequenceExceptionHandler; } public void setConsequenceExceptionHandler(ConsequenceExceptionHandler consequenceExceptionHandler) { checkCanChange(); // throws an exception if a change isn't possible; this.consequenceExceptionHandler = consequenceExceptionHandler; } public String getRuleBaseUpdateHandler() { return ruleBaseUpdateHandler; } public void setRuleBaseUpdateHandler(String ruleBaseUpdateHandler) { checkCanChange(); 
// throws an exception if a change isn't possible; this.ruleBaseUpdateHandler = ruleBaseUpdateHandler; } public AgendaGroupFactory getAgendaGroupFactory() { if ( isSequential() ) { if ( this.sequentialAgenda == SequentialAgenda.SEQUENTIAL ) { return ArrayAgendaGroupFactory.getInstance(); } else { return PriorityQueueAgendaGroupFactory.getInstance(); } } else { return PriorityQueueAgendaGroupFactory.getInstance(); } } public SequentialAgenda getSequentialAgenda() { return this.sequentialAgenda; } public void setSequentialAgenda(final SequentialAgenda sequentialAgenda) { checkCanChange(); // throws an exception if a change isn't possible; this.sequentialAgenda = sequentialAgenda; } public NodeInstanceFactoryRegistry getProcessNodeInstanceFactoryRegistry() { if ( this.processNodeInstanceFactoryRegistry == null ) { initProcessNodeInstanceFactoryRegistry(); } return this.processNodeInstanceFactoryRegistry; } /** * Defines if the RuleBase should try to split the rules into * multiple independent partitions that can work in parallel * using multiple threads ("true"), of if the rulebase should * work in classic single partition mode ("false"). * * @param enablePartitioning true for multi-partition or * false for single-partition. Default is false. */ public void setPartitionsEnabled(boolean enablePartitioning) { checkCanChange(); this.partitionsEnabled = enablePartitioning; } /** * Returns true if the partitioning of the rulebase is enabled * and false otherwise. Default is false. 
* * @return */ public boolean isPartitionsEnabled() { return this.partitionsEnabled; } private void initProcessNodeInstanceFactoryRegistry() { this.processNodeInstanceFactoryRegistry = new NodeInstanceFactoryRegistry(); // split on each space String locations[] = this.chainedProperties.getProperty( "processNodeInstanceFactoryRegistry", "" ).split( "\\s" ); int i = 0; // load each SemanticModule for ( String factoryLocation : locations ) { // trim leading/trailing spaces and quotes factoryLocation = factoryLocation.trim(); if ( factoryLocation.startsWith( "\"" ) ) { factoryLocation = factoryLocation.substring( 1 ); } if ( factoryLocation.endsWith( "\"" ) ) { factoryLocation = factoryLocation.substring( 0, factoryLocation.length() - 1 ); } if ( !factoryLocation.equals( "" ) ) { loadProcessNodeInstanceFactoryRegistry( factoryLocation ); } } } private void loadProcessNodeInstanceFactoryRegistry(String factoryLocation) { String content = ConfFileUtils.URLContentsToString( ConfFileUtils.getURL( factoryLocation, null, RuleBaseConfiguration.class ) ); Map<Class< ? extends Node>, NodeInstanceFactory> map = (Map<Class< ? extends Node>, NodeInstanceFactory>) MVEL.eval( content, new HashMap() ); if ( map != null ) { for ( Entry<Class< ? 
extends Node>, NodeInstanceFactory> entry : map.entrySet() ) { this.processNodeInstanceFactoryRegistry.register( entry.getKey(), entry.getValue() ); } } } public ProcessInstanceFactoryRegistry getProcessInstanceFactoryRegistry() { if ( this.processInstanceFactoryRegistry == null ) { initProcessInstanceFactoryRegistry(); } return this.processInstanceFactoryRegistry; } private void initProcessInstanceFactoryRegistry() { this.processInstanceFactoryRegistry = new ProcessInstanceFactoryRegistry(); // split on each space String locations[] = this.chainedProperties.getProperty( "processInstanceFactoryRegistry", "" ).split( "\\s" ); int i = 0; // load each SemanticModule for ( String factoryLocation : locations ) { // trim leading/trailing spaces and quotes factoryLocation = factoryLocation.trim(); if ( factoryLocation.startsWith( "\"" ) ) { factoryLocation = factoryLocation.substring( 1 ); } if ( factoryLocation.endsWith( "\"" ) ) { factoryLocation = factoryLocation.substring( 0, factoryLocation.length() - 1 ); } if ( !factoryLocation.equals( "" ) ) { loadProcessInstanceFactoryRegistry( factoryLocation ); } } } private void loadProcessInstanceFactoryRegistry(String factoryLocation) { String content = ConfFileUtils.URLContentsToString( ConfFileUtils.getURL( factoryLocation, null, RuleBaseConfiguration.class ) ); Map<Class< ? extends Process>, ProcessInstanceFactory> map = (Map<Class< ? extends Process>, ProcessInstanceFactory>) MVEL.eval( content, new HashMap() ); if ( map != null ) { for ( Entry<Class< ? 
extends Process>, ProcessInstanceFactory> entry : map.entrySet() ) { this.processInstanceFactoryRegistry.register( entry.getKey(), entry.getValue() ); } } } public Map<String, WorkDefinition> getProcessWorkDefinitions() { if ( this.workDefinitions == null ) { initWorkDefinitions(); } return this.workDefinitions; } private void initWorkDefinitions() { this.workDefinitions = new HashMap<String, WorkDefinition>(); // split on each space String locations[] = this.chainedProperties.getProperty( "drools.workDefinitions", "" ).split( "\\s" ); // load each SemanticModule for ( String factoryLocation : locations ) { // trim leading/trailing spaces and quotes factoryLocation = factoryLocation.trim(); if ( factoryLocation.startsWith( "\"" ) ) { factoryLocation = factoryLocation.substring( 1 ); } if ( factoryLocation.endsWith( "\"" ) ) { factoryLocation = factoryLocation.substring( 0, factoryLocation.length() - 1 ); } if ( !factoryLocation.equals( "" ) ) { loadWorkItems( factoryLocation ); } } } private void loadWorkItems(String location) { String content = ConfFileUtils.URLContentsToString( ConfFileUtils.getURL( location, null, RuleBaseConfiguration.class ) ); List<Map<String, Object>> workDefinitionsMap = (List<Map<String, Object>>) MVEL.eval( content, new HashMap() ); for ( Map<String, Object> workDefinitionMap : workDefinitionsMap ) { WorkDefinitionExtensionImpl workDefinition = new WorkDefinitionExtensionImpl(); workDefinition.setName( (String) workDefinitionMap.get( "name" ) ); workDefinition.setDisplayName( (String) workDefinitionMap.get( "displayName" ) ); workDefinition.setIcon( (String) workDefinitionMap.get( "icon" ) ); workDefinition.setCustomEditor( (String) workDefinitionMap.get( "customEditor" ) ); Set<ParameterDefinition> parameters = new HashSet<ParameterDefinition>(); Map<String, DataType> parameterMap = (Map<String, DataType>) workDefinitionMap.get( "parameters" ); if ( parameterMap != null ) { for ( Map.Entry<String, DataType> entry : 
parameterMap.entrySet() ) { parameters.add( new ParameterDefinitionImpl( entry.getKey(), entry.getValue() ) ); } } workDefinition.setParameters( parameters ); Set<ParameterDefinition> results = new HashSet<ParameterDefinition>(); Map<String, DataType> resultMap = (Map<String, DataType>) workDefinitionMap.get( "results" ); if ( resultMap != null ) { for ( Map.Entry<String, DataType> entry : resultMap.entrySet() ) { results.add( new ParameterDefinitionImpl( entry.getKey(), entry.getValue() ) ); } } workDefinition.setResults( results ); this.workDefinitions.put( workDefinition.getName(), workDefinition ); } } public ContextInstanceFactoryRegistry getProcessContextInstanceFactoryRegistry() { if ( this.processContextInstanceFactoryRegistry == null ) { initProcessContextInstanceFactoryRegistry(); } return this.processContextInstanceFactoryRegistry; } private void initProcessContextInstanceFactoryRegistry() { this.processContextInstanceFactoryRegistry = new ContextInstanceFactoryRegistry(); // split on each space String locations[] = this.chainedProperties.getProperty( "processContextInstanceFactoryRegistry", "" ).split( "\\s" ); int i = 0; // load each SemanticModule for ( String factoryLocation : locations ) { // trim leading/trailing spaces and quotes factoryLocation = factoryLocation.trim(); if ( factoryLocation.startsWith( "\"" ) ) { factoryLocation = factoryLocation.substring( 1 ); } if ( factoryLocation.endsWith( "\"" ) ) { factoryLocation = factoryLocation.substring( 0, factoryLocation.length() - 1 ); } if ( !factoryLocation.equals( "" ) ) { loadProcessContextInstanceFactoryRegistry( factoryLocation ); } } } private void loadProcessContextInstanceFactoryRegistry(String factoryLocation) { String content = ConfFileUtils.URLContentsToString( ConfFileUtils.getURL( factoryLocation, null, RuleBaseConfiguration.class ) ); Map<Class< ? extends Context>, ContextInstanceFactory> map = (Map<Class< ? 
extends Context>, ContextInstanceFactory>) MVEL.eval( content, new HashMap() ); if ( map != null ) { for ( Entry<Class< ? extends Context>, ContextInstanceFactory> entry : map.entrySet() ) { this.processContextInstanceFactoryRegistry.register( entry.getKey(), entry.getValue() ); } } } public ProcessInstanceManager getProcessInstanceManager() { if ( this.processInstanceManager == null ) { initProcessInstanceManager(); } return this.processInstanceManager; } private void initProcessInstanceManager() { String className = this.chainedProperties.getProperty( "processInstanceManager", "org.drools.process.instance.impl.DefaultProcessInstanceManager" ); Class clazz = null; try { clazz = Thread.currentThread().getContextClassLoader().loadClass( className ); } catch ( ClassNotFoundException e ) { } if ( clazz == null ) { try { clazz = RuleBaseConfiguration.class.getClassLoader().loadClass( className ); } catch ( ClassNotFoundException e ) { } } if ( clazz != null ) { try { this.processInstanceManager = (ProcessInstanceManager) clazz.newInstance(); } catch ( Exception e ) { throw new IllegalArgumentException( "Unable to instantiate process instance manager '" + className + "'" ); } } else { throw new IllegalArgumentException( "Process instance manager '" + className + "' not found" ); } } public boolean isAdvancedProcessRuleIntegration() { return advancedProcessRuleIntegration; } public void setAdvancedProcessRuleIntegration(boolean advancedProcessRuleIntegration) { this.advancedProcessRuleIntegration = advancedProcessRuleIntegration; } private boolean determineShadowProxy(String userValue) { if ( this.isSequential() ) { // sequential never needs shadowing, so always override return false; } if ( userValue != null ) { return Boolean.valueOf( userValue ).booleanValue(); } else { return true; } } private static ConflictResolver determineConflictResolver(String className) { Class clazz = null; try { clazz = Thread.currentThread().getContextClassLoader().loadClass( className ); } 
catch ( ClassNotFoundException e ) { } if ( clazz == null ) { try { clazz = RuleBaseConfiguration.class.getClassLoader().loadClass( className ); } catch ( ClassNotFoundException e ) { } } if ( clazz != null ) { try { return (ConflictResolver) clazz.getMethod( "getInstance", null ).invoke( null, null ); } catch ( Exception e ) { throw new IllegalArgumentException( "Unable to Conflict Resolver '" + className + "'" ); } } else { throw new IllegalArgumentException( "conflict Resolver '" + className + "' not found" ); } } public void setConflictResolver(ConflictResolver conflictResolver) { checkCanChange(); // throws an exception if a change isn't possible; this.conflictResolver = conflictResolver; } public ConflictResolver getConflictResolver() { return this.conflictResolver; } public ClassLoader getClassLoader() { return classLoader; } public void setClassLoader(ClassLoader classLoader) { this.classLoader = classLoader; } private static ConsequenceExceptionHandler determineConsequenceExceptionHandler(String className) { return (ConsequenceExceptionHandler) instantiateClass( "ConsequenceExceptionHandler", className ); } private static Object instantiateClass(String type, String className) { Class clazz = null; try { clazz = Thread.currentThread().getContextClassLoader().loadClass( className ); } catch ( ClassNotFoundException e ) { } if ( clazz == null ) { try { clazz = RuleBaseConfiguration.class.getClassLoader().loadClass( className ); } catch ( ClassNotFoundException e ) { } } if ( clazz != null ) { try { return clazz.newInstance(); } catch ( Exception e ) { throw new IllegalArgumentException( "Unable to instantiate " + type + " '" + className + "'" ); } } else { throw new IllegalArgumentException( type + " '" + className + "' not found" ); } } public static class AssertBehaviour implements Externalizable { private static final long serialVersionUID = 400L; public static final AssertBehaviour IDENTITY = new AssertBehaviour( 0 ); public static final AssertBehaviour 
EQUALITY = new AssertBehaviour( 1 );

        // Ordinal backing this type-safe enum instance (pre-Java-5 enum idiom):
        // 0 = IDENTITY, 1 = EQUALITY. Serialized explicitly via Externalizable.
        private int value;

        /** Restores the ordinal from the stream; readResolve() maps it back to a singleton. */
        public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
            value = in.readInt();
        }

        /** Writes only the ordinal; identity is re-established on read via readResolve(). */
        public void writeExternal(ObjectOutput out) throws IOException {
            out.writeInt( value );
        }

        // Public no-arg constructor required by the Externalizable contract.
        public AssertBehaviour() {
        }

        // Private: instances are restricted to the predefined constants above.
        private AssertBehaviour(final int value) {
            this.value = value;
        }

        // NOTE(review): equals() is overridden without hashCode(); if instances are
        // ever used as hash keys the equals/hashCode contract is violated — confirm
        // whether deserialized duplicates can escape readResolve().
        public boolean equals(Object obj) {
            if ( obj == this ) return true;
            else if ( obj instanceof AssertBehaviour ) {
                AssertBehaviour that = (AssertBehaviour) obj;
                return value == that.value;
            }
            return false;
        }

        /**
         * Parses a configuration string (case-insensitive) into the matching constant.
         *
         * @throws IllegalArgumentException if the string is neither "IDENTITY" nor "EQUALITY"
         */
        public static AssertBehaviour determineAssertBehaviour(final String value) {
            if ( "IDENTITY".equalsIgnoreCase( value ) ) {
                return IDENTITY;
            } else if ( "EQUALITY".equalsIgnoreCase( value ) ) {
                return EQUALITY;
            } else {
                throw new IllegalArgumentException( "Illegal enum value '" + value + "' for AssertBehaviour" );
            }
        }

        // Canonicalizes deserialized instances back to the shared constants so that
        // reference comparison (==) keeps working after serialization round-trips.
        private Object readResolve() throws java.io.ObjectStreamException {
            switch ( this.value ) {
                case 0 :
                    return IDENTITY;
                case 1 :
                    return EQUALITY;
                default :
                    throw new IllegalArgumentException( "Illegal enum value '" + this.value + "' for AssertBehaviour" );
            }
        }

        public String toString() {
            return "AssertBehaviour : " + ((this.value == 0) ? "identity" : "equality");
        }
    }

    /**
     * Type-safe enum (pre-Java-5 idiom) with two constants, PRESERVE (0) and
     * DISCARD (1). Externalizable + readResolve() preserve singleton identity
     * across serialization.
     */
    public static class LogicalOverride
        implements
        Externalizable {
        private static final long            serialVersionUID = 400L;

        public static final LogicalOverride PRESERVE = new LogicalOverride( 0 );
        public static final LogicalOverride DISCARD  = new LogicalOverride( 1 );

        // Ordinal backing this instance: 0 = PRESERVE, 1 = DISCARD.
        private int value;

        /** Restores the ordinal; readResolve() maps it back to a singleton. */
        public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
            value = in.readInt();
        }

        /** Writes only the ordinal. */
        public void writeExternal(ObjectOutput out) throws IOException {
            out.writeInt( value );
        }

        // Public no-arg constructor required by the Externalizable contract.
        public LogicalOverride() {
        }

        // Private: instances are restricted to the predefined constants above.
        private LogicalOverride(final int value) {
            this.value = value;
        }

        /**
         * Parses a configuration string (case-insensitive) into the matching constant.
         *
         * @throws IllegalArgumentException if the string is neither "PRESERVE" nor "DISCARD"
         */
        public static LogicalOverride determineLogicalOverride(final String value) {
            if ( "PRESERVE".equalsIgnoreCase( value ) ) {
                return PRESERVE;
            } else if ( "DISCARD".equalsIgnoreCase( value ) ) {
                return DISCARD;
            } else {
                throw new IllegalArgumentException( "Illegal enum value '" + value + "' for LogicalOverride" );
            }
        }

        // Canonicalizes deserialized instances back to the shared constants.
        private Object readResolve() throws java.io.ObjectStreamException {
            switch ( this.value ) {
                case 0 :
                    return PRESERVE;
                case 1 :
                    return DISCARD;
                default :
                    throw new IllegalArgumentException( "Illegal enum value '" + this.value + "' for LogicalOverride" );
            }
        }

        // NOTE(review): equals() without hashCode() — see AssertBehaviour note.
        public boolean equals(Object obj) {
            if ( obj == this ) {
                return true;
            } else if ( obj instanceof LogicalOverride ) {
                return value == ((LogicalOverride) obj).value;
            }
            return false;
        }

        public String toString() {
            return "LogicalOverride : " + ((this.value == 0) ? "preserve" : "discard");
        }
    }

    /**
     * Type-safe enum (pre-Java-5 idiom) with two constants, SEQUENTIAL (0) and
     * DYNAMIC (1). Externalizable + readResolve() preserve singleton identity
     * across serialization. Unlike the sibling classes, the parse strings here
     * are lower-case ("sequential"/"dynamic"), though matching is
     * case-insensitive either way.
     */
    public static class SequentialAgenda
        implements
        Externalizable {
        private static final long             serialVersionUID = 400L;

        public static final SequentialAgenda SEQUENTIAL = new SequentialAgenda( 0 );
        public static final SequentialAgenda DYNAMIC    = new SequentialAgenda( 1 );

        // Ordinal backing this instance: 0 = SEQUENTIAL, 1 = DYNAMIC.
        private int value;

        /** Restores the ordinal; readResolve() maps it back to a singleton. */
        public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
            value = in.readInt();
        }

        /** Writes only the ordinal. */
        public void writeExternal(ObjectOutput out) throws IOException {
            out.writeInt( value );
        }

        // Public no-arg constructor required by the Externalizable contract.
        public SequentialAgenda() {
        }

        // Private: instances are restricted to the predefined constants above.
        private SequentialAgenda(final int value) {
            this.value = value;
        }

        /**
         * Parses a configuration string (case-insensitive) into the matching constant.
         *
         * @throws IllegalArgumentException if the string is neither "sequential" nor "dynamic"
         */
        public static SequentialAgenda determineSequentialAgenda(final String value) {
            if ( "sequential".equalsIgnoreCase( value ) ) {
                return SEQUENTIAL;
            } else if ( "dynamic".equalsIgnoreCase( value ) ) {
                return DYNAMIC;
            } else {
                throw new IllegalArgumentException( "Illegal enum value '" + value + "' for SequentialAgenda" );
            }
        }

        // Canonicalizes deserialized instances back to the shared constants.
        private Object readResolve() throws java.io.ObjectStreamException {
            switch ( this.value ) {
                case 0 :
                    return SEQUENTIAL;
                case 1 :
                    return DYNAMIC;
                default :
                    throw new IllegalArgumentException( "Illegal enum value '" + this.value + "' for SequentialAgenda" );
            }
        }

        // NOTE(review): no equals()/hashCode() here, unlike the sibling classes;
        // identity comparison relies entirely on readResolve() canonicalization.
        public String toString() {
            return "SequentialAgenda : " + ((this.value == 0) ? "sequential" : "dynamic");
        }
    }
}
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package io.druid.tests.indexer;

import com.google.common.base.Throwables;
import com.google.inject.Inject;
import com.metamx.common.ISE;
import com.metamx.common.logger.Logger;
import io.druid.testing.IntegrationTestingConfig;
import io.druid.testing.guice.DruidTestModuleFactory;
import io.druid.testing.utils.RetryUtil;
import io.druid.testing.utils.TestQueryHelper;
import kafka.admin.AdminUtils;
import kafka.common.TopicExistsException;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import kafka.utils.ZKStringSerializer$;
import org.I0Itec.zkclient.ZkClient;
import java.util.concurrent.TimeUnit;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.testng.annotations.AfterClass;
import org.testng.annotations.Guice;
import org.testng.annotations.Test;
import org.apache.commons.io.IOUtils;
import java.io.InputStream;
import java.io.IOException;
import java.util.Properties;
import java.util.concurrent.Callable;

/**
 * Integration test for the Kafka firehose. The test:
 * <ol>
 *   <li>creates a Kafka topic,</li>
 *   <li>submits a realtime indexing task built from {@link #INDEXER_FILE},</li>
 *   <li>streams synthetic wikipedia-style events into the topic for
 *       {@link #MINUTES_TO_SEND} minutes,</li>
 *   <li>verifies the data via queries answered by the realtime task,</li>
 *   <li>waits for segment hand-off and re-runs the queries against
 *       historicals.</li>
 * </ol>
 * Teardown deletes the topic and unloads any segments that were created.
 */
@Guice(moduleFactory = DruidTestModuleFactory.class)
public class ITKafkaTest extends AbstractIndexerTest
{
  private static final Logger LOG = new Logger(ITKafkaTest.class);
  private static final int DELAY_BETWEEN_EVENTS_SECS = 5;
  private static final String INDEXER_FILE = "/indexer/kafka_index_task.json";
  private static final String QUERIES_FILE = "/indexer/kafka_index_queries.json";
  private static final String DATASOURCE = "kafka_test";
  private static final String TOPIC_NAME = "kafkaTopic";
  private static final int MINUTES_TO_SEND = 2;

  // We'll fill in the current time and numbers for added, deleted and changed
  // before sending the event.
  final String event_template =
      "{\"timestamp\": \"%s\"," +
      "\"page\": \"Gypsy Danger\"," +
      "\"language\" : \"en\"," +
      "\"user\" : \"nuclear\"," +
      "\"unpatrolled\" : \"true\"," +
      "\"newPage\" : \"true\"," +
      "\"robot\": \"false\"," +
      "\"anonymous\": \"false\"," +
      "\"namespace\":\"article\"," +
      "\"continent\":\"North America\"," +
      "\"country\":\"United States\"," +
      "\"region\":\"Bay Area\"," +
      "\"city\":\"San Francisco\"," +
      "\"added\":%d," +
      "\"deleted\":%d," +
      "\"delta\":%d}";

  // ID of the submitted indexing task; null until submission succeeds.
  private String taskID;
  // ZK connection used to create/delete the topic; null if creation never ran.
  private ZkClient zkClient;
  // Primitive boolean (defaults to false) instead of the original nullable
  // Boolean: the old field caused an unboxing NullPointerException in
  // afterClass() whenever testKafka() failed before assigning it — which
  // masked the real test failure during teardown.
  private boolean segmentsExist;

  // format for the querying interval
  private final DateTimeFormatter INTERVAL_FMT = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:'00Z'");
  // format for the expected timestamp in a query response
  private final DateTimeFormatter TIMESTAMP_FMT = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'.000Z'");

  private DateTime dtFirst;            // timestamp of 1st event
  private DateTime dtLast;             // timestamp of last event

  @Inject
  private TestQueryHelper queryHelper;
  @Inject
  private IntegrationTestingConfig config;

  @Test
  public void testKafka()
  {
    LOG.info("Starting test: ITKafkaTest");

    // create topic
    try {
      int sessionTimeoutMs = 10000;
      int connectionTimeoutMs = 10000;
      String zkHosts = config.getZookeeperHosts();
      zkClient = new ZkClient(zkHosts, sessionTimeoutMs, connectionTimeoutMs, ZKStringSerializer$.MODULE$);

      int numPartitions = 1;
      int replicationFactor = 1;
      Properties topicConfig = new Properties();
      AdminUtils.createTopic(zkClient, TOPIC_NAME, numPartitions, replicationFactor, topicConfig);
    }
    catch (TopicExistsException e) {
      // it's ok if the topic already exists
    }
    catch (Exception e) {
      throw new ISE(e, "could not create kafka topic");
    }

    String indexerSpec = "";

    // replace temp strings in indexer file
    try {
      LOG.info("indexerFile name: [%s]", INDEXER_FILE);
      indexerSpec = getTaskAsString(INDEXER_FILE);
      indexerSpec = indexerSpec.replaceAll("%%TOPIC%%", TOPIC_NAME);
      indexerSpec = indexerSpec.replaceAll("%%ZOOKEEPER_SERVER%%", config.getZookeeperHosts());
      indexerSpec = indexerSpec.replaceAll("%%GROUP_ID%%", Long.toString(System.currentTimeMillis()));
      // shut the firehose off a safe margin after we stop sending events
      indexerSpec = indexerSpec.replaceAll(
          "%%SHUTOFFTIME%%",
          new DateTime(System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(2 * MINUTES_TO_SEND)).toString()
      );
      LOG.info("indexerFile: [%s]\n", indexerSpec);
    }
    catch (Exception e) {
      // log here so the message will appear in the console output
      LOG.error("could not read indexer file [%s]", INDEXER_FILE);
      throw new ISE(e, "could not read indexer file [%s]", INDEXER_FILE);
    }

    // start indexing task
    taskID = indexer.submitTask(indexerSpec);
    LOG.info("-------------SUBMITTED TASK");

    // set up kafka producer
    Properties properties = new Properties();
    properties.put("metadata.broker.list", config.getKafkaHost());
    LOG.info("kafka host: [%s]", config.getKafkaHost());
    properties.put("serializer.class", "kafka.serializer.StringEncoder");
    properties.put("request.required.acks", "1");
    properties.put("producer.type", "async");
    ProducerConfig producerConfig = new ProducerConfig(properties);
    Producer<String, String> producer = new Producer<String, String>(producerConfig);

    DateTimeZone zone = DateTimeZone.forID("UTC");
    // format for putting into events
    DateTimeFormatter event_fmt = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'");

    DateTime dt = new DateTime(zone);    // timestamp to put on events
    dtFirst = dt;                        // timestamp of 1st event
    dtLast = dt;                         // timestamp of last event

    // stop sending events when time passes this
    DateTime dtStop = dtFirst.plusMinutes(MINUTES_TO_SEND).plusSeconds(30);

    // these are used to compute the expected aggregations
    int added = 0;
    int num_events = 0;

    // send data to kafka
    while (dt.compareTo(dtStop) < 0) {   // as long as we're within the time span
      LOG.info("sending event at [%s]", event_fmt.print(dt));
      num_events++;
      added += num_events;

      // construct the event to send
      String event = String.format(event_template, event_fmt.print(dt), num_events, 0, num_events);
      LOG.debug("event: [%s]", event);
      try {
        // Send event to kafka
        KeyedMessage<String, String> message = new KeyedMessage<String, String>(TOPIC_NAME, event);
        producer.send(message);
      }
      catch (Exception ioe) {
        Throwables.propagate(ioe);
      }

      try {
        Thread.sleep(DELAY_BETWEEN_EVENTS_SECS * 1000);
      }
      catch (InterruptedException ex) {
        // deliberately best-effort: keep pacing events even if interrupted
      }
      dtLast = dt;
      dt = new DateTime(zone);
    }

    producer.close();

    // put the timestamps into the query structure
    String query_response_template;
    InputStream is = ITKafkaTest.class.getResourceAsStream(QUERIES_FILE);
    if (null == is) {
      throw new ISE("could not open query file: %s", QUERIES_FILE);
    }
    try {
      query_response_template = IOUtils.toString(is, "UTF-8");
    }
    catch (IOException e) {
      throw new ISE(e, "could not read query file: %s", QUERIES_FILE);
    }
    finally {
      // the original leaked this stream on both paths
      IOUtils.closeQuietly(is);
    }

    String queryStr = query_response_template
        // time boundary
        .replace("%%TIMEBOUNDARY_RESPONSE_TIMESTAMP%%", TIMESTAMP_FMT.print(dtFirst))
        .replace("%%TIMEBOUNDARY_RESPONSE_MAXTIME%%", TIMESTAMP_FMT.print(dtLast))
        .replace("%%TIMEBOUNDARY_RESPONSE_MINTIME%%", TIMESTAMP_FMT.print(dtFirst))
        // time series
        .replace("%%TIMESERIES_QUERY_START%%", INTERVAL_FMT.print(dtFirst))
        .replace("%%TIMESERIES_QUERY_END%%", INTERVAL_FMT.print(dtFirst.plusMinutes(MINUTES_TO_SEND + 2)))
        .replace("%%TIMESERIES_RESPONSE_TIMESTAMP%%", TIMESTAMP_FMT.print(dtFirst))
        .replace("%%TIMESERIES_ADDED%%", Integer.toString(added))
        .replace("%%TIMESERIES_NUMEVENTS%%", Integer.toString(num_events));

    // this query will probably be answered from the realtime task
    try {
      this.queryHelper.testQueriesFromString(queryStr, 2);
    }
    catch (Exception e) {
      Throwables.propagate(e);
    }

    // wait for segments to be handed off
    try {
      RetryUtil.retryUntil(
          new Callable<Boolean>()
          {
            @Override
            public Boolean call() throws Exception
            {
              return coordinator.areSegmentsLoaded(DATASOURCE);
            }
          },
          true,
          30000,
          10,
          "Real-time generated segments loaded"
      );
    }
    catch (Exception e) {
      Throwables.propagate(e);
    }
    LOG.info("segments are present");
    segmentsExist = true;

    // this query will be answered by historical
    try {
      this.queryHelper.testQueriesFromString(queryStr, 2);
    }
    catch (Exception e) {
      Throwables.propagate(e);
    }
  }

  @AfterClass
  public void afterClass()
  {
    LOG.info("teardown");

    // wait for the task to complete — only if one was actually submitted;
    // testKafka() may have aborted before submission
    if (taskID != null) {
      indexer.waitUntilTaskCompletes(taskID);
    }

    // delete kafka topic and release the ZK connection (previously leaked);
    // zkClient is null when topic creation never got that far
    if (zkClient != null) {
      try {
        AdminUtils.deleteTopic(zkClient, TOPIC_NAME);
      }
      catch (Exception e) {
        LOG.warn("exception while deleting topic: [%s]", e.getMessage());
      }
      zkClient.close();
    }

    // remove segments (segmentsExist is false unless hand-off completed)
    if (segmentsExist) {
      try {
        String first = DateTimeFormat.forPattern("yyyy-MM-dd'T00:00:00.000Z'").print(dtFirst);
        String last = DateTimeFormat.forPattern("yyyy-MM-dd'T00:00:00.000Z'").print(dtFirst.plusDays(1));

        unloadAndKillData(DATASOURCE, first, last);
      }
      catch (Exception e) {
        LOG.warn("exception while removing segments: [%s]", e.getMessage());
      }
    }
  }
}
package one.util.streamex;

import java.util.Iterator;
import java.util.PrimitiveIterator;
import java.util.Spliterator;
import java.util.function.Consumer;
import java.util.function.DoubleConsumer;
import java.util.function.IntConsumer;
import java.util.function.LongConsumer;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

import static one.util.streamex.StreamExInternals.*;

/**
 * A Spliterator over an iterator of unknown size that supports splitting by
 * buffering batches of elements from the iterator into arrays.
 *
 * Lifecycle: while {@code it != null} the spliterator is in "iterator mode"
 * (elements come straight from the iterator). The first {@code trySplit()}
 * drains up to {@code getN()} elements into an array; if the iterator is then
 * exhausted, this spliterator switches permanently to "array mode" over
 * {@code [index, fence)}. Otherwise the filled array is handed off as the
 * split prefix and this spliterator stays in iterator mode. Subsequent splits
 * in array mode halve the remaining range.
 *
 * {@code est} tracks the size estimate reported by {@link #estimateSize()};
 * it is apportioned between prefix and suffix by {@link #correctSize(UnknownSizeSpliterator)}.
 */
/* package */abstract class UnknownSizeSpliterator<T, S extends UnknownSizeSpliterator<? extends T, S, I>, I extends Iterator<? extends T>>
        implements Spliterator<T> {
    static final int BATCH_UNIT = 1 << 10; // batch array size increment
    static final int MAX_BATCH = 1 << 25; // max batch array size

    /**
     * Optimize the stream created on IteratorSpliterator replacing it with
     * UnknownSizeSpliterator.
     *
     * Uses reflection (via the SOURCE_SPLITERATOR / SPLITERATOR_ITERATOR
     * handles from StreamExInternals) to detect a JDK
     * {@code Spliterators$IteratorSpliterator} source and re-wrap its
     * underlying iterator; returns the stream unchanged if reflection is
     * unavailable or the source does not match.
     *
     * @param stream original stream
     * @return either original or optimized stream
     */
    @SuppressWarnings("unchecked")
    static <T> Stream<T> optimize(Stream<T> stream) {
        if (SOURCE_SPLITERATOR == null || SPLITERATOR_ITERATOR == null)
            return stream;
        Iterator<T> it = null;
        try {
            Spliterator<T> spliterator = (Spliterator<T>) SOURCE_SPLITERATOR.get(stream);
            // only replace an unsized JDK IteratorSpliterator; a SIZED source
            // would lose its size information if re-wrapped here
            if (spliterator != null && !spliterator.hasCharacteristics(SIZED)
                && spliterator.getClass().getName().equals("java.util.Spliterators$IteratorSpliterator")) {
                it = (Iterator<T>) SPLITERATOR_ITERATOR.get(spliterator);
            }
        } catch (IllegalArgumentException | IllegalAccessException e) {
            // ignore: fall through and keep the original stream untouched
        }
        if (it == null)
            return stream;
        stream.spliterator(); // consume stream
        // preserve parallelism and close behavior of the original stream
        return StreamSupport.stream(new USOfRef<>(it), stream.isParallel()).onClose(stream::close);
    }

    // Source iterator; null once the spliterator has switched to array mode.
    I it;
    // Valid range [index, fence) of the buffered array (array mode only).
    int index, fence;
    // Reported size estimate; Long.MAX_VALUE signals "unknown".
    long est = Long.MAX_VALUE;

    // Iterator-mode constructor.
    UnknownSizeSpliterator(I iterator) {
        this.it = iterator;
    }

    // Array-mode constructor (subclasses supply the array itself).
    UnknownSizeSpliterator(int index, int fence) {
        this.index = index;
        this.fence = fence;
    }

    // Next batch size: grows by BATCH_UNIT per split, capped at MAX_BATCH.
    int getN() {
        int n = fence + BATCH_UNIT;
        return n > MAX_BATCH ? MAX_BATCH : n;
    }

    /**
     * Apportions the size estimate between the freshly split-off prefix and
     * this (suffix) spliterator, and returns the prefix.
     *
     * If the iterator is still live, the remainder is still unbounded, so the
     * prefix just gets "unknown minus one". Otherwise the known estimate is
     * split in half between prefix and suffix (mutating this.est).
     */
    S correctSize(S prefix) {
        if (this.it != null)
            prefix.est = Long.MAX_VALUE - 1;
        else {
            prefix.est = this.est / 2;
            this.est -= prefix.est;
        }
        return prefix;
    }

    @Override
    public long estimateSize() {
        return est;
    }

    @Override
    public int characteristics() {
        return ORDERED;
    }

    /** Reference (Object) specialization. */
    static class USOfRef<T> extends UnknownSizeSpliterator<T, USOfRef<T>, Iterator<? extends T>> {
        Object[] array;

        USOfRef(Iterator<? extends T> iterator) {
            super(iterator);
        }

        USOfRef(Object[] array, int index, int fence) {
            super(index, fence);
            this.array = array;
        }

        @Override
        public Spliterator<T> trySplit() {
            Iterator<? extends T> i = it;
            if (i != null) {
                // iterator mode: drain up to getN() elements into a buffer
                int n = getN();
                Object[] a = new Object[n];
                int j = 0;
                while (i.hasNext() && j < n) {
                    a[j++] = i.next();
                }
                fence = j;
                if (i.hasNext()) {
                    // iterator not exhausted: hand the buffer off as the prefix,
                    // stay in iterator mode for the suffix
                    return correctSize(new USOfRef<>(a, 0, j));
                }
                // iterator exhausted: adopt the buffer and fall through to
                // array-mode splitting below
                it = null;
                array = a;
            }
            // array mode: split [lo, fence) at the midpoint; note the
            // side effect "index = mid" advances this (suffix) spliterator
            int lo = index, mid = (lo + fence) >>> 1;
            return (lo >= mid) ? null : correctSize(new USOfRef<>(array, lo, index = mid));
        }

        @Override
        public void forEachRemaining(Consumer<? super T> action) {
            if (it != null)
                it.forEachRemaining(action);
            else {
                Object[] a = array;
                int i = index, hi = fence;
                while (i < hi) {
                    @SuppressWarnings("unchecked")
                    T t = (T) a[i++];
                    action.accept(t);
                }
            }
            // mark exhausted either way
            index = fence;
            est = 0;
        }

        @Override
        public boolean tryAdvance(Consumer<? super T> action) {
            if (it != null) {
                if (it.hasNext()) {
                    action.accept(it.next());
                    return true;
                }
                it = null;
                index = fence;
            } else if (index < fence) {
                @SuppressWarnings("unchecked")
                T t = (T) array[index++];
                action.accept(t);
                return true;
            }
            est = 0;
            return false;
        }
    }

    /** int specialization; same state machine as USOfRef. */
    static class USOfInt extends UnknownSizeSpliterator<Integer, USOfInt, PrimitiveIterator.OfInt> implements
            Spliterator.OfInt {
        int[] array;

        USOfInt(PrimitiveIterator.OfInt iterator) {
            super(iterator);
        }

        USOfInt(int[] array, int index, int fence) {
            super(index, fence);
            this.array = array;
        }

        @Override
        public Spliterator.OfInt trySplit() {
            PrimitiveIterator.OfInt i = it;
            if (i != null) {
                // iterator mode: buffer a batch (see USOfRef.trySplit)
                int n = getN();
                int[] a = new int[n];
                int j = 0;
                while (i.hasNext() && j < n) {
                    a[j++] = i.next();
                }
                fence = j;
                if (i.hasNext()) {
                    return correctSize(new USOfInt(a, 0, j));
                }
                it = null;
                array = a;
            }
            int lo = index, mid = (lo + fence) >>> 1;
            return (lo >= mid) ? null : correctSize(new USOfInt(array, lo, index = mid));
        }

        @Override
        public void forEachRemaining(IntConsumer action) {
            if (it != null)
                it.forEachRemaining(action);
            else {
                int[] a = array;
                int i = index, hi = fence;
                while (i < hi) {
                    action.accept(a[i++]);
                }
            }
            index = fence;
            est = 0;
        }

        @Override
        public boolean tryAdvance(IntConsumer action) {
            if (it != null) {
                if (it.hasNext()) {
                    action.accept(it.nextInt());
                    return true;
                }
                it = null;
                index = fence;
            } else if (index < fence) {
                action.accept(array[index++]);
                return true;
            }
            est = 0;
            return false;
        }
    }

    /** long specialization; same state machine as USOfRef. */
    static class USOfLong extends UnknownSizeSpliterator<Long, USOfLong, PrimitiveIterator.OfLong> implements
            Spliterator.OfLong {
        long[] array;

        USOfLong(PrimitiveIterator.OfLong iterator) {
            super(iterator);
        }

        USOfLong(long[] array, int index, int fence) {
            super(index, fence);
            this.array = array;
        }

        @Override
        public Spliterator.OfLong trySplit() {
            PrimitiveIterator.OfLong i = it;
            if (i != null) {
                // iterator mode: buffer a batch (see USOfRef.trySplit)
                int n = getN();
                long[] a = new long[n];
                int j = 0;
                while (i.hasNext() && j < n) {
                    a[j++] = i.next();
                }
                fence = j;
                if (i.hasNext()) {
                    return correctSize(new USOfLong(a, 0, j));
                }
                it = null;
                array = a;
            }
            int lo = index, mid = (lo + fence) >>> 1;
            return (lo >= mid) ? null : correctSize(new USOfLong(array, lo, index = mid));
        }

        @Override
        public void forEachRemaining(LongConsumer action) {
            if (it != null)
                it.forEachRemaining(action);
            else {
                long[] a = array;
                int i = index, hi = fence;
                while (i < hi) {
                    action.accept(a[i++]);
                }
            }
            index = fence;
            est = 0;
        }

        @Override
        public boolean tryAdvance(LongConsumer action) {
            if (it != null) {
                if (it.hasNext()) {
                    action.accept(it.nextLong());
                    return true;
                }
                it = null;
                index = fence;
            } else if (index < fence) {
                action.accept(array[index++]);
                return true;
            }
            est = 0;
            return false;
        }
    }

    /** double specialization; same state machine as USOfRef. */
    static class USOfDouble extends UnknownSizeSpliterator<Double, USOfDouble, PrimitiveIterator.OfDouble> implements
            Spliterator.OfDouble {
        double[] array;

        USOfDouble(PrimitiveIterator.OfDouble iterator) {
            super(iterator);
        }

        USOfDouble(double[] array, int index, int fence) {
            super(index, fence);
            this.array = array;
        }

        @Override
        public Spliterator.OfDouble trySplit() {
            PrimitiveIterator.OfDouble i = it;
            if (i != null) {
                // iterator mode: buffer a batch (see USOfRef.trySplit)
                int n = getN();
                double[] a = new double[n];
                int j = 0;
                while (i.hasNext() && j < n) {
                    a[j++] = i.next();
                }
                fence = j;
                if (i.hasNext()) {
                    return correctSize(new USOfDouble(a, 0, j));
                }
                it = null;
                array = a;
            }
            int lo = index, mid = (lo + fence) >>> 1;
            return (lo >= mid) ? null : correctSize(new USOfDouble(array, lo, index = mid));
        }

        @Override
        public void forEachRemaining(DoubleConsumer action) {
            if (it != null)
                it.forEachRemaining(action);
            else {
                double[] a = array;
                int i = index, hi = fence;
                while (i < hi) {
                    action.accept(a[i++]);
                }
            }
            index = fence;
            est = 0;
        }

        @Override
        public boolean tryAdvance(DoubleConsumer action) {
            if (it != null) {
                if (it.hasNext()) {
                    action.accept(it.nextDouble());
                    return true;
                }
                it = null;
                index = fence;
            } else if (index < fence) {
                action.accept(array[index++]);
                return true;
            }
            est = 0;
            return false;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.checkpoint.metadata;

import org.apache.flink.core.fs.FSDataInputStream;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.core.fs.Path;
import org.apache.flink.core.memory.ByteArrayInputStreamWithPos;
import org.apache.flink.core.memory.ByteArrayOutputStreamWithPos;
import org.apache.flink.core.memory.DataInputViewStreamWrapper;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;
import org.apache.flink.runtime.checkpoint.MasterState;
import org.apache.flink.runtime.checkpoint.OperatorState;
import org.apache.flink.runtime.state.ChangelogTestUtils;
import org.apache.flink.runtime.state.KeyGroupRangeOffsets;
import org.apache.flink.runtime.state.KeyGroupsStateHandle;
import org.apache.flink.runtime.state.KeyedStateHandle;
import org.apache.flink.runtime.state.StreamStateHandle;
import org.apache.flink.runtime.state.changelog.ChangelogStateBackendHandle;
import org.apache.flink.runtime.state.changelog.ChangelogStateBackendHandle.ChangelogStateBackendHandleImpl;
import org.apache.flink.runtime.state.memory.ByteStreamStateHandle;

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import javax.annotation.Nullable;

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
import java.util.stream.Collectors;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/** Various tests for the version 3 format serializer of a checkpoint. */
public class MetadataV3SerializerTest {

    @Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder();

    /** Round-trips metadata with neither operator nor master state. */
    @Test
    public void testCheckpointWithNoState() throws Exception {
        final Random rnd = new Random();

        for (int i = 0; i < 100; ++i) {
            // mask the sign bit: checkpoint IDs must be non-negative
            final long checkpointId = rnd.nextLong() & 0x7fffffffffffffffL;
            final Collection<OperatorState> taskStates = Collections.emptyList();
            final Collection<MasterState> masterStates = Collections.emptyList();

            testCheckpointSerialization(checkpointId, taskStates, masterStates, null);
        }
    }

    /** Round-trips metadata that carries only (random) master state. */
    @Test
    public void testCheckpointWithOnlyMasterState() throws Exception {
        final Random rnd = new Random();
        final int maxNumMasterStates = 5;

        for (int i = 0; i < 100; ++i) {
            final long checkpointId = rnd.nextLong() & 0x7fffffffffffffffL;

            final Collection<OperatorState> operatorStates = Collections.emptyList();

            final int numMasterStates = rnd.nextInt(maxNumMasterStates) + 1;
            final Collection<MasterState> masterStates =
                    CheckpointTestUtils.createRandomMasterStates(rnd, numMasterStates);

            testCheckpointSerialization(checkpointId, operatorStates, masterStates, null);
        }
    }

    @Test
    public void testCheckpointWithOnlyTaskStateForCheckpoint() throws Exception {
        testCheckpointWithOnlyTaskState(null);
    }

    @Test
    public void testCheckpointWithOnlyTaskStateForSavepoint() throws Exception {
        testCheckpointWithOnlyTaskState(temporaryFolder.newFolder().toURI().toString());
    }

    /**
     * Round-trips metadata with only operator/task state.
     *
     * @param basePath external pointer base (savepoint-style) or null for a checkpoint.
     */
    private void testCheckpointWithOnlyTaskState(String basePath) throws Exception {
        final Random rnd = new Random();
        final int maxTaskStates = 20;
        final int maxNumSubtasks = 20;

        for (int i = 0; i < 100; ++i) {
            final long checkpointId = rnd.nextLong() & 0x7fffffffffffffffL;

            final int numTasks = rnd.nextInt(maxTaskStates) + 1;
            final int numSubtasks = rnd.nextInt(maxNumSubtasks) + 1;
            final Collection<OperatorState> taskStates =
                    CheckpointTestUtils.createOperatorStates(
                            rnd, basePath, numTasks, 0, 0, numSubtasks);

            final Collection<MasterState> masterStates = Collections.emptyList();

            testCheckpointSerialization(checkpointId, taskStates, masterStates, basePath);
        }
    }

    @Test
    public void testCheckpointWithMasterAndTaskStateForCheckpoint() throws Exception {
        testCheckpointWithMasterAndTaskState(null);
    }

    @Test
    public void testCheckpointWithMasterAndTaskStateForSavepoint() throws Exception {
        testCheckpointWithMasterAndTaskState(temporaryFolder.newFolder().toURI().toString());
    }

    /** Round-trips metadata with both master and operator/task state. */
    private void testCheckpointWithMasterAndTaskState(String basePath) throws Exception {
        final Random rnd = new Random();

        final int maxNumMasterStates = 5;
        final int maxTaskStates = 20;
        final int maxNumSubtasks = 20;

        for (int i = 0; i < 100; ++i) {
            final long checkpointId = rnd.nextLong() & 0x7fffffffffffffffL;

            final int numTasks = rnd.nextInt(maxTaskStates) + 1;
            final int numSubtasks = rnd.nextInt(maxNumSubtasks) + 1;
            final Collection<OperatorState> taskStates =
                    CheckpointTestUtils.createOperatorStates(
                            rnd, basePath, numTasks, 0, 0, numSubtasks);

            final int numMasterStates = rnd.nextInt(maxNumMasterStates) + 1;
            final Collection<MasterState> masterStates =
                    CheckpointTestUtils.createRandomMasterStates(rnd, numMasterStates);

            testCheckpointSerialization(checkpointId, taskStates, masterStates, basePath);
        }
    }

    @Test
    public void testCheckpointWithFinishedTasksForCheckpoint() throws Exception {
        testCheckpointWithFinishedTasks(null);
    }

    @Test
    public void testCheckpointWithFinishedTasksForSavepoint() throws Exception {
        testCheckpointWithFinishedTasks(temporaryFolder.newFolder().toURI().toString());
    }

    /** Round-trips metadata that includes partly- and fully-finished operators. */
    private void testCheckpointWithFinishedTasks(String basePath) throws Exception {
        final Random rnd = new Random();

        final int maxNumMasterStates = 5;
        final int maxNumSubtasks = 20;
        final int maxAllRunningTaskStates = 20;
        final int maxPartlyFinishedStates = 10;
        final int maxFullyFinishedSubtasks = 10;

        final long checkpointId = rnd.nextLong() & 0x7fffffffffffffffL;

        final int numSubtasks = rnd.nextInt(maxNumSubtasks) + 1;
        final int numAllRunningTasks = rnd.nextInt(maxAllRunningTaskStates) + 1;
        final int numPartlyFinishedTasks = rnd.nextInt(maxPartlyFinishedStates) + 1;
        final int numFullyFinishedTasks = rnd.nextInt(maxFullyFinishedSubtasks) + 1;
        final Collection<OperatorState> taskStates =
                CheckpointTestUtils.createOperatorStates(
                        rnd,
                        basePath,
                        numAllRunningTasks,
                        numPartlyFinishedTasks,
                        numFullyFinishedTasks,
                        numSubtasks);

        final int numMasterStates = rnd.nextInt(maxNumMasterStates) + 1;
        final Collection<MasterState> masterStates =
                CheckpointTestUtils.createRandomMasterStates(rnd, numMasterStates);

        testCheckpointSerialization(checkpointId, taskStates, masterStates, basePath);
    }

    /**
     * Test checkpoint metadata (de)serialization.
     *
     * @param checkpointId The given checkpointId will write into the metadata.
     * @param operatorStates the given states for all the operators.
     * @param masterStates the masterStates of the given checkpoint/savepoint.
     * @param basePath base directory for external (relative) state pointers, or null to keep all
     *     state inline (checkpoint-style).
     */
    private void testCheckpointSerialization(
            long checkpointId,
            Collection<OperatorState> operatorStates,
            Collection<MasterState> masterStates,
            @Nullable String basePath)
            throws IOException {

        MetadataV3Serializer serializer = MetadataV3Serializer.INSTANCE;

        ByteArrayOutputStreamWithPos baos = new ByteArrayOutputStreamWithPos();
        DataOutputStream out = new DataOutputViewStreamWrapper(baos);

        CheckpointMetadata metadata =
                new CheckpointMetadata(checkpointId, operatorStates, masterStates);
        MetadataV3Serializer.serialize(metadata, out);
        out.close();

        // The relative-pointer resolution in MetadataV2V3SerializerBase currently runs the same
        // code as the file-system checkpoint location resolution, which requires a "_metadata"
        // file to be present in order to differentiate between directory addressing and
        // metadata-file addressing. We could resolve the pointer without file I/O, but
        // reproducing that logic (with the same guarantees) is delicate — so, better safe than
        // sorry, we create the real file here, even if it makes the test a tad more clumsy.
        if (basePath != null) {
            final Path metaPath = new Path(basePath, "_metadata");
            // this is in the temp folder, so it will get automatically deleted
            FileSystem.getLocalFileSystem()
                    .create(metaPath, FileSystem.WriteMode.OVERWRITE)
                    .close();
        }

        byte[] bytes = baos.toByteArray();

        DataInputStream in =
                new DataInputViewStreamWrapper(new ByteArrayInputStreamWithPos(bytes));
        CheckpointMetadata deserialized =
                serializer.deserialize(in, getClass().getClassLoader(), basePath);

        assertEquals(checkpointId, deserialized.getCheckpointId());
        assertEquals(operatorStates, deserialized.getOperatorStates());
        // the fully-finished flag is not part of OperatorState.equals(), so check it separately
        assertEquals(
                operatorStates.stream()
                        .map(OperatorState::isFullyFinished)
                        .collect(Collectors.toList()),
                deserialized.getOperatorStates().stream()
                        .map(OperatorState::isFullyFinished)
                        .collect(Collectors.toList()));

        assertEquals(masterStates.size(), deserialized.getMasterStates().size());
        for (Iterator<MasterState> a = masterStates.iterator(),
                        b = deserialized.getMasterStates().iterator();
                a.hasNext(); ) {
            CheckpointTestUtils.assertMasterStateEquality(a.next(), b.next());
        }
    }

    /** Round-trips a KeyGroupsStateHandle and verifies offsets and payload. */
    @Test
    public void testSerializeKeyGroupsStateHandle() throws IOException {
        KeyGroupRangeOffsets offsets = new KeyGroupRangeOffsets(0, 123);
        byte[] data = {1, 2, 3, 4};
        try (ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos()) {
            MetadataV2V3SerializerBase.serializeStreamStateHandle(
                    new KeyGroupsStateHandle(offsets, new ByteStreamStateHandle("test", data)),
                    new DataOutputStream(out));
            try (ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray())) {
                StreamStateHandle handle =
                        MetadataV2V3SerializerBase.deserializeStreamStateHandle(
                                new DataInputStream(in), null);
                assertTrue(handle instanceof KeyGroupsStateHandle);
                assertEquals(offsets, ((KeyGroupsStateHandle) handle).getGroupRangeOffsets());
                byte[] deserialized = new byte[data.length];
                try (FSDataInputStream dataStream = handle.openInputStream()) {
                    // read() may legally return fewer bytes than requested; the original
                    // single call ignored the return value, which could leave the buffer
                    // partially filled and make the assertion below unreliable.
                    int off = 0;
                    while (off < deserialized.length) {
                        int read = dataStream.read(deserialized, off, deserialized.length - off);
                        assertTrue("premature end of state handle stream", read >= 0);
                        off += read;
                    }
                    assertArrayEquals(data, deserialized);
                }
            }
        }
    }

    @Test
    public void testSerializeIncrementalChangelogStateBackendHandle() throws IOException {
        testSerializeChangelogStateBackendHandle(false);
    }

    @Test
    public void testSerializeFullChangelogStateBackendHandle() throws IOException {
        testSerializeChangelogStateBackendHandle(true);
    }

    /** Round-trips a ChangelogStateBackendHandle and compares materialized handles. */
    private void testSerializeChangelogStateBackendHandle(boolean fullSnapshot)
            throws IOException {
        ChangelogStateBackendHandle handle = createChangelogStateBackendHandle(fullSnapshot);
        try (ByteArrayOutputStreamWithPos out = new ByteArrayOutputStreamWithPos()) {
            MetadataV2V3SerializerBase.serializeKeyedStateHandle(handle, new DataOutputStream(out));
            try (ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray())) {
                KeyedStateHandle deserialized =
                        MetadataV2V3SerializerBase.deserializeKeyedStateHandle(
                                new DataInputStream(in), null);
                assertTrue(deserialized instanceof ChangelogStateBackendHandleImpl);
                assertEquals(
                        ((ChangelogStateBackendHandleImpl) deserialized)
                                .getMaterializedStateHandles(),
                        handle.getMaterializedStateHandles());
            }
        }
    }

    // fullSnapshot selects a key-group (full) vs incremental dummy keyed handle
    private ChangelogStateBackendHandle createChangelogStateBackendHandle(boolean fullSnapshot) {
        KeyedStateHandle keyedStateHandle =
                fullSnapshot
                        ? CheckpointTestUtils.createDummyKeyGroupStateHandle(
                                ThreadLocalRandom.current(), null)
                        : CheckpointTestUtils.createDummyIncrementalKeyedStateHandle(
                                ThreadLocalRandom.current());
        return ChangelogTestUtils.createChangelogStateBackendHandle(keyedStateHandle);
    }
}
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.segment;

import com.google.common.base.Throwables;
import com.google.common.io.CharSource;
import com.google.common.io.LineProcessor;
import com.google.common.io.Resources;
import io.druid.data.input.impl.DelimitedParseSpec;
import io.druid.data.input.impl.DimensionSchema;
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.data.input.impl.DoubleDimensionSchema;
import io.druid.data.input.impl.FloatDimensionSchema;
import io.druid.data.input.impl.LongDimensionSchema;
import io.druid.data.input.impl.StringDimensionSchema;
import io.druid.data.input.impl.StringInputRowParser;
import io.druid.data.input.impl.TimestampSpec;
import io.druid.hll.HyperLogLogHash;
import io.druid.java.util.common.DateTimes;
import io.druid.java.util.common.Intervals;
import io.druid.java.util.common.logger.Logger;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.DoubleMaxAggregatorFactory;
import io.druid.query.aggregation.DoubleMinAggregatorFactory;
import io.druid.query.aggregation.DoubleSumAggregatorFactory;
import io.druid.query.aggregation.FloatMaxAggregatorFactory;
import io.druid.query.aggregation.FloatMinAggregatorFactory;
import io.druid.query.aggregation.FloatSumAggregatorFactory;
import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde;
import io.druid.query.expression.TestExprMacroTable;
import io.druid.segment.column.ValueType;
import io.druid.segment.incremental.IncrementalIndex;
import io.druid.segment.incremental.IncrementalIndexSchema;
import io.druid.segment.serde.ComplexMetrics;
import io.druid.segment.virtual.ExpressionVirtualColumn;
import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.joda.time.Interval;

import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;

/**
 * Shared test fixture: builds (and caches) incremental, mmapped, and merged
 * indexes from the bundled {@code druid.sample.numeric.tsv} resources.
 *
 * <p>All cached indexes are lazily initialized under a common lock ({@code log})
 * so that concurrent test threads never build the same index twice.
 */
public class TestIndex
{
  public static final String[] COLUMNS = new String[]{
      "ts",
      "market",
      "quality",
      "qualityLong",
      "qualityFloat",
      "qualityDouble",
      "qualityNumericString",
      "placement",
      "placementish",
      "index",
      "partial_null_column",
      "null_column",
      "quality_uniques",
      "indexMin",
      "indexMaxPlusTen"
  };

  public static final String[] DIMENSIONS = new String[]{
      "market",
      "quality",
      "qualityLong",
      "qualityFloat",
      "qualityDouble",
      "qualityNumericString",
      "placement",
      "placementish",
      "partial_null_column",
      "null_column"
  };

  public static final List<DimensionSchema> DIMENSION_SCHEMAS = Arrays.asList(
      new StringDimensionSchema("market"),
      new StringDimensionSchema("quality"),
      new LongDimensionSchema("qualityLong"),
      new FloatDimensionSchema("qualityFloat"),
      new DoubleDimensionSchema("qualityDouble"),
      new StringDimensionSchema("qualityNumericString"),
      new StringDimensionSchema("placement"),
      new StringDimensionSchema("placementish"),
      new StringDimensionSchema("partial_null_column"),
      new StringDimensionSchema("null_column")
  );

  public static final DimensionsSpec DIMENSIONS_SPEC = new DimensionsSpec(
      DIMENSION_SCHEMAS,
      null,
      null
  );

  public static final String[] DOUBLE_METRICS = new String[]{"index", "indexMin", "indexMaxPlusTen"};
  public static final String[] FLOAT_METRICS = new String[]{"indexFloat", "indexMinFloat", "indexMaxFloat"};

  private static final Logger log = new Logger(TestIndex.class);
  private static final Interval DATA_INTERVAL = Intervals.of("2011-01-12T00:00:00.000Z/2011-05-01T00:00:00.000Z");

  private static final VirtualColumns VIRTUAL_COLUMNS = VirtualColumns.create(
      Collections.<VirtualColumn>singletonList(
          new ExpressionVirtualColumn("expr", "index + 10", ValueType.FLOAT, TestExprMacroTable.INSTANCE)
      )
  );

  public static final AggregatorFactory[] METRIC_AGGS = new AggregatorFactory[]{
      new DoubleSumAggregatorFactory(DOUBLE_METRICS[0], "index"),
      new FloatSumAggregatorFactory(FLOAT_METRICS[0], "index"),
      new DoubleMinAggregatorFactory(DOUBLE_METRICS[1], "index"),
      new FloatMinAggregatorFactory(FLOAT_METRICS[1], "index"),
      new FloatMaxAggregatorFactory(FLOAT_METRICS[2], "index"),
      // "indexMaxPlusTen" aggregates the virtual column "expr" (index + 10).
      new DoubleMaxAggregatorFactory(DOUBLE_METRICS[2], VIRTUAL_COLUMNS.getVirtualColumns()[0].getOutputName()),
      new HyperUniquesAggregatorFactory("quality_uniques", "quality")
  };

  private static final IndexSpec indexSpec = new IndexSpec();

  private static final IndexMerger INDEX_MERGER =
      TestHelper.getTestIndexMergerV9(OffHeapMemorySegmentWriteOutMediumFactory.instance());
  private static final IndexIO INDEX_IO =
      TestHelper.getTestIndexIO(OffHeapMemorySegmentWriteOutMediumFactory.instance());

  static {
    if (ComplexMetrics.getSerdeForType("hyperUnique") == null) {
      ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde(HyperLogLogHash.getDefault()));
    }
  }

  // Lazily built caches; all reads and writes must happen while holding the
  // "log" monitor (see the getters below).
  private static IncrementalIndex realtimeIndex = null;
  private static IncrementalIndex noRollupRealtimeIndex = null;
  private static QueryableIndex mmappedIndex = null;
  private static QueryableIndex noRollupMmappedIndex = null;
  private static QueryableIndex mergedRealtime = null;

  /**
   * Returns the shared rolled-up incremental index, building it on first use.
   *
   * FIX: the original released the lock between the null-check and the
   * assignment, so two racing threads could each build the index and the
   * write was not safely published. The whole check-and-init now runs
   * under the lock.
   */
  public static IncrementalIndex getIncrementalTestIndex()
  {
    synchronized (log) {
      if (realtimeIndex == null) {
        realtimeIndex = makeRealtimeIndex("druid.sample.numeric.tsv");
      }
      return realtimeIndex;
    }
  }

  /** Returns the shared non-rolled-up incremental index, building it on first use. */
  public static IncrementalIndex getNoRollupIncrementalTestIndex()
  {
    synchronized (log) {
      if (noRollupRealtimeIndex == null) {
        noRollupRealtimeIndex = makeRealtimeIndex("druid.sample.numeric.tsv", false);
      }
      return noRollupRealtimeIndex;
    }
  }

  /** Returns the shared mmapped (persisted + reloaded) index, building it on first use. */
  public static QueryableIndex getMMappedTestIndex()
  {
    synchronized (log) {
      if (mmappedIndex == null) {
        // Reentrant: getIncrementalTestIndex() re-acquires the same monitor.
        IncrementalIndex incrementalIndex = getIncrementalTestIndex();
        mmappedIndex = persistRealtimeAndLoadMMapped(incrementalIndex);
      }
      return mmappedIndex;
    }
  }

  /** Returns the shared non-rolled-up mmapped index, building it on first use. */
  public static QueryableIndex getNoRollupMMappedTestIndex()
  {
    synchronized (log) {
      if (noRollupMmappedIndex == null) {
        IncrementalIndex incrementalIndex = getNoRollupIncrementalTestIndex();
        noRollupMmappedIndex = persistRealtimeAndLoadMMapped(incrementalIndex);
      }
      return noRollupMmappedIndex;
    }
  }

  /**
   * Returns a QueryableIndex produced by persisting the "top" and "bottom"
   * halves of the sample data separately and merging the results.
   */
  public static QueryableIndex mergedRealtimeIndex()
  {
    synchronized (log) {
      if (mergedRealtime != null) {
        return mergedRealtime;
      }

      try {
        IncrementalIndex top = makeRealtimeIndex("druid.sample.numeric.tsv.top");
        IncrementalIndex bottom = makeRealtimeIndex("druid.sample.numeric.tsv.bottom");

        // Use a temp *directory* tree: create a temp file name, delete the
        // file, then mkdir subdirectories under that path.
        File tmpFile = File.createTempFile("yay", "who");
        tmpFile.delete();

        File topFile = new File(tmpFile, "top");
        File bottomFile = new File(tmpFile, "bottom");
        File mergedFile = new File(tmpFile, "merged");

        topFile.mkdirs();
        topFile.deleteOnExit();
        bottomFile.mkdirs();
        bottomFile.deleteOnExit();
        mergedFile.mkdirs();
        mergedFile.deleteOnExit();

        INDEX_MERGER.persist(top, DATA_INTERVAL, topFile, indexSpec, null);
        INDEX_MERGER.persist(bottom, DATA_INTERVAL, bottomFile, indexSpec, null);

        mergedRealtime = INDEX_IO.loadIndex(
            INDEX_MERGER.mergeQueryableIndex(
                Arrays.asList(INDEX_IO.loadIndex(topFile), INDEX_IO.loadIndex(bottomFile)),
                true,
                METRIC_AGGS,
                mergedFile,
                indexSpec,
                null
            )
        );

        return mergedRealtime;
      }
      catch (IOException e) {
        throw Throwables.propagate(e);
      }
    }
  }

  /** Builds a rolled-up incremental index from the named classpath resource. */
  public static IncrementalIndex makeRealtimeIndex(final String resourceFilename)
  {
    return makeRealtimeIndex(resourceFilename, true);
  }

  /**
   * Builds an incremental index from the named classpath resource.
   *
   * @param resourceFilename classpath resource holding the TSV sample data
   * @param rollup           whether to roll up rows at ingest time
   * @throws IllegalArgumentException if the resource cannot be found
   */
  public static IncrementalIndex makeRealtimeIndex(final String resourceFilename, boolean rollup)
  {
    final URL resource = TestIndex.class.getClassLoader().getResource(resourceFilename);
    if (resource == null) {
      throw new IllegalArgumentException("cannot find resource " + resourceFilename);
    }
    log.info("Realtime loading index file[%s]", resource);
    CharSource stream = Resources.asByteSource(resource).asCharSource(StandardCharsets.UTF_8);
    return makeRealtimeIndex(stream, rollup);
  }

  /** Builds a rolled-up incremental index from the given character source. */
  public static IncrementalIndex makeRealtimeIndex(final CharSource source)
  {
    return makeRealtimeIndex(source, true);
  }

  /**
   * Builds an on-heap incremental index (max 10000 rows) from the given
   * character source. On failure the corresponding static cache slot is
   * cleared so a later call can retry.
   */
  public static IncrementalIndex makeRealtimeIndex(final CharSource source, boolean rollup)
  {
    final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
        .withMinTimestamp(DateTimes.of("2011-01-12T00:00:00.000Z").getMillis())
        .withTimestampSpec(new TimestampSpec("ds", "auto", null))
        .withDimensionsSpec(DIMENSIONS_SPEC)
        .withVirtualColumns(VIRTUAL_COLUMNS)
        .withMetrics(METRIC_AGGS)
        .withRollup(rollup)
        .build();
    final IncrementalIndex retVal = new IncrementalIndex.Builder()
        .setIndexSchema(schema)
        .setMaxRowCount(10000)
        .buildOnheap();

    try {
      return loadIncrementalIndex(retVal, source);
    }
    catch (Exception e) {
      // Reset the cache slot so the failed build is not reused.
      if (rollup) {
        realtimeIndex = null;
      } else {
        noRollupRealtimeIndex = null;
      }
      throw Throwables.propagate(e);
    }
  }

  /** Loads the TSV source into {@code retVal} using the standard test parser. */
  public static IncrementalIndex loadIncrementalIndex(
      final IncrementalIndex retVal,
      final CharSource source
  ) throws IOException
  {
    final StringInputRowParser parser = new StringInputRowParser(
        new DelimitedParseSpec(
            new TimestampSpec("ts", "iso", null),
            new DimensionsSpec(DIMENSION_SCHEMAS, null, null),
            "\t",
            "\u0001",
            Arrays.asList(COLUMNS),
            false,
            0
        ),
        "utf8"
    );
    return loadIncrementalIndex(retVal, source, parser);
  }

  /**
   * Loads every line of {@code source} into {@code retVal} via {@code parser},
   * logging the line count and elapsed time.
   */
  public static IncrementalIndex loadIncrementalIndex(
      final IncrementalIndex retVal,
      final CharSource source,
      final StringInputRowParser parser
  ) throws IOException
  {
    final AtomicLong startTime = new AtomicLong();
    int lineCount = source.readLines(
        new LineProcessor<Integer>()
        {
          boolean runOnce = false;
          int lineCount = 0;

          @Override
          public boolean processLine(String line) throws IOException
          {
            if (!runOnce) {
              startTime.set(System.currentTimeMillis());
              runOnce = true;
            }
            retVal.add(parser.parse(line));
            ++lineCount;
            return true;
          }

          @Override
          public Integer getResult()
          {
            return lineCount;
          }
        }
    );
    log.info("Loaded %,d lines in %,d millis.", lineCount, System.currentTimeMillis() - startTime.get());

    return retVal;
  }

  /** Persists {@code index} to a temp directory and reloads it as a mmapped QueryableIndex. */
  public static QueryableIndex persistRealtimeAndLoadMMapped(IncrementalIndex index)
  {
    try {
      File someTmpFile = File.createTempFile("billy", "yay");
      someTmpFile.delete();
      someTmpFile.mkdirs();
      someTmpFile.deleteOnExit();

      INDEX_MERGER.persist(index, someTmpFile, indexSpec, null);
      return INDEX_IO.loadIndex(someTmpFile);
    }
    catch (IOException e) {
      throw Throwables.propagate(e);
    }
  }
}
/*
 * Copyright 2012-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.jvm.java;

import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildTargetSourcePath;
import com.facebook.buck.rules.RuleKeyAppendable;
import com.facebook.buck.rules.RuleKeyObjectSink;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.util.HumanReadableException;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Sets;

import java.nio.file.Path;
import java.util.Collection;
import java.util.Set;

import javax.annotation.Nullable;

/**
 * Information for annotation processing.
 *
 * Annotation processing involves a set of processors, their classpath(s), and a few other
 * command-line options for javac. We want to be able to specify all this various information
 * in a BUCK configuration file and use it when we generate the javac command. This facilitates
 * threading the information through buck in a more descriptive package rather than passing all
 * the components separately.
 */
public class AnnotationProcessingParams implements RuleKeyAppendable {
  // Canonical "no annotation processing" instance; build() returns it when nothing is configured.
  public static final AnnotationProcessingParams EMPTY = new AnnotationProcessingParams(
      /* owner target */ null,
      /* project filesystem */ null,
      ImmutableSet.<Path>of(),
      ImmutableSet.<String>of(),
      ImmutableSet.<String>of(),
      ImmutableSortedSet.<SourcePath>of(),
      false);

  @Nullable
  private final BuildTarget ownerTarget;
  @Nullable
  private final ProjectFilesystem filesystem;
  // Classpath entries handed to javac's -processorpath.
  private final ImmutableSortedSet<Path> searchPathElements;
  // Fully-qualified annotation processor class names.
  private final ImmutableSortedSet<String> names;
  // Raw "-Akey=value"-style processor parameters.
  private final ImmutableSortedSet<String> parameters;
  // SourcePaths contributing to the rule key (derived from processor build rules).
  private final ImmutableSortedSet<SourcePath> inputs;
  private final boolean processOnly;

  private AnnotationProcessingParams(
      @Nullable BuildTarget ownerTarget,
      @Nullable ProjectFilesystem filesystem,
      Set<Path> searchPathElements,
      Set<String> names,
      Set<String> parameters,
      ImmutableSortedSet<SourcePath> inputs,
      boolean processOnly) {
    this.ownerTarget = ownerTarget;
    this.filesystem = filesystem;
    this.searchPathElements = ImmutableSortedSet.copyOf(searchPathElements);
    this.names = ImmutableSortedSet.copyOf(names);
    this.parameters = ImmutableSortedSet.copyOf(parameters);
    this.inputs = inputs;
    this.processOnly = processOnly;

    // A non-empty params object with an owner needs a filesystem to compute the
    // generated-sources folder (see getGeneratedSrcFolder).
    if (!isEmpty() && ownerTarget != null) {
      Preconditions.checkNotNull(filesystem);
    }
  }

  // Annotation-processor generated sources land under the owner target's annotation path.
  private Path getGeneratedSrcFolder() {
    Preconditions.checkNotNull(filesystem);
    return BuildTargets.getAnnotationPath(
        filesystem,
        Preconditions.checkNotNull(ownerTarget),
        "__%s_gen__");
  }

  // NOTE(review): deliberately ignores "inputs" and "processOnly" — presumably
  // those are only meaningful when at least one of the three sets below is
  // non-empty; confirm before relying on this elsewhere.
  public boolean isEmpty() {
    return searchPathElements.isEmpty() && names.isEmpty() && parameters.isEmpty();
  }

  public ImmutableSortedSet<Path> getSearchPathElements() {
    return searchPathElements;
  }

  public ImmutableSortedSet<String> getNames() {
    return names;
  }

  public ImmutableSortedSet<String> getParameters() {
    return parameters;
  }

  public ImmutableSortedSet<SourcePath> getInputs() {
    return inputs;
  }

  @Override
  public void appendToRuleKey(RuleKeyObjectSink sink) {
    if (!isEmpty()) {
      // searchPathElements is not needed here since it comes from rules, which is appended below.
      sink.setReflectively("owner", ownerTarget)
          .setReflectively("names", names)
          .setReflectively("parameters", parameters)
          .setReflectively("processOnly", processOnly)
          .setReflectively("inputs", inputs);
    }
  }

  public boolean getProcessOnly() {
    return processOnly;
  }

  /** @return the generated-sources folder, or null when there is no owner or nothing to do. */
  @Nullable
  public Path getGeneratedSourceFolderName() {
    if ((ownerTarget != null) && !isEmpty()) {
      return getGeneratedSrcFolder();
    } else {
      return null;
    }
  }

  /** Mutable builder; build() resolves processor build rules into classpath entries. */
  public static class Builder {
    @Nullable
    private BuildTarget ownerTarget;
    @Nullable
    private ProjectFilesystem filesystem;
    private Set<BuildRule> rules = Sets.newHashSet();
    private Set<String> names = Sets.newHashSet();
    private Set<String> parameters = Sets.newHashSet();
    private boolean processOnly;

    public Builder setOwnerTarget(BuildTarget owner) {
      ownerTarget = owner;
      return this;
    }

    public Builder addProcessorBuildTarget(BuildRule rule) {
      rules.add(rule);
      return this;
    }

    public Builder addAllProcessors(Collection<? extends String> processorNames) {
      names.addAll(processorNames);
      return this;
    }

    public Builder addParameter(String parameter) {
      parameters.add(parameter);
      return this;
    }

    public Builder setProcessOnly(boolean processOnly) {
      this.processOnly = processOnly;
      return this;
    }

    public Builder setProjectFilesystem(ProjectFilesystem filesystem) {
      this.filesystem = filesystem;
      return this;
    }

    /**
     * Resolves the configured processor rules into search-path entries and
     * rule-key inputs, then builds the immutable params object. Rules must be
     * annotation-processor builders or classpath providers; anything else is a
     * user error reported via HumanReadableException.
     */
    public AnnotationProcessingParams build() {
      if (names.isEmpty() && rules.isEmpty() && parameters.isEmpty()) {
        return EMPTY;
      }

      ImmutableSortedSet.Builder<SourcePath> inputs = ImmutableSortedSet.naturalOrder();
      Set<Path> searchPathElements = Sets.newHashSet();

      for (BuildRule rule : this.rules) {
        if (rule.getClass().isAnnotationPresent(BuildsAnnotationProcessor.class)) {
          // A rule that directly produces a processor jar: use its output.
          Path pathToOutput = rule.getPathToOutput();
          if (pathToOutput != null) {
            inputs.add(
                new BuildTargetSourcePath(rule.getBuildTarget()));
            searchPathElements.add(pathToOutput);
          }
        } else if (rule instanceof HasClasspathEntries) {
          // A library-like rule: pull in its whole transitive classpath.
          HasClasspathEntries hasClasspathEntries = (HasClasspathEntries) rule;
          ImmutableSet<JavaLibrary> entries = hasClasspathEntries.getTransitiveClasspathDeps();
          for (JavaLibrary entry : entries) {
            inputs.add(new BuildTargetSourcePath(entry.getBuildTarget()));
          }
          searchPathElements.addAll(hasClasspathEntries.getTransitiveClasspathEntries().values());
        } else {
          throw new HumanReadableException(
              "%1$s: Error adding '%2$s' to annotation_processing_deps: " +
              "must refer only to prebuilt jar, java binary, or java library targets.",
              ownerTarget,
              rule.getFullyQualifiedName());
        }
      }

      return new AnnotationProcessingParams(
          ownerTarget,
          filesystem,
          searchPathElements,
          names,
          parameters,
          inputs.build(),
          processOnly);
    }
  }
}
/*
 * Copyright 2010-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.dynamodb.model.transform;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.OutputStreamWriter;
import java.io.StringWriter;
import java.io.Writer;
import java.util.Map;
import java.util.List;

import com.amazonaws.AmazonClientException;
import com.amazonaws.Request;
import com.amazonaws.DefaultRequest;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.dynamodb.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.util.StringUtils;
import com.amazonaws.util.StringInputStream;
import com.amazonaws.util.json.*;

/**
 * Batch Get Item Request Marshaller
 *
 * Serializes a {@link BatchGetItemRequest} into a POST request carrying the
 * DynamoDB 2011-12-05 JSON wire format ({@code DynamoDB_20111205.BatchGetItem}).
 */
public class BatchGetItemRequestMarshaller implements Marshaller<Request<BatchGetItemRequest>, BatchGetItemRequest> {

    /**
     * Marshalls the request: sets target/content-type headers, the (empty)
     * resource path, and a JSON body listing, per table, the keys to fetch
     * and optionally the attributes to get.
     *
     * @throws AmazonClientException if the request is null or serialization fails
     */
    public Request<BatchGetItemRequest> marshall(BatchGetItemRequest batchGetItemRequest) {
        if (batchGetItemRequest == null) {
            throw new AmazonClientException("Invalid argument passed to marshall(...)");
        }

        Request<BatchGetItemRequest> request =
                new DefaultRequest<BatchGetItemRequest>(batchGetItemRequest, "AmazonDynamoDB");
        String target = "DynamoDB_20111205.BatchGetItem";
        request.addHeader("X-Amz-Target", target);
        request.addHeader("Content-Type", "application/x-amz-json-1.0");
        request.setHttpMethod(HttpMethodName.POST);

        // Generated boilerplate: the resource path is always empty for this
        // operation, so the query-string branch below never executes; kept for
        // parity with other generated marshallers.
        String uriResourcePath = "";
        uriResourcePath = uriResourcePath.replaceAll("//", "/");

        if (uriResourcePath.contains("?")) {
            String queryString = uriResourcePath.substring(uriResourcePath.indexOf("?") + 1);
            uriResourcePath = uriResourcePath.substring(0, uriResourcePath.indexOf("?"));

            for (String s : queryString.split("[;&]")) {
                String[] nameValuePair = s.split("=");
                if (nameValuePair.length == 2) {
                    request.addParameter(nameValuePair[0], nameValuePair[1]);
                } else {
                    request.addParameter(s, null);
                }
            }
        }

        request.setResourcePath(uriResourcePath);

        try {
            StringWriter stringWriter = new StringWriter();
            JSONWriter jsonWriter = new JSONWriter(stringWriter);
            jsonWriter.object();

            if (batchGetItemRequest.getRequestItems() != null) {
                jsonWriter.key("RequestItems");
                jsonWriter.object();
                for (Map.Entry<String, KeysAndAttributes> requestItemsListValue
                        : batchGetItemRequest.getRequestItems().entrySet()) {
                    if (requestItemsListValue.getValue() != null) {
                        jsonWriter.key(requestItemsListValue.getKey());
                        jsonWriter.object();

                        java.util.List<Key> keysList = requestItemsListValue.getValue().getKeys();
                        if (keysList != null && keysList.size() > 0) {
                            jsonWriter.key("Keys");
                            jsonWriter.array();
                            for (Key keysListValue : keysList) {
                                if (keysListValue != null) {
                                    jsonWriter.object();
                                    AttributeValue hashKeyElement = keysListValue.getHashKeyElement();
                                    if (hashKeyElement != null) {
                                        jsonWriter.key("HashKeyElement");
                                        writeAttributeValue(jsonWriter, hashKeyElement);
                                    }
                                    AttributeValue rangeKeyElement = keysListValue.getRangeKeyElement();
                                    if (rangeKeyElement != null) {
                                        jsonWriter.key("RangeKeyElement");
                                        writeAttributeValue(jsonWriter, rangeKeyElement);
                                    }
                                    jsonWriter.endObject();
                                }
                            }
                            jsonWriter.endArray();
                        }

                        java.util.List<String> attributesToGetList =
                                requestItemsListValue.getValue().getAttributesToGet();
                        if (attributesToGetList != null && attributesToGetList.size() > 0) {
                            jsonWriter.key("AttributesToGet");
                            jsonWriter.array();
                            for (String attributesToGetListValue : attributesToGetList) {
                                if (attributesToGetListValue != null) {
                                    jsonWriter.value(attributesToGetListValue);
                                }
                            }
                            jsonWriter.endArray();
                        }

                        jsonWriter.endObject();
                    }
                }
                jsonWriter.endObject();
            }

            jsonWriter.endObject();

            String snippet = stringWriter.toString();
            byte[] content = snippet.getBytes("UTF-8");
            request.setContent(new StringInputStream(snippet));
            request.addHeader("Content-Length", Integer.toString(content.length));
        } catch(Throwable t) {
            throw new AmazonClientException("Unable to marshall request to JSON: " + t.getMessage(), t);
        }

        return request;
    }

    /**
     * Writes one AttributeValue as a JSON object with the 2011-12-05 typed
     * fields (S, N, B, SS, NS, BS). Extracted from the previously duplicated
     * hash-key and range-key serialization blocks; output is identical.
     */
    private static void writeAttributeValue(JSONWriter jsonWriter, AttributeValue attributeValue)
            throws JSONException {
        jsonWriter.object();

        if (attributeValue.getS() != null) {
            jsonWriter.key("S").value(attributeValue.getS());
        }
        if (attributeValue.getN() != null) {
            jsonWriter.key("N").value(attributeValue.getN());
        }
        if (attributeValue.getB() != null) {
            jsonWriter.key("B").value(attributeValue.getB());
        }

        writeStringList(jsonWriter, "SS", attributeValue.getSS());
        writeStringList(jsonWriter, "NS", attributeValue.getNS());

        java.util.List<java.nio.ByteBuffer> bSList = attributeValue.getBS();
        if (bSList != null && bSList.size() > 0) {
            jsonWriter.key("BS");
            jsonWriter.array();
            for (java.nio.ByteBuffer bSListValue : bSList) {
                if (bSListValue != null) {
                    jsonWriter.value(bSListValue);
                }
            }
            jsonWriter.endArray();
        }

        jsonWriter.endObject();
    }

    /** Writes a string-set field (SS or NS) as a JSON array, skipping null entries; omitted when empty. */
    private static void writeStringList(JSONWriter jsonWriter, String key, java.util.List<String> values)
            throws JSONException {
        if (values != null && values.size() > 0) {
            jsonWriter.key(key);
            jsonWriter.array();
            for (String value : values) {
                if (value != null) {
                    jsonWriter.value(value);
                }
            }
            jsonWriter.endArray();
        }
    }
}
/*
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.dialogflow.cx.v3beta1;

import com.google.api.core.BetaApi;
import com.google.api.pathtemplate.PathTemplate;
import com.google.api.pathtemplate.ValidationException;
import com.google.api.resourcenames.ResourceName;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * Resource name for a Dialogflow CX session. Supports two patterns:
 * {@code projects/{project}/locations/{location}/agents/{agent}/sessions/{session}} and
 * {@code projects/{project}/locations/{location}/agents/{agent}/environments/{environment}/sessions/{session}}.
 */
@Generated("by gapic-generator-java")
public class SessionName implements ResourceName {
  private static final PathTemplate PROJECT_LOCATION_AGENT_SESSION =
      PathTemplate.createWithoutUrlEncoding(
          "projects/{project}/locations/{location}/agents/{agent}/sessions/{session}");
  private static final PathTemplate PROJECT_LOCATION_AGENT_ENVIRONMENT_SESSION =
      PathTemplate.createWithoutUrlEncoding(
          "projects/{project}/locations/{location}/agents/{agent}/environments/{environment}/sessions/{session}");
  // Lazily computed via double-checked locking in getFieldValuesMap().
  private volatile Map<String, String> fieldValuesMap;
  private PathTemplate pathTemplate;
  private String fixedValue;
  private final String project;
  private final String location;
  private final String agent;
  private final String session;
  private final String environment;

  @Deprecated
  protected SessionName() {
    project = null;
    location = null;
    agent = null;
    session = null;
    environment = null;
  }

  private SessionName(Builder builder) {
    project = Preconditions.checkNotNull(builder.getProject());
    location = Preconditions.checkNotNull(builder.getLocation());
    agent = Preconditions.checkNotNull(builder.getAgent());
    session = Preconditions.checkNotNull(builder.getSession());
    environment = null;
    pathTemplate = PROJECT_LOCATION_AGENT_SESSION;
  }

  private SessionName(ProjectLocationAgentEnvironmentSessionBuilder builder) {
    project = Preconditions.checkNotNull(builder.getProject());
    location = Preconditions.checkNotNull(builder.getLocation());
    agent = Preconditions.checkNotNull(builder.getAgent());
    environment = Preconditions.checkNotNull(builder.getEnvironment());
    session = Preconditions.checkNotNull(builder.getSession());
    pathTemplate = PROJECT_LOCATION_AGENT_ENVIRONMENT_SESSION;
  }

  public String getProject() {
    return project;
  }

  public String getLocation() {
    return location;
  }

  public String getAgent() {
    return agent;
  }

  public String getSession() {
    return session;
  }

  public String getEnvironment() {
    return environment;
  }

  public static Builder newBuilder() {
    return new Builder();
  }

  @BetaApi("The per-pattern Builders are not stable yet and may be changed in the future.")
  public static Builder newProjectLocationAgentSessionBuilder() {
    return new Builder();
  }

  @BetaApi("The per-pattern Builders are not stable yet and may be changed in the future.")
  public static ProjectLocationAgentEnvironmentSessionBuilder
      newProjectLocationAgentEnvironmentSessionBuilder() {
    return new ProjectLocationAgentEnvironmentSessionBuilder();
  }

  public Builder toBuilder() {
    return new Builder(this);
  }

  public static SessionName of(String project, String location, String agent, String session) {
    return newBuilder()
        .setProject(project)
        .setLocation(location)
        .setAgent(agent)
        .setSession(session)
        .build();
  }

  @BetaApi("The static create methods are not stable yet and may be changed in the future.")
  public static SessionName ofProjectLocationAgentSessionName(
      String project, String location, String agent, String session) {
    return newBuilder()
        .setProject(project)
        .setLocation(location)
        .setAgent(agent)
        .setSession(session)
        .build();
  }

  @BetaApi("The static create methods are not stable yet and may be changed in the future.")
  public static SessionName ofProjectLocationAgentEnvironmentSessionName(
      String project, String location, String agent, String environment, String session) {
    return newProjectLocationAgentEnvironmentSessionBuilder()
        .setProject(project)
        .setLocation(location)
        .setAgent(agent)
        .setEnvironment(environment)
        .setSession(session)
        .build();
  }

  public static String format(String project, String location, String agent, String session) {
    return newBuilder()
        .setProject(project)
        .setLocation(location)
        .setAgent(agent)
        .setSession(session)
        .build()
        .toString();
  }

  @BetaApi("The static format methods are not stable yet and may be changed in the future.")
  public static String formatProjectLocationAgentSessionName(
      String project, String location, String agent, String session) {
    return newBuilder()
        .setProject(project)
        .setLocation(location)
        .setAgent(agent)
        .setSession(session)
        .build()
        .toString();
  }

  @BetaApi("The static format methods are not stable yet and may be changed in the future.")
  public static String formatProjectLocationAgentEnvironmentSessionName(
      String project, String location, String agent, String environment, String session) {
    return newProjectLocationAgentEnvironmentSessionBuilder()
        .setProject(project)
        .setLocation(location)
        .setAgent(agent)
        .setEnvironment(environment)
        .setSession(session)
        .build()
        .toString();
  }

  /**
   * Parses a formatted string into a SessionName, trying each supported pattern in turn.
   * Returns null for the empty string.
   *
   * @throws ValidationException if the string matches neither pattern
   */
  public static SessionName parse(String formattedString) {
    if (formattedString.isEmpty()) {
      return null;
    }
    if (PROJECT_LOCATION_AGENT_SESSION.matches(formattedString)) {
      Map<String, String> matchMap = PROJECT_LOCATION_AGENT_SESSION.match(formattedString);
      return ofProjectLocationAgentSessionName(
          matchMap.get("project"),
          matchMap.get("location"),
          matchMap.get("agent"),
          matchMap.get("session"));
    } else if (PROJECT_LOCATION_AGENT_ENVIRONMENT_SESSION.matches(formattedString)) {
      Map<String, String> matchMap =
          PROJECT_LOCATION_AGENT_ENVIRONMENT_SESSION.match(formattedString);
      return ofProjectLocationAgentEnvironmentSessionName(
          matchMap.get("project"),
          matchMap.get("location"),
          matchMap.get("agent"),
          matchMap.get("environment"),
          matchMap.get("session"));
    }
    throw new ValidationException("SessionName.parse: formattedString not in valid format");
  }

  public static List<SessionName> parseList(List<String> formattedStrings) {
    List<SessionName> list = new ArrayList<>(formattedStrings.size());
    for (String formattedString : formattedStrings) {
      list.add(parse(formattedString));
    }
    return list;
  }

  public static List<String> toStringList(List<SessionName> values) {
    List<String> list = new ArrayList<>(values.size());
    for (SessionName value : values) {
      if (value == null) {
        list.add("");
      } else {
        list.add(value.toString());
      }
    }
    return list;
  }

  public static boolean isParsableFrom(String formattedString) {
    return PROJECT_LOCATION_AGENT_SESSION.matches(formattedString)
        || PROJECT_LOCATION_AGENT_ENVIRONMENT_SESSION.matches(formattedString);
  }

  @Override
  public Map<String, String> getFieldValuesMap() {
    if (fieldValuesMap == null) {
      synchronized (this) {
        if (fieldValuesMap == null) {
          ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder();
          if (project != null) {
            fieldMapBuilder.put("project", project);
          }
          if (location != null) {
            fieldMapBuilder.put("location", location);
          }
          if (agent != null) {
            fieldMapBuilder.put("agent", agent);
          }
          if (session != null) {
            fieldMapBuilder.put("session", session);
          }
          if (environment != null) {
            fieldMapBuilder.put("environment", environment);
          }
          fieldValuesMap = fieldMapBuilder.build();
        }
      }
    }
    return fieldValuesMap;
  }

  public String getFieldValue(String fieldName) {
    return getFieldValuesMap().get(fieldName);
  }

  @Override
  public String toString() {
    return fixedValue != null ? fixedValue : pathTemplate.instantiate(getFieldValuesMap());
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    // FIX: was "o != null || getClass() == o.getClass()", which threw
    // NullPointerException for equals(null) (violating the Object.equals
    // contract) and ClassCastException for any non-null object of a
    // different class. Must be a conjunction.
    if (o != null && getClass() == o.getClass()) {
      SessionName that = ((SessionName) o);
      return Objects.equals(this.project, that.project)
          && Objects.equals(this.location, that.location)
          && Objects.equals(this.agent, that.agent)
          && Objects.equals(this.session, that.session)
          && Objects.equals(this.environment, that.environment);
    }
    return false;
  }

  @Override
  public int hashCode() {
    int h = 1;
    h *= 1000003;
    h ^= Objects.hashCode(fixedValue);
    h *= 1000003;
    h ^= Objects.hashCode(project);
    h *= 1000003;
    h ^= Objects.hashCode(location);
    h *= 1000003;
    h ^= Objects.hashCode(agent);
    h *= 1000003;
    h ^= Objects.hashCode(session);
    h *= 1000003;
    h ^= Objects.hashCode(environment);
    return h;
  }

  /** Builder for projects/{project}/locations/{location}/agents/{agent}/sessions/{session}. */
  public static class Builder {
    private String project;
    private String location;
    private String agent;
    private String session;

    protected Builder() {}

    public String getProject() {
      return project;
    }

    public String getLocation() {
      return location;
    }

    public String getAgent() {
      return agent;
    }

    public String getSession() {
      return session;
    }

    public Builder setProject(String project) {
      this.project = project;
      return this;
    }

    public Builder setLocation(String location) {
      this.location = location;
      return this;
    }

    public Builder setAgent(String agent) {
      this.agent = agent;
      return this;
    }

    public Builder setSession(String session) {
      this.session = session;
      return this;
    }

    private Builder(SessionName sessionName) {
      // toBuilder() only supports the session pattern without an environment.
      Preconditions.checkArgument(
          Objects.equals(sessionName.pathTemplate, PROJECT_LOCATION_AGENT_SESSION),
          "toBuilder is only supported when SessionName has the pattern of projects/{project}/locations/{location}/agents/{agent}/sessions/{session}");
      this.project = sessionName.project;
      this.location = sessionName.location;
      this.agent = sessionName.agent;
      this.session = sessionName.session;
    }

    public SessionName build() {
      return new SessionName(this);
    }
  }

  /**
   * Builder for
   * projects/{project}/locations/{location}/agents/{agent}/environments/{environment}/sessions/{session}.
   */
  @BetaApi("The per-pattern Builders are not stable yet and may be changed in the future.")
  public static class ProjectLocationAgentEnvironmentSessionBuilder {
    private String project;
    private String location;
    private String agent;
    private String environment;
    private String session;

    protected ProjectLocationAgentEnvironmentSessionBuilder() {}

    public String getProject() {
      return project;
    }

    public String getLocation() {
      return location;
    }

    public String getAgent() {
      return agent;
    }

    public String getEnvironment() {
      return environment;
    }

    public String getSession() {
      return session;
    }

    public ProjectLocationAgentEnvironmentSessionBuilder setProject(String project) {
      this.project = project;
      return this;
    }

    public ProjectLocationAgentEnvironmentSessionBuilder setLocation(String location) {
      this.location = location;
      return this;
    }

    public ProjectLocationAgentEnvironmentSessionBuilder setAgent(String agent) {
      this.agent = agent;
      return this;
    }

    public ProjectLocationAgentEnvironmentSessionBuilder setEnvironment(String environment) {
      this.environment = environment;
      return this;
    }

    public ProjectLocationAgentEnvironmentSessionBuilder setSession(String session) {
      this.session = session;
      return this;
    }

    public SessionName build() {
      return new SessionName(this);
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.processors.poi; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.PrintStream; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import org.apache.commons.csv.CSVFormat; import org.apache.commons.csv.CSVPrinter; import org.apache.commons.io.FilenameUtils; import org.apache.commons.lang3.StringUtils; import org.apache.nifi.annotation.behavior.WritesAttribute; import org.apache.nifi.annotation.behavior.WritesAttributes; import org.apache.nifi.annotation.documentation.CapabilityDescription; import org.apache.nifi.annotation.documentation.Tags; import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.csv.CSVUtils; import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.flowfile.attributes.CoreAttributes; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; import org.apache.nifi.processor.ProcessorInitializationContext; import 
org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.InputStreamCallback;
import org.apache.nifi.processor.io.OutputStreamCallback;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.poi.openxml4j.exceptions.InvalidFormatException;
import org.apache.poi.openxml4j.exceptions.OpenXML4JException;
import org.apache.poi.openxml4j.opc.OPCPackage;
import org.apache.poi.ss.usermodel.DataFormatter;
import org.apache.poi.ss.util.CellAddress;
import org.apache.poi.ss.util.CellReference;
import org.apache.poi.ooxml.util.SAXHelper;
import org.apache.poi.xssf.eventusermodel.ReadOnlySharedStringsTable;
import org.apache.poi.xssf.eventusermodel.XSSFReader;
import org.apache.poi.xssf.eventusermodel.XSSFSheetXMLHandler;
import org.apache.poi.xssf.model.StylesTable;
import org.apache.poi.xssf.usermodel.XSSFComment;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;

import javax.xml.parsers.ParserConfigurationException;

@Tags({"excel", "csv", "poi"})
@CapabilityDescription("Consumes a Microsoft Excel document and converts each worksheet to csv. Each sheet from the incoming Excel "
        + "document will generate a new Flowfile that will be output from this processor. Each output Flowfile's contents will be formatted as a csv file "
        + "where the each row from the excel sheet is output as a newline in the csv file. This processor is currently only capable of processing .xlsx "
        + "(XSSF 2007 OOXML file format) Excel documents and not older .xls (HSSF '97(-2007) file format) documents. This processor also expects well formatted "
        + "CSV content and will not escape cell's containing invalid content such as newlines or additional commas.")
@WritesAttributes({@WritesAttribute(attribute="sheetname", description="The name of the Excel sheet that this particular row of data came from in the Excel document"),
        @WritesAttribute(attribute="numrows", description="The number of rows in this Excel Sheet"),
        @WritesAttribute(attribute="sourcefilename", description="The name of the Excel document file that this data originated from"),
        @WritesAttribute(attribute="convertexceltocsvprocessor.error", description="Error message that was encountered on a per Excel sheet basis. This attribute is"
                + " only populated if an error was occured while processing the particular sheet. Having the error present at the sheet level will allow for the end"
                + " user to better understand what syntax errors in their excel doc on a larger scale caused the error.")})
public class ConvertExcelToCSVProcessor
        extends AbstractProcessor {

    private static final String CSV_MIME_TYPE = "text/csv";
    public static final String SHEET_NAME = "sheetname";
    public static final String ROW_NUM = "numrows";
    public static final String SOURCE_FILE_NAME = "sourcefilename";
    private static final String DESIRED_SHEETS_DELIMITER = ",";
    private static final String UNKNOWN_SHEET_NAME = "UNKNOWN";

    public static final PropertyDescriptor DESIRED_SHEETS = new PropertyDescriptor
            .Builder().name("extract-sheets")
            .displayName("Sheets to Extract")
            .description("Comma separated list of Excel document sheet names that should be extracted from the excel document. If this property"
                    + " is left blank then all of the sheets will be extracted from the Excel document. The list of names is case in-sensitive. Any sheets not "
                    + "specified in this value will be ignored.")
            .required(false)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor ROWS_TO_SKIP = new PropertyDescriptor
            .Builder().name("excel-extract-first-row")
            .displayName("Number of Rows to Skip")
            .description("The row number of the first row to start processing."
                    + "Use this to skip over rows of data at the top of your worksheet that are not part of the dataset."
                    + "Empty rows of data anywhere in the spreadsheet will always be skipped, no matter what this value is set to.")
            .required(true)
            .defaultValue("0")
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR)
            .build();

    public static final PropertyDescriptor COLUMNS_TO_SKIP = new PropertyDescriptor
            .Builder().name("excel-extract-column-to-skip")
            .displayName("Columns To Skip")
            .description("Comma delimited list of column numbers to skip. Use the columns number and not the letter designation. "
                    + "Use this to skip over columns anywhere in your worksheet that you don't want extracted as part of the record.")
            .required(false)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor FORMAT_VALUES = new PropertyDescriptor.Builder()
            .name("excel-format-values")
            .displayName("Format Cell Values")
            .description("Should the cell values be written to CSV using the formatting applied in Excel, or should they be printed as raw values.")
            .allowableValues("true", "false")
            .defaultValue("false")
            .required(true)
            .build();

    public static final Relationship ORIGINAL = new Relationship.Builder()
            .name("original")
            .description("Original Excel document received by this processor")
            .build();

    public static final Relationship SUCCESS = new Relationship.Builder()
            .name("success")
            .description("Excel data converted to csv")
            .build();

    public static final Relationship FAILURE = new Relationship.Builder()
            .name("failure")
            .description("Failed to parse the Excel document")
            .build();

    private List<PropertyDescriptor> descriptors;
    private Set<Relationship> relationships;

    /**
     * Registers the supported properties (including the shared CSV formatting
     * properties from CSVUtils, with quoting defaulted to QUOTE_NONE) and the
     * three relationships of this processor.
     */
    @Override
    protected void init(final ProcessorInitializationContext context) {
        final List<PropertyDescriptor> descriptors = new ArrayList<>();
        descriptors.add(DESIRED_SHEETS);
        descriptors.add(ROWS_TO_SKIP);
        descriptors.add(COLUMNS_TO_SKIP);
        descriptors.add(FORMAT_VALUES);
        descriptors.add(CSVUtils.CSV_FORMAT);
        descriptors.add(CSVUtils.VALUE_SEPARATOR);
        descriptors.add(CSVUtils.INCLUDE_HEADER_LINE);
        descriptors.add(CSVUtils.QUOTE_CHAR);
        descriptors.add(CSVUtils.ESCAPE_CHAR);
        descriptors.add(CSVUtils.COMMENT_MARKER);
        descriptors.add(CSVUtils.NULL_STRING);
        descriptors.add(CSVUtils.TRIM_FIELDS);
        // QUOTE_MODE is re-declared here so its default can be overridden to QUOTE_NONE.
        descriptors.add(new PropertyDescriptor.Builder()
                .fromPropertyDescriptor(CSVUtils.QUOTE_MODE)
                .defaultValue(CSVUtils.QUOTE_NONE.getValue())
                .build());
        descriptors.add(CSVUtils.RECORD_SEPARATOR);
        descriptors.add(CSVUtils.TRAILING_DELIMITER);
        this.descriptors = Collections.unmodifiableList(descriptors);

        final Set<Relationship> relationships = new HashSet<>();
        relationships.add(ORIGINAL);
        relationships.add(SUCCESS);
        relationships.add(FAILURE);
        this.relationships = Collections.unmodifiableSet(relationships);
    }

    @Override
    public Set<Relationship> getRelationships() {
        return this.relationships;
    }

    @Override
    public final List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return descriptors;
    }

    /**
     * Opens the incoming FlowFile as an OOXML package and streams each worksheet
     * (all of them, or only those named in {@link #DESIRED_SHEETS}) through
     * {@link #handleExcelSheet}, producing one CSV FlowFile per sheet. The
     * original document is routed to {@link #ORIGINAL}; any runtime failure
     * routes it to {@link #FAILURE} with an ".error" attribute.
     */
    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        final FlowFile flowFile = session.get();
        if ( flowFile == null ) {
            return;
        }

        final String desiredSheetsDelimited = context.getProperty(DESIRED_SHEETS).evaluateAttributeExpressions(flowFile).getValue();
        final boolean formatValues = context.getProperty(FORMAT_VALUES).asBoolean();

        final CSVFormat csvFormat = CSVUtils.createCSVFormat(context);

        //Switch to 0 based index
        final int firstRow = context.getProperty(ROWS_TO_SKIP).evaluateAttributeExpressions(flowFile).asInteger() - 1;

        final String[] sColumnsToSkip = StringUtils
                .split(context.getProperty(COLUMNS_TO_SKIP).evaluateAttributeExpressions(flowFile).getValue(), ",");

        final List<Integer> columnsToSkip = new ArrayList<>();

        if(sColumnsToSkip != null && sColumnsToSkip.length > 0) {
            for (String c : sColumnsToSkip) {
                try {
                    //Switch to 0 based index
                    columnsToSkip.add(Integer.parseInt(c) - 1);
                } catch (NumberFormatException e) {
                    throw new ProcessException("Invalid column in Columns to Skip list.", e);
                }
            }
        }

        try {
            session.read(flowFile, new InputStreamCallback() {
                @Override
                public void process(InputStream inputStream) throws IOException {
                    try {
                        // Event-model (SAX) reading keeps memory flat even for large workbooks.
                        OPCPackage pkg = OPCPackage.open(inputStream);
                        XSSFReader r = new XSSFReader(pkg);
                        ReadOnlySharedStringsTable sst = new ReadOnlySharedStringsTable(pkg);
                        StylesTable styles = r.getStylesTable();
                        XSSFReader.SheetIterator iter = (XSSFReader.SheetIterator) r.getSheetsData();
                        if (desiredSheetsDelimited != null) {
                            String[] desiredSheets = StringUtils
                                    .split(desiredSheetsDelimited, DESIRED_SHEETS_DELIMITER);
                            if (desiredSheets != null) {
                                while (iter.hasNext()) {
                                    InputStream sheet = iter.next();
                                    String sheetName = iter.getSheetName();

                                    for (int i = 0; i < desiredSheets.length; i++) {
                                        //If the sheetName is a desired one parse it
                                        if (sheetName.equalsIgnoreCase(desiredSheets[i])) {
                                            ExcelSheetReadConfig readConfig = new ExcelSheetReadConfig(columnsToSkip, firstRow, sheetName, formatValues, sst, styles);
                                            handleExcelSheet(session, flowFile, sheet, readConfig, csvFormat);
                                            break;
                                        }
                                    }
                                }
                            } else {
                                getLogger().debug("Excel document was parsed but no sheets with the specified desired names were found.");
                            }

                        } else {
                            //Get all of the sheets in the document.
                            while (iter.hasNext()) {
                                InputStream sheet = iter.next();
                                String sheetName = iter.getSheetName();

                                ExcelSheetReadConfig readConfig = new ExcelSheetReadConfig(columnsToSkip, firstRow, sheetName, formatValues, sst, styles);
                                handleExcelSheet(session, flowFile, sheet, readConfig, csvFormat);
                            }
                        }
                    } catch (InvalidFormatException ife) {
                        getLogger().error("Only .xlsx Excel 2007 OOXML files are supported", ife);
                        throw new UnsupportedOperationException("Only .xlsx Excel 2007 OOXML files are supported", ife);
                    } catch (OpenXML4JException | SAXException e) {
                        getLogger().error("Error occurred while processing Excel document metadata", e);
                    }
                }
            });

            session.transfer(flowFile, ORIGINAL);

        } catch (RuntimeException ex) {
            getLogger().error("Failed to process incoming Excel document. " + ex.getMessage(), ex);
            FlowFile failedFlowFile = session.putAttribute(flowFile,
                    ConvertExcelToCSVProcessor.class.getName() + ".error", ex.getMessage());
            session.transfer(failedFlowFile, FAILURE);
        }
    }

    /**
     * Handles an individual Excel sheet from the entire Excel document. Each sheet will result in an individual flowfile.
     *
     * @param session
     *            The NiFi ProcessSession instance for the current invocation.
     * @param originalParentFF
     *            the incoming FlowFile the new per-sheet FlowFile is created from
     * @param sheetInputStream
     *            raw XML stream of the worksheet; closed in the finally block
     * @param readConfig
     *            per-sheet read settings (skip rows/columns, formatting, tables)
     * @param csvFormat
     *            the Commons CSV format assembled from the processor properties
     */
    private void handleExcelSheet(ProcessSession session, FlowFile originalParentFF, final InputStream sheetInputStream, ExcelSheetReadConfig readConfig,
                                  CSVFormat csvFormat) throws IOException {

        FlowFile ff = session.create(originalParentFF);
        try {
            final DataFormatter formatter = new DataFormatter();
            final InputSource sheetSource = new InputSource(sheetInputStream);

            final SheetToCSV sheetHandler = new SheetToCSV(readConfig, csvFormat);

            final XMLReader parser = SAXHelper.newXMLReader();

            //If Value Formatting is set to false then don't pass in the styles table.
            // This will cause the XSSF Handler to return the raw value instead of the formatted one.
            final StylesTable sst = readConfig.getFormatValues()?readConfig.getStyles():null;

            final XSSFSheetXMLHandler handler = new XSSFSheetXMLHandler(
                    sst, null, readConfig.getSharedStringsTable(), sheetHandler, formatter, false);

            parser.setContentHandler(handler);

            ff = session.write(ff, new OutputStreamCallback() {
                @Override
                public void process(OutputStream out) throws IOException {
                    PrintStream outPrint = new PrintStream(out);
                    sheetHandler.setOutput(outPrint);

                    try {
                        parser.parse(sheetSource);

                        sheetInputStream.close();

                        sheetHandler.close();
                        outPrint.close();
                    } catch (SAXException se) {
                        getLogger().error("Error occurred while processing Excel sheet {}", new Object[]{readConfig.getSheetName()}, se);
                    }
                }
            });

            ff = session.putAttribute(ff, SHEET_NAME, readConfig.getSheetName());
            // String.valueOf avoids the deprecated Long(long) boxing constructor.
            ff = session.putAttribute(ff, ROW_NUM, String.valueOf(sheetHandler.getRowCount()));

            if (StringUtils.isNotEmpty(originalParentFF.getAttribute(CoreAttributes.FILENAME.key()))) {
                ff = session.putAttribute(ff, SOURCE_FILE_NAME, originalParentFF.getAttribute(CoreAttributes.FILENAME.key()));
            } else {
                ff = session.putAttribute(ff, SOURCE_FILE_NAME, UNKNOWN_SHEET_NAME);
            }

            //Update the CoreAttributes.FILENAME to have the .csv extension now. Also update MIME.TYPE
            ff = session.putAttribute(ff, CoreAttributes.FILENAME.key(), updateFilenameToCSVExtension(ff.getAttribute(CoreAttributes.UUID.key()),
                    ff.getAttribute(CoreAttributes.FILENAME.key()), readConfig.getSheetName()));
            ff = session.putAttribute(ff, CoreAttributes.MIME_TYPE.key(), CSV_MIME_TYPE);

            session.transfer(ff, SUCCESS);

        } catch (SAXException | ParserConfigurationException saxE) {
            getLogger().error("Failed to create instance of Parser.", saxE);
            ff = session.putAttribute(ff,
                    ConvertExcelToCSVProcessor.class.getName() + ".error", saxE.getMessage());
            session.transfer(ff, FAILURE);
        } finally {
            sheetInputStream.close();
        }
    }

    /**
     * Uses the XSSF Event SAX helpers to do most of the work
     * of parsing the Sheet XML, and outputs the contents
     * as a (basic) CSV.
     */
    private class SheetToCSV implements XSSFSheetXMLHandler.SheetContentsHandler {
        private ExcelSheetReadConfig readConfig;
        CSVFormat csvFormat;

        private boolean firstCellOfRow;
        private boolean skipRow;
        private int currentRow = -1;
        private int currentCol = -1;
        private int rowCount = 0;
        private boolean rowHasValues=false;
        private int skippedColumns=0;

        private CSVPrinter printer;

        // True while processing the first non-skipped row; that row defines the
        // exported column area via readConfig.setFirst/LastColumn.
        private boolean firstRow=false;

        private ArrayList<Object> fieldValues;

        public int getRowCount(){
            return rowCount;
        }

        // Must be called before parsing starts; wires the CSV printer to the
        // FlowFile's output stream.
        public void setOutput(PrintStream output){
            final OutputStreamWriter streamWriter = new OutputStreamWriter(output);

            try {
                printer = new CSVPrinter(streamWriter, csvFormat);
            } catch (IOException e) {
                throw new ProcessException("Failed to create CSV Printer.", e);
            }
        }

        public SheetToCSV(ExcelSheetReadConfig readConfig, CSVFormat csvFormat){
            this.readConfig = readConfig;
            this.csvFormat = csvFormat;
        }

        @Override
        public void startRow(int rowNum) {
            if(rowNum <= readConfig.getOverrideFirstRow()) {
                skipRow = true;
                return;
            }

            // Prepare for this row
            skipRow = false;
            firstCellOfRow = true;
            firstRow = currentRow==-1;
            currentRow = rowNum;
            currentCol = -1;
            rowHasValues = false;
            fieldValues = new ArrayList<>();
        }

        @Override
        public void endRow(int rowNum) {
            if(skipRow) {
                return;
            }

            if(firstRow){
                readConfig.setLastColumn(currentCol);
            }

            //if there was no data in this row, don't write it
            if(!rowHasValues) {
                return;
            }

            // Ensure the correct number of columns
            int columnsToAdd = (readConfig.getLastColumn() - currentCol) - readConfig.getColumnsToSkip().size();
            for (int i=0; i<columnsToAdd; i++) {
                fieldValues.add(null);
            }

            try {
                printer.printRecord(fieldValues);
            } catch (IOException e) {
                // Previously only printStackTrace'd, silently dropping rows; propagate so
                // onTrigger's RuntimeException handler routes the FlowFile to FAILURE.
                throw new ProcessException("Failed to write CSV record.", e);
            }
            rowCount++;
        }

        @Override
        public void cell(String cellReference, String formattedValue,
                         XSSFComment comment) {
            if(skipRow) {
                return;
            }

            // gracefully handle missing CellRef here in a similar way as XSSFCell does
            if(cellReference == null) {
                cellReference = new CellAddress(currentRow, currentCol).formatAsString();
            }

            // Did we miss any cells?
            int thisCol = (new CellReference(cellReference)).getCol();

            // Should we skip this

            //Use the first row of the file to decide on the area of data to export
            if(firstRow && firstCellOfRow){
                readConfig.setFirstRow(currentRow);
                readConfig.setFirstColumn(thisCol);
            }

            //if this cell falls outside our area, or has been explicitly marked as a skipped column, return and don't write it out.
            if(!firstRow && (thisCol < readConfig.getFirstColumn() || thisCol > readConfig.getLastColumn())){
                return;
            }

            if(readConfig.getColumnsToSkip().contains(thisCol)){
                skippedColumns++;
                return;
            }

            // The sheet XML omits empty cells, so pad with nulls for any gap
            // between the previous cell and this one.
            int missedCols = (thisCol - readConfig.getFirstColumn()) - (currentCol - readConfig.getFirstColumn()) - 1;
            if(firstCellOfRow){
                missedCols = (thisCol - readConfig.getFirstColumn());
            }
            missedCols -= skippedColumns;

            if (firstCellOfRow) {
                firstCellOfRow = false;
            }

            for (int i=0; i<missedCols; i++) {
                fieldValues.add(null);
            }
            currentCol = thisCol;

            fieldValues.add(formattedValue);

            rowHasValues = true;
            skippedColumns = 0;
        }

        @Override
        public void headerFooter(String s, boolean b, String s1) {
            // Headers and footers are intentionally not exported to CSV.
        }

        public void close() throws IOException {
            printer.close();
        }
    }

    /**
     * Takes the original input filename and updates it by removing the file extension and replacing it with
     * the .csv extension.
     *
     * @param nifiUUID
     *            the FlowFile's UUID, used when the original filename is empty
     * @param origFileName
     *            Original filename from the input file.
     * @param sheetName
     *            worksheet name, appended so per-sheet outputs are distinguishable
     *
     * @return
     *            The new filename with the .csv extension that should be place in the output flowfile's attributes
     */
    private String updateFilenameToCSVExtension(String nifiUUID, String origFileName, String sheetName) {

        StringBuilder stringBuilder = new StringBuilder();

        if (StringUtils.isNotEmpty(origFileName)) {
            String ext = FilenameUtils.getExtension(origFileName);
            if (StringUtils.isNotEmpty(ext)) {
                stringBuilder.append(StringUtils.replace(origFileName, ("." + ext), ""));
            } else {
                stringBuilder.append(origFileName);
            }
        } else {
            stringBuilder.append(nifiUUID);
        }

        stringBuilder.append("_");
        stringBuilder.append(sheetName);
        stringBuilder.append(".");
        stringBuilder.append("csv");

        return stringBuilder.toString();
    }

    /**
     * Immutable-ish carrier of the per-sheet read settings plus mutable state
     * (first/last row and column) discovered while streaming the sheet.
     */
    private class ExcelSheetReadConfig {
        public String getSheetName(){
            return sheetName;
        }

        public int getFirstColumn(){
            return firstColumn;
        }

        public void setFirstColumn(int value){
            this.firstColumn = value;
        }

        public int getLastColumn(){
            return lastColumn;
        }

        public void setLastColumn(int lastColumn) {
            this.lastColumn = lastColumn;
        }

        public int getOverrideFirstRow(){
            return overrideFirstRow;
        }

        public boolean getFormatValues() {
            return formatValues;
        }

        public int getFirstRow(){
            return firstRow;
        }

        public void setFirstRow(int value){
            firstRow = value;
        }

        public int getLastRow(){
            return lastRow;
        }

        public void setLastRow(int value){
            lastRow = value;
        }

        public List<Integer> getColumnsToSkip(){
            return columnsToSkip;
        }

        public ReadOnlySharedStringsTable getSharedStringsTable(){
            return sst;
        }

        public StylesTable getStyles(){
            return styles;
        }

        private int firstColumn;
        private int lastColumn;

        private int firstRow;
        private int lastRow;
        private int overrideFirstRow;

        private String sheetName;
        private boolean formatValues;

        private ReadOnlySharedStringsTable sst;
        private StylesTable styles;

        private List<Integer> columnsToSkip;

        public ExcelSheetReadConfig(List<Integer> columnsToSkip, int overrideFirstRow, String sheetName,
                                    boolean formatValues, ReadOnlySharedStringsTable sst, StylesTable styles){

            this.sheetName = sheetName;
            this.columnsToSkip = columnsToSkip;
            this.overrideFirstRow = overrideFirstRow;
            this.formatValues = formatValues;

            this.sst = sst;
            this.styles = styles;
        }
    }
}
/*
 * Copyright (c) 2012. HappyDroids LLC, All rights reserved.
 */

package com.happydroids.droidtowers.gui;

import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.files.FileHandle;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.TextureRegion;
import com.badlogic.gdx.scenes.scene2d.Actor;
import com.badlogic.gdx.scenes.scene2d.InputEvent;
import com.badlogic.gdx.scenes.scene2d.Stage;
import com.badlogic.gdx.scenes.scene2d.ui.Image;
import com.badlogic.gdx.scenes.scene2d.ui.Table;
import com.badlogic.gdx.scenes.scene2d.ui.TextButton;
import com.badlogic.gdx.scenes.scene2d.utils.Align;
import com.badlogic.gdx.scenes.scene2d.utils.TextureRegionDrawable;
import com.badlogic.gdx.utils.Scaling;
import com.google.common.collect.Sets;
import com.happydroids.droidtowers.TowerConsts;
import com.happydroids.droidtowers.gamestate.GameSave;
import com.happydroids.droidtowers.gamestate.GameSaveFactory;
import com.happydroids.droidtowers.platform.Display;
import com.happydroids.droidtowers.scenes.LoadTowerSplashScene;
import com.happydroids.droidtowers.scenes.components.SceneManager;
import com.happydroids.droidtowers.tasks.WaitForCloudSyncTask;
import org.ocpsoft.pretty.time.PrettyTime;

import java.util.Date;
import java.util.Set;

import static java.text.NumberFormat.getNumberInstance;

/**
 * Scrollable window listing saved towers found in external storage; each row
 * offers Play and Delete actions. Textures loaded for tower thumbnails are
 * tracked in {@link #towerImageTextures} and disposed on dismiss.
 */
public class LoadTowerWindow extends ScrollableTowerWindow {
  private static final String TAG = LoadTowerWindow.class.getSimpleName();
  // Set once any parseable save file produced a row; controls the empty-state label.
  private boolean foundSaveFile;
  private final Dialog progressDialog;
  private final WaitForCloudSyncTask waitForCloudSyncTask;
  // Textures created by loadTowerImage; kept so dismiss() can dispose them.
  private Set<Texture> towerImageTextures;

  public LoadTowerWindow(Stage stage) {
    super("Load a Tower", stage);
    towerImageTextures = Sets.newHashSet();

    // Shown while the cloud-sync task runs; dismissed in buildGameSaveList().
    // NOTE(review): presumably WaitForCloudSyncTask invokes buildGameSaveList when
    // sync completes — confirm against the task's implementation.
    progressDialog = new ProgressDialog()
                             .setMessage("looking for towers")
                             .hideButtons(true);

    waitForCloudSyncTask = new WaitForCloudSyncTask(this);
    waitForCloudSyncTask.run();

    // Closing the window cancels any in-flight sync and hides the spinner.
    setDismissCallback(new Runnable() {
      @Override public void run() {
        progressDialog.dismiss();
        waitForCloudSyncTask.cancel();
      }
    });
  }

  /**
   * Scans the external save directory for *.json save files and adds one row
   * per parseable save; shows a placeholder label when none are found.
   */
  public void buildGameSaveList() {
    FileHandle storage = Gdx.files.external(TowerConsts.GAME_SAVE_DIRECTORY);
    FileHandle[] files = storage.list(".json");

    if (files != null && files.length > 0) {
      for (FileHandle file : files) {
        Table fileRow = makeGameFileRow(file);
        if (fileRow != null) {
          row().fillX();
          add(fileRow).expandX();
          foundSaveFile = true;
        }
      }
    }

    if (!foundSaveFile) {
      add(FontManager.RobotoBold18.makeLabel("No saved games were found on this device."));
    } else {
      shoveContentUp();
    }

    progressDialog.dismiss();
  }

  /**
   * Builds one list row (thumbnail + info box + divider) for a save file.
   * Returns null when the file's metadata cannot be parsed.
   */
  private Table makeGameFileRow(final FileHandle gameSaveFile) {
    GameSave towerData;
    try {
      towerData = GameSaveFactory.readMetadata(gameSaveFile.read());
    } catch (Exception e) {
      Gdx.app.log(TAG, "Failed to parse file.", e);
      return null;
    }

    // Thumbnail convention: "<save>.json.png" alongside the save file.
    FileHandle imageFile = Gdx.files.external(TowerConsts.GAME_SAVE_DIRECTORY + gameSaveFile.name() + ".png");
    Actor imageActor = null;
    if (imageFile.exists()) {
      try {
        imageActor = new Image(loadTowerImage(imageFile), Scaling.fit, Align.top);
      } catch (Exception ignored) {
        // Corrupt/unreadable image: fall through to the "No image." label below.
        imageActor = null;
      }
    }

    if (imageActor == null) {
      imageActor = FontManager.Default.makeLabel("No image.");
    }

    Table fileRow = new Table();
    fileRow.defaults().fillX().pad(Display.devicePixel(10)).space(Display.devicePixel(10));
    fileRow.row();
    fileRow.add(imageActor).width(Display.devicePixel(64)).height(Display.devicePixel(64)).center();
    fileRow.add(makeGameFileInfoBox(fileRow, gameSaveFile, towerData)).expandX().top();
    fileRow.row().fillX();
    fileRow.add(new HorizontalRule(Color.DARK_GRAY, 2)).colspan(2);

    return fileRow;
  }

  // Loads the thumbnail texture and records it for disposal on dismiss().
  private TextureRegionDrawable loadTowerImage(FileHandle imageFile) {
    Texture texture = new Texture(imageFile);
    towerImageTextures.add(texture);
    return new TextureRegionDrawable(new TextureRegion(texture));
  }

  /**
   * Builds the info box for a row: tower name, population, last-played time,
   * plus Play (launches the save) and Delete (confirm dialog, removes the row).
   */
  private Table makeGameFileInfoBox(final Table fileRow, final FileHandle savedGameFile, GameSave towerData) {
    TextButton launchButton = FontManager.RobotoBold18.makeTextButton("Play");
    launchButton.addListener(new VibrateClickListener() {
      @Override public void onClick(InputEvent event, float x, float y) {
        dismiss();
        try {
          SceneManager.changeScene(LoadTowerSplashScene.class, GameSaveFactory.readFile(savedGameFile));
        } catch (Exception e) {
          throw new RuntimeException(e);
        }
      }
    });

    TextButton deleteButton = FontManager.RobotoBold18.makeTextButton("Delete");
    deleteButton.addListener(new VibrateClickListener() {
      @Override public void onClick(InputEvent event, float x, float y) {
        new Dialog().setTitle("Are you sure you want to delete this Tower?")
                .setMessage("If you delete this tower, it will disappear forever.\n\nAre you sure?")
                .addButton("Yes, delete it", new OnClickCallback() {
                  @Override public void onClick(Dialog dialog) {
                    // Remove both the file and its row from the visible list.
                    savedGameFile.delete();
                    content.getCell(fileRow).ignore();
                    content.removeActor(fileRow);
                    content.invalidate();
                    dialog.dismiss();
                  }
                })
                .addButton("Keep it!", new OnClickCallback() {
                  @Override public void onClick(Dialog dialog) {
                    dialog.dismiss();
                  }
                })
                .show();
      }
    });

    Table metadata = new Table();
    metadata.defaults().top().left().fillX();
    addLabelRow(metadata, towerData.getTowerName(), FontManager.RobotoBold18, Color.WHITE);
    addLabelRow(metadata, "Population: " + getNumberInstance().format(towerData.getPlayer()
                                                                              .getTotalPopulation()), FontManager.Default, Color.GRAY);

    Date lastPlayed = towerData.getMetadata().lastPlayed;
    if (lastPlayed != null) {
      // Human-readable relative time, e.g. "3 days ago".
      PrettyTime prettyTime = new PrettyTime();
      addLabelRow(metadata, "Last played: " + prettyTime.format(lastPlayed), FontManager.Default, Color.GRAY);
    }

    Table box = new Table();
    box.defaults().fillX().space(Display.devicePixel(5));
    box.row().top().left().fillX();
    box.add(metadata).top().left().expandX();
    box.add(deleteButton).width(Display.devicePixel(80));
    box.add(launchButton).width(Display.devicePixel(80));

    return box;
  }

  // Adds a single full-width label row to the given table.
  private void addLabelRow(Table table, String content, FontHelper font, Color fontColor) {
    table.row().fillX();
    table.add(font.makeLabel(content, fontColor)).expandX();
  }

  @Override public TowerWindow show() {
    super.show();
    // Reset any fade-out in progress so the spinner is fully visible again.
    progressDialog.show();
    progressDialog.clearActions();
    progressDialog.getColor().a = 1f;

    return this;
  }

  @Override public void dismiss() {
    super.dismiss();

    // Dispose GPU textures created for thumbnails; ignore double-dispose errors.
    for (Texture texture : towerImageTextures) {
      try {
        texture.dispose();
      } catch (Exception ignored) {
      }
    }
    towerImageTextures.clear();
  }
}
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skyframe;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.build.lib.util.OS;
import com.google.devtools.build.lib.vfs.ModifiedFileSet;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.Root;
import com.google.devtools.common.options.Option;
import com.google.devtools.common.options.OptionDocumentationCategory;
import com.google.devtools.common.options.OptionEffectTag;
import com.google.devtools.common.options.OptionsBase;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.util.Set;

/**
 * File system watcher for local filesystems. It's able to provide a list of changed files between
 * two consecutive calls. On Linux, uses the standard Java WatchService, which uses 'inotify' and,
 * on OS X, uses {@link MacOSXFsEventsDiffAwareness}, which use FSEvents.
 *
 * <p>
 * This is an abstract class, specialized by {@link MacOSXFsEventsDiffAwareness} and
 * {@link WatchServiceDiffAwareness}.
 */
public abstract class LocalDiffAwareness implements DiffAwareness {
  /**
   * Option to enable / disable local diff awareness.
   */
  public static final class Options extends OptionsBase {
    @Option(
      name = "watchfs",
      defaultValue = "false",
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help =
          "On Linux/macOS: If true, %{product} tries to use the operating system's file watch "
              + "service for local changes instead of scanning every file for a change. On "
              + "Windows: this flag currently is a non-op but can be enabled in conjunction "
              + "with --experimental_windows_watchfs. On any OS: The behavior is undefined "
              + "if your workspace is on a network file system, and files are edited on a "
              + "remote machine."
    )
    public boolean watchFS;

    @Option(
      name = "experimental_windows_watchfs",
      defaultValue = "false",
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help =
          // Fixed missing space: the original concatenation rendered "--watchfsis a non-op".
          "If true, experimental Windows support for --watchfs is enabled. Otherwise --watchfs"
              + " is a non-op on Windows. Make sure to also enable --watchfs."
    )
    public boolean windowsWatchFS;
  }

  /** Factory for creating {@link LocalDiffAwareness} instances. */
  public static class Factory implements DiffAwareness.Factory {
    private final ImmutableList<String> excludedNetworkFileSystemsPrefixes;

    /**
     * Creates a new factory; the file system watcher may not work on all file systems, particularly
     * for network file systems. The prefix list can be used to exclude known paths that point to
     * network file systems.
     */
    public Factory(ImmutableList<String> excludedNetworkFileSystemsPrefixes) {
      this.excludedNetworkFileSystemsPrefixes = excludedNetworkFileSystemsPrefixes;
    }

    /**
     * Returns a platform-appropriate watcher rooted at {@code pathEntry}, or null when the path
     * cannot be resolved or falls under an excluded (network filesystem) prefix.
     */
    @Override
    public DiffAwareness maybeCreate(Root pathEntry) {
      com.google.devtools.build.lib.vfs.Path resolvedPathEntry;
      try {
        resolvedPathEntry = pathEntry.asPath().resolveSymbolicLinks();
      } catch (IOException e) {
        return null;
      }
      PathFragment resolvedPathEntryFragment = resolvedPathEntry.asFragment();
      // There's no good way to automatically detect network file systems. We rely on a list of
      // paths to exclude for now (and maybe add a command-line option in the future?).
      for (String prefix : excludedNetworkFileSystemsPrefixes) {
        if (resolvedPathEntryFragment.startsWith(PathFragment.create(prefix))) {
          return null;
        }
      }

      // On OSX uses FsEvents due to https://bugs.openjdk.java.net/browse/JDK-7133447
      if (OS.getCurrent() == OS.DARWIN) {
        return new MacOSXFsEventsDiffAwareness(resolvedPathEntryFragment.toString());
      }

      return new WatchServiceDiffAwareness(resolvedPathEntryFragment.toString());
    }
  }

  /**
   * A view that results in any subsequent getDiff calls returning
   * {@link ModifiedFileSet#EVERYTHING_MODIFIED}. Use this if --watchFs is disabled.
   *
   * <p>The position is set to -2 in order for {@link #areInSequence} below to always return false
   * if this view is passed to it. Any negative number would work; we don't use -1 as the other
   * view may have a position of 0.
   */
  protected static final View EVERYTHING_MODIFIED =
      new SequentialView(/*owner=*/null, /*position=*/-2, ImmutableSet.<Path>of());

  public static boolean areInSequence(SequentialView oldView, SequentialView newView) {
    // Keep this in sync with the EVERYTHING_MODIFIED View above.
    return oldView.owner == newView.owner && (oldView.position + 1) == newView.position;
  }

  // Monotonically increasing counter; each newView() call stamps the next position.
  private int numGetCurrentViewCalls = 0;

  /** Root directory to watch. This is an absolute path. */
  protected final Path watchRootPath;

  protected LocalDiffAwareness(String watchRoot) {
    this.watchRootPath = FileSystems.getDefault().getPath(watchRoot);
  }

  /**
   * The WatchService is inherently sequential and side-effectful, so we enforce this by only
   * supporting {@link #getDiff} calls that happen to be sequential.
   */
  @VisibleForTesting
  static class SequentialView implements DiffAwareness.View {
    private final LocalDiffAwareness owner;
    private final int position;
    private final Set<Path> modifiedAbsolutePaths;

    public SequentialView(LocalDiffAwareness owner, int position, Set<Path> modifiedAbsolutePaths) {
      this.owner = owner;
      this.position = position;
      this.modifiedAbsolutePaths = modifiedAbsolutePaths;
    }

    @Override
    public String toString() {
      return String.format("SequentialView[owner=%s, position=%d, modifiedAbsolutePaths=%s]",
          owner, position, modifiedAbsolutePaths);
    }
  }

  /**
   * Returns true on any call before first call to {@link #newView}.
   */
  protected boolean isFirstCall() {
    return numGetCurrentViewCalls == 0;
  }

  /**
   * Create a new views using a list of modified absolute paths. This will increase the view
   * counter.
   */
  protected SequentialView newView(Set<Path> modifiedAbsolutePaths) {
    numGetCurrentViewCalls++;
    return new SequentialView(this, numGetCurrentViewCalls, modifiedAbsolutePaths);
  }

  /**
   * Computes the set of files modified between two views. Non-consecutive views (or views from a
   * different owner, such as {@link #EVERYTHING_MODIFIED}) conservatively report everything as
   * modified.
   *
   * @throws IncompatibleViewException if either view is not a {@link SequentialView}
   * @throws BrokenDiffAwarenessException if a reported path lies outside {@link #watchRootPath}
   */
  @Override
  public ModifiedFileSet getDiff(View oldView, View newView)
      throws IncompatibleViewException, BrokenDiffAwarenessException {
    SequentialView oldSequentialView;
    SequentialView newSequentialView;
    try {
      oldSequentialView = (SequentialView) oldView;
      newSequentialView = (SequentialView) newView;
    } catch (ClassCastException e) {
      throw new IncompatibleViewException("Given views are not from LocalDiffAwareness");
    }
    if (!areInSequence(oldSequentialView, newSequentialView)) {
      return ModifiedFileSet.EVERYTHING_MODIFIED;
    }
    ModifiedFileSet.Builder resultBuilder = ModifiedFileSet.builder();
    for (Path modifiedPath : newSequentialView.modifiedAbsolutePaths) {
      if (!modifiedPath.startsWith(watchRootPath)) {
        throw new BrokenDiffAwarenessException(
            String.format("%s is not under %s", modifiedPath, watchRootPath));
      }
      PathFragment relativePath =
          PathFragment.create(watchRootPath.relativize(modifiedPath).toString());
      if (!relativePath.isEmpty()) {
        resultBuilder.modify(relativePath);
      }
    }
    return resultBuilder.build();
  }

  @Override
  public String name() {
    return "local";
  }
}
/*
 * Copyright (C) Lightbend Inc. <https://www.lightbend.com>
 */
package javaguide.i18n;

import org.junit.Test;

import static java.util.stream.Collectors.joining;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;

import javaguide.testhelpers.MockJavaAction;
import javaguide.testhelpers.MockJavaActionHelper;
import javaguide.i18n.html.hellotemplate;
import javaguide.i18n.html.hellotemplateshort;
import play.Application;
import play.core.j.JavaHandlerComponents;
import play.mvc.Http;
import play.mvc.Result;
import play.test.WithApplication;
import static play.test.Helpers.*;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import play.i18n.Lang;
import play.i18n.Messages;
import play.i18n.MessagesApi;

import java.util.*;

/**
 * Tests backing the Play Java i18n documentation. The {@code // #...} marker
 * comments delimit snippets that are extracted verbatim into the docs — do not
 * edit the code between a marker pair without updating the documentation.
 */
public class JavaI18N extends WithApplication {

  // Configure the app with three supported languages and the doc sample's
  // message files (presumably javaguide/i18n/messages*, e.g. "hello"/"bonjour").
  @Override
  public Application provideApplication() {
    return fakeApplication(
        ImmutableMap.of(
            "play.i18n.langs", ImmutableList.of("en", "en-US", "fr"),
            "messages.path", "javaguide/i18n"));
  }

  // Looking up a message with an explicitly specified language.
  @Test
  public void checkSpecifyLangHello() {
    MessagesApi messagesApi = instanceOf(MessagesApi.class);
    // #specify-lang-render
    String title = messagesApi.get(Lang.forCode("fr"), "hello");
    // #specify-lang-render
    assertTrue(title.equals("bonjour"));
  }

  // With no language hints on the request, the default ("en") messages apply.
  @Test
  public void checkDefaultHello() {
    Result result =
        MockJavaActionHelper.call(
            new DefaultLangController(
                instanceOf(JavaHandlerComponents.class), instanceOf(MessagesApi.class)),
            fakeRequest("GET", "/"),
            mat);
    assertThat(contentAsString(result), containsString("hello"));
  }

  public static class DefaultLangController extends MockJavaAction {
    private final MessagesApi messagesApi;

    DefaultLangController(JavaHandlerComponents javaHandlerComponents, MessagesApi messagesApi) {
      super(javaHandlerComponents);
      this.messagesApi = messagesApi;
    }

    // #default-lang-render
    public Result index(Http.Request request) {
      Messages messages = this.messagesApi.preferred(request);
      return ok(hellotemplate.render(messages));
    }
    // #default-lang-render
  }

  // Same as checkDefaultHello but rendering via the short-form Scala template.
  @Test
  public void checkDefaultScalaHello() {
    Result result =
        MockJavaActionHelper.call(
            new DefaultScalaLangController(
                instanceOf(JavaHandlerComponents.class), instanceOf(MessagesApi.class)),
            fakeRequest("GET", "/"),
            mat);
    assertThat(contentAsString(result), containsString("hello"));
  }

  public static class DefaultScalaLangController extends MockJavaAction {
    private final MessagesApi messagesApi;

    DefaultScalaLangController(
        JavaHandlerComponents javaHandlerComponents, MessagesApi messagesApi) {
      super(javaHandlerComponents);
      this.messagesApi = messagesApi;
    }

    public Result index(Http.Request request) {
      Messages messages = this.messagesApi.preferred(request);
      return ok(hellotemplateshort.render(messages)); // "hello"
    }
  }

  // Forcing the request language to French yields the French message.
  @Test
  public void checkChangeLangHello() {
    Result result =
        MockJavaActionHelper.call(
            new ChangeLangController(
                instanceOf(JavaHandlerComponents.class), instanceOf(MessagesApi.class)),
            fakeRequest("GET", "/"),
            mat);
    assertThat(contentAsString(result), containsString("bonjour"));
  }

  // Controller resolved through the injector (field injection), then invoked.
  @Test
  public void checkRequestMessages() {
    RequestMessagesController c = app.injector().instanceOf(RequestMessagesController.class);
    Result result = MockJavaActionHelper.call(c, fakeRequest("GET", "/"), mat);
    assertThat(contentAsString(result), containsString("hello"));
  }

  public static class ChangeLangController extends MockJavaAction {
    private final MessagesApi messagesApi;

    ChangeLangController(JavaHandlerComponents javaHandlerComponents, MessagesApi messagesApi) {
      super(javaHandlerComponents);
      this.messagesApi = messagesApi;
    }

    // #change-lang-render
    public Result index(Http.Request request) {
      Lang lang = Lang.forCode("fr");
      Messages messages = this.messagesApi.preferred(request.withTransientLang(lang));
      return ok(hellotemplate.render(messages)).withLang(lang, messagesApi);
    }
    // #change-lang-render
  }

  public static class RequestMessagesController extends MockJavaAction {
    @javax.inject.Inject
    public RequestMessagesController(JavaHandlerComponents javaHandlerComponents) {
      super(javaHandlerComponents);
    }

    // Field-injected on purpose: this controller is created by the injector in
    // checkRequestMessages rather than constructed by hand.
    @javax.inject.Inject private MessagesApi messagesApi;

    // #show-request-messages
    public Result index(Http.Request request) {
      Messages messages = this.messagesApi.preferred(request);
      String hello = messages.at("hello");
      return ok(hellotemplate.render(messages));
    }
    // #show-request-messages
  }

  // A transient "en-US" lang selects the US-specific message ("howdy").
  @Test
  public void checkSetTransientLangHello() {
    Result result =
        MockJavaActionHelper.call(
            new SetTransientLangController(
                instanceOf(JavaHandlerComponents.class), instanceOf(MessagesApi.class)),
            fakeRequest("GET", "/"),
            mat);
    assertThat(contentAsString(result), containsString("howdy"));
  }

  public static class SetTransientLangController extends MockJavaAction {
    private final MessagesApi messagesApi;

    SetTransientLangController(
        JavaHandlerComponents javaHandlerComponents, MessagesApi messagesApi) {
      super(javaHandlerComponents);
      this.messagesApi = messagesApi;
    }

    // #set-transient-lang-render
    public Result index(Http.Request request) {
      Lang lang = Lang.forCode("en-US");
      Messages messages = this.messagesApi.preferred(request.withTransientLang(lang));
      return ok(hellotemplate.render(messages));
    }
    // #set-transient-lang-render
  }

  // acceptLanguages() returns the header's languages ordered by quality; the
  // wildcard "*" entry is not included in the result.
  @Test
  public void testAcceptedLanguages() {
    Result result =
        MockJavaActionHelper.call(
            new AcceptedLanguageController(instanceOf(JavaHandlerComponents.class)),
            fakeRequest("GET", "/")
                .header("Accept-Language", "fr-CH, fr;q=0.9, en;q=0.8, de;q=0.7, *;q=0.5"),
            mat);
    assertThat(contentAsString(result), equalTo("fr-CH,fr,en,de"));
  }

  private static final class AcceptedLanguageController extends MockJavaAction {
    AcceptedLanguageController(JavaHandlerComponents javaHandlerComponents) {
      super(javaHandlerComponents);
    }

    // #accepted-languages
    public Result index(Http.Request request) {
      List<Lang> langs = request.acceptLanguages();
      String codes = langs.stream().map(Lang::code).collect(joining(","));
      return ok(codes);
    }
    // #accepted-languages
  }

  // MessageFormat treats '' as a literal apostrophe; the message file uses that.
  @Test
  public void testSingleApostrophe() {
    assertTrue(singleApostrophe());
  }

  private Boolean singleApostrophe() {
    MessagesApi messagesApi = app.injector().instanceOf(MessagesApi.class);
    Collection<Lang> candidates = Collections.singletonList(new Lang(Locale.US));
    Messages messages = messagesApi.preferred(candidates);
    // #single-apostrophe
    String errorMessage = messages.at("info.error");
    Boolean areEqual = errorMessage.equals("You aren't logged in!");
    // #single-apostrophe
    return areEqual;
  }

  // Quoting '{0}' in the message file prevents parameter substitution.
  @Test
  public void testEscapedParameters() {
    assertTrue(escapedParameters());
  }

  private Boolean escapedParameters() {
    MessagesApi messagesApi = app.injector().instanceOf(MessagesApi.class);
    Collection<Lang> candidates = Collections.singletonList(new Lang(Locale.US));
    Messages messages = messagesApi.preferred(candidates);
    // #parameter-escaping
    String errorMessage = messages.at("example.formatting");
    Boolean areEqual =
        errorMessage.equals(
            "When using MessageFormat, '{0}' is replaced with the first parameter.");
    // #parameter-escaping
    return areEqual;
  }

  // #explicit-messages-api
  private MessagesApi explicitMessagesApi() {
    return new play.i18n.MessagesApi(
        new play.api.i18n.DefaultMessagesApi(
            Collections.singletonMap(
                Lang.defaultLang().code(), Collections.singletonMap("foo", "bar")),
            new play.api.i18n.DefaultLangs().asJava()));
  }
  // #explicit-messages-api

  @Test
  public void testExplicitMessagesApi() {
    MessagesApi messagesApi = explicitMessagesApi();
    String message = messagesApi.get(Lang.defaultLang(), "foo");
    assertThat(message, equalTo("bar"));
  }
}
/** * <copyright> * </copyright> * * $Id$ */ package org.wso2.developerstudio.eclipse.gmf.esb.impl; import java.util.Collection; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.common.notify.NotificationChain; import org.eclipse.emf.common.util.EList; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.InternalEObject; import org.eclipse.emf.ecore.impl.ENotificationImpl; import org.eclipse.emf.ecore.util.EObjectContainmentEList; import org.eclipse.emf.ecore.util.InternalEList; import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage; import org.wso2.developerstudio.eclipse.gmf.esb.RegistryKeyProperty; import org.wso2.developerstudio.eclipse.gmf.esb.ThrottleContainer; import org.wso2.developerstudio.eclipse.gmf.esb.ThrottleMediator; import org.wso2.developerstudio.eclipse.gmf.esb.ThrottleMediatorInputConnector; import org.wso2.developerstudio.eclipse.gmf.esb.ThrottleMediatorOnAcceptOutputConnector; import org.wso2.developerstudio.eclipse.gmf.esb.ThrottleMediatorOnRejectOutputConnector; import org.wso2.developerstudio.eclipse.gmf.esb.ThrottleMediatorOutputConnector; import org.wso2.developerstudio.eclipse.gmf.esb.ThrottleOnAcceptBranch; import org.wso2.developerstudio.eclipse.gmf.esb.ThrottleOnRejectBranch; import org.wso2.developerstudio.eclipse.gmf.esb.ThrottlePolicyConfiguration; import org.wso2.developerstudio.eclipse.gmf.esb.ThrottlePolicyEntry; import org.wso2.developerstudio.eclipse.gmf.esb.ThrottlePolicyType; import org.wso2.developerstudio.eclipse.gmf.esb.ThrottleSequenceType; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>Throttle Mediator</b></em>'. 
* <!-- end-user-doc --> * <p> * The following features are implemented: * </p> * <ul> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.ThrottleMediatorImpl#getGroupId <em>Group Id</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.ThrottleMediatorImpl#getPolicyType <em>Policy Type</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.ThrottleMediatorImpl#getPolicyKey <em>Policy Key</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.ThrottleMediatorImpl#getMaxConcurrentAccessCount <em>Max Concurrent Access Count</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.ThrottleMediatorImpl#getPolicyEntries <em>Policy Entries</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.ThrottleMediatorImpl#getPolicyConfiguration <em>Policy Configuration</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.ThrottleMediatorImpl#getOnAcceptBranch <em>On Accept Branch</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.ThrottleMediatorImpl#getOnRejectBranch <em>On Reject Branch</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.ThrottleMediatorImpl#getInputConnector <em>Input Connector</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.ThrottleMediatorImpl#getOutputConnector <em>Output Connector</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.ThrottleMediatorImpl#getOnAcceptOutputConnector <em>On Accept Output Connector</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.ThrottleMediatorImpl#getOnRejectOutputConnector <em>On Reject Output Connector</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.ThrottleMediatorImpl#getThrottleContainer <em>Throttle Container</em>}</li> * <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.ThrottleMediatorImpl#getOnAcceptBranchsequenceType <em>On Accept Branchsequence Type</em>}</li> * <li>{@link 
 *   org.wso2.developerstudio.eclipse.gmf.esb.impl.ThrottleMediatorImpl#getOnAcceptBranchsequenceKey <em>On Accept Branchsequence Key</em>}</li>
 *   <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.ThrottleMediatorImpl#getOnRejectBranchsequenceType <em>On Reject Branchsequence Type</em>}</li>
 *   <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.ThrottleMediatorImpl#getOnRejectBranchsequenceKey <em>On Reject Branchsequence Key</em>}</li>
 * </ul>
 *
 * @generated
 */
public class ThrottleMediatorImpl extends MediatorImpl implements ThrottleMediator {
    // NOTE(review): EMF-generated model class ("DO NOT EDIT" except @generated NOT
    // members). Each feature below follows the standard pattern of an
    // X_EDEFAULT constant plus a cached field.

    /**
     * The default value of the '{@link #getGroupId() <em>Group Id</em>}' attribute.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getGroupId()
     * @generated
     * @ordered
     */
    protected static final String GROUP_ID_EDEFAULT = "group_id";

    /**
     * The cached value of the '{@link #getGroupId() <em>Group Id</em>}' attribute.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getGroupId()
     * @generated
     * @ordered
     */
    protected String groupId = GROUP_ID_EDEFAULT;

    /**
     * The default value of the '{@link #getPolicyType() <em>Policy Type</em>}' attribute.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getPolicyType()
     * @generated
     * @ordered
     */
    protected static final ThrottlePolicyType POLICY_TYPE_EDEFAULT = ThrottlePolicyType.INLINE;

    /**
     * The cached value of the '{@link #getPolicyType() <em>Policy Type</em>}' attribute.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getPolicyType()
     * @generated
     * @ordered
     */
    protected ThrottlePolicyType policyType = POLICY_TYPE_EDEFAULT;

    /**
     * The cached value of the '{@link #getPolicyKey() <em>Policy Key</em>}' containment reference.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getPolicyKey()
     * @generated
     * @ordered
     */
    protected RegistryKeyProperty policyKey;

    /**
     * The default value of the '{@link #getMaxConcurrentAccessCount() <em>Max Concurrent Access Count</em>}' attribute.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getMaxConcurrentAccessCount()
     * @generated
     * @ordered
     */
    protected static final int MAX_CONCURRENT_ACCESS_COUNT_EDEFAULT = 0;

    /**
     * The cached value of the '{@link #getMaxConcurrentAccessCount() <em>Max Concurrent Access Count</em>}' attribute.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getMaxConcurrentAccessCount()
     * @generated
     * @ordered
     */
    protected int maxConcurrentAccessCount = MAX_CONCURRENT_ACCESS_COUNT_EDEFAULT;

    /**
     * The cached value of the '{@link #getPolicyEntries() <em>Policy Entries</em>}' containment reference list.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getPolicyEntries()
     * @generated
     * @ordered
     */
    protected EList<ThrottlePolicyEntry> policyEntries;

    /**
     * The cached value of the '{@link #getPolicyConfiguration() <em>Policy Configuration</em>}' containment reference.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getPolicyConfiguration()
     * @generated
     * @ordered
     */
    protected ThrottlePolicyConfiguration policyConfiguration;

    /**
     * The cached value of the '{@link #getOnAcceptBranch() <em>On Accept Branch</em>}' containment reference.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getOnAcceptBranch()
     * @generated
     * @ordered
     */
    protected ThrottleOnAcceptBranch onAcceptBranch;

    /**
     * The cached value of the '{@link #getOnRejectBranch() <em>On Reject Branch</em>}' containment reference.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getOnRejectBranch()
     * @generated
     * @ordered
     */
    protected ThrottleOnRejectBranch onRejectBranch;

    /**
     * The cached value of the '{@link #getInputConnector() <em>Input Connector</em>}' containment reference.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getInputConnector()
     * @generated
     * @ordered
     */
    protected ThrottleMediatorInputConnector inputConnector;

    /**
     * The cached value of the '{@link #getOutputConnector() <em>Output Connector</em>}' containment reference.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getOutputConnector()
     * @generated
     * @ordered
     */
    protected ThrottleMediatorOutputConnector outputConnector;

    /**
     * The cached value of the '{@link #getOnAcceptOutputConnector() <em>On Accept Output Connector</em>}' containment reference.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getOnAcceptOutputConnector()
     * @generated
     * @ordered
     */
    protected ThrottleMediatorOnAcceptOutputConnector onAcceptOutputConnector;

    /**
     * The cached value of the '{@link #getOnRejectOutputConnector() <em>On Reject Output Connector</em>}' containment reference.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getOnRejectOutputConnector()
     * @generated
     * @ordered
     */
    protected ThrottleMediatorOnRejectOutputConnector onRejectOutputConnector;

    /**
     * The cached value of the '{@link #getThrottleContainer() <em>Throttle Container</em>}' containment reference.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getThrottleContainer()
     * @generated
     * @ordered
     */
    protected ThrottleContainer throttleContainer;

    /**
     * The default value of the '{@link #getOnAcceptBranchsequenceType() <em>On Accept Branchsequence Type</em>}' attribute.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getOnAcceptBranchsequenceType()
     * @generated
     * @ordered
     */
    protected static final ThrottleSequenceType ON_ACCEPT_BRANCHSEQUENCE_TYPE_EDEFAULT = ThrottleSequenceType.ANONYMOUS;

    /**
     * The cached value of the '{@link #getOnAcceptBranchsequenceType() <em>On Accept Branchsequence Type</em>}' attribute.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getOnAcceptBranchsequenceType()
     * @generated
     * @ordered
     */
    protected ThrottleSequenceType onAcceptBranchsequenceType = ON_ACCEPT_BRANCHSEQUENCE_TYPE_EDEFAULT;

    /**
     * The cached value of the '{@link #getOnAcceptBranchsequenceKey() <em>On Accept Branchsequence Key</em>}' containment reference.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getOnAcceptBranchsequenceKey()
     * @generated
     * @ordered
     */
    protected RegistryKeyProperty onAcceptBranchsequenceKey;

    /**
     * The default value of the '{@link #getOnRejectBranchsequenceType() <em>On Reject Branchsequence Type</em>}' attribute.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getOnRejectBranchsequenceType()
     * @generated
     * @ordered
     */
    protected static final ThrottleSequenceType ON_REJECT_BRANCHSEQUENCE_TYPE_EDEFAULT = ThrottleSequenceType.ANONYMOUS;

    /**
     * The cached value of the '{@link #getOnRejectBranchsequenceType() <em>On Reject Branchsequence Type</em>}' attribute.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getOnRejectBranchsequenceType()
     * @generated
     * @ordered
     */
    protected ThrottleSequenceType onRejectBranchsequenceType = ON_REJECT_BRANCHSEQUENCE_TYPE_EDEFAULT;

    /**
     * The cached value of the '{@link #getOnRejectBranchsequenceKey() <em>On Reject Branchsequence Key</em>}' containment reference.
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getOnRejectBranchsequenceKey() * @generated * @ordered */ protected RegistryKeyProperty onRejectBranchsequenceKey; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated NOT */ protected ThrottleMediatorImpl() { super(); RegistryKeyProperty policyKey = EsbFactoryImpl.eINSTANCE .createRegistryKeyProperty(); policyKey.setKeyValue("default/key"); setPolicyKey(policyKey); RegistryKeyProperty onAcceptKey = EsbFactoryImpl.eINSTANCE .createRegistryKeyProperty(); onAcceptKey.setKeyValue("default/key"); setOnAcceptBranchsequenceKey(onAcceptKey); RegistryKeyProperty onRejectKey = EsbFactoryImpl.eINSTANCE .createRegistryKeyProperty(); onRejectKey.setKeyValue("default/key"); setOnRejectBranchsequenceKey(onRejectKey); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return EsbPackage.Literals.THROTTLE_MEDIATOR; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public String getGroupId() { return groupId; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setGroupId(String newGroupId) { String oldGroupId = groupId; groupId = newGroupId; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.THROTTLE_MEDIATOR__GROUP_ID, oldGroupId, groupId)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public ThrottlePolicyType getPolicyType() { return policyType; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setPolicyType(ThrottlePolicyType newPolicyType) { ThrottlePolicyType oldPolicyType = policyType; policyType = newPolicyType == null ? 
POLICY_TYPE_EDEFAULT : newPolicyType; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.THROTTLE_MEDIATOR__POLICY_TYPE, oldPolicyType, policyType)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public RegistryKeyProperty getPolicyKey() { return policyKey; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetPolicyKey(RegistryKeyProperty newPolicyKey, NotificationChain msgs) { RegistryKeyProperty oldPolicyKey = policyKey; policyKey = newPolicyKey; if (eNotificationRequired()) { ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EsbPackage.THROTTLE_MEDIATOR__POLICY_KEY, oldPolicyKey, newPolicyKey); if (msgs == null) msgs = notification; else msgs.add(notification); } return msgs; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setPolicyKey(RegistryKeyProperty newPolicyKey) { if (newPolicyKey != policyKey) { NotificationChain msgs = null; if (policyKey != null) msgs = ((InternalEObject)policyKey).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - EsbPackage.THROTTLE_MEDIATOR__POLICY_KEY, null, msgs); if (newPolicyKey != null) msgs = ((InternalEObject)newPolicyKey).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - EsbPackage.THROTTLE_MEDIATOR__POLICY_KEY, null, msgs); msgs = basicSetPolicyKey(newPolicyKey, msgs); if (msgs != null) msgs.dispatch(); } else if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.THROTTLE_MEDIATOR__POLICY_KEY, newPolicyKey, newPolicyKey)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public int getMaxConcurrentAccessCount() { return maxConcurrentAccessCount; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setMaxConcurrentAccessCount(int newMaxConcurrentAccessCount) { int oldMaxConcurrentAccessCount = maxConcurrentAccessCount; maxConcurrentAccessCount = 
newMaxConcurrentAccessCount; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.THROTTLE_MEDIATOR__MAX_CONCURRENT_ACCESS_COUNT, oldMaxConcurrentAccessCount, maxConcurrentAccessCount)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public EList<ThrottlePolicyEntry> getPolicyEntries() { if (policyEntries == null) { policyEntries = new EObjectContainmentEList<ThrottlePolicyEntry>(ThrottlePolicyEntry.class, this, EsbPackage.THROTTLE_MEDIATOR__POLICY_ENTRIES); } return policyEntries; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public ThrottlePolicyConfiguration getPolicyConfiguration() { return policyConfiguration; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetPolicyConfiguration(ThrottlePolicyConfiguration newPolicyConfiguration, NotificationChain msgs) { ThrottlePolicyConfiguration oldPolicyConfiguration = policyConfiguration; policyConfiguration = newPolicyConfiguration; if (eNotificationRequired()) { ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EsbPackage.THROTTLE_MEDIATOR__POLICY_CONFIGURATION, oldPolicyConfiguration, newPolicyConfiguration); if (msgs == null) msgs = notification; else msgs.add(notification); } return msgs; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setPolicyConfiguration(ThrottlePolicyConfiguration newPolicyConfiguration) { if (newPolicyConfiguration != policyConfiguration) { NotificationChain msgs = null; if (policyConfiguration != null) msgs = ((InternalEObject)policyConfiguration).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - EsbPackage.THROTTLE_MEDIATOR__POLICY_CONFIGURATION, null, msgs); if (newPolicyConfiguration != null) msgs = ((InternalEObject)newPolicyConfiguration).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - EsbPackage.THROTTLE_MEDIATOR__POLICY_CONFIGURATION, null, msgs); msgs = 
basicSetPolicyConfiguration(newPolicyConfiguration, msgs); if (msgs != null) msgs.dispatch(); } else if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.THROTTLE_MEDIATOR__POLICY_CONFIGURATION, newPolicyConfiguration, newPolicyConfiguration)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public ThrottleOnAcceptBranch getOnAcceptBranch() { return onAcceptBranch; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetOnAcceptBranch(ThrottleOnAcceptBranch newOnAcceptBranch, NotificationChain msgs) { ThrottleOnAcceptBranch oldOnAcceptBranch = onAcceptBranch; onAcceptBranch = newOnAcceptBranch; if (eNotificationRequired()) { ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_BRANCH, oldOnAcceptBranch, newOnAcceptBranch); if (msgs == null) msgs = notification; else msgs.add(notification); } return msgs; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setOnAcceptBranch(ThrottleOnAcceptBranch newOnAcceptBranch) { if (newOnAcceptBranch != onAcceptBranch) { NotificationChain msgs = null; if (onAcceptBranch != null) msgs = ((InternalEObject)onAcceptBranch).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_BRANCH, null, msgs); if (newOnAcceptBranch != null) msgs = ((InternalEObject)newOnAcceptBranch).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_BRANCH, null, msgs); msgs = basicSetOnAcceptBranch(newOnAcceptBranch, msgs); if (msgs != null) msgs.dispatch(); } else if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_BRANCH, newOnAcceptBranch, newOnAcceptBranch)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public ThrottleOnRejectBranch getOnRejectBranch() { return onRejectBranch; } /** * <!-- 
       begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetOnRejectBranch(ThrottleOnRejectBranch newOnRejectBranch, NotificationChain msgs) {
        ThrottleOnRejectBranch oldOnRejectBranch = onRejectBranch;
        onRejectBranch = newOnRejectBranch;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET,
                    EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_BRANCH, oldOnRejectBranch, newOnRejectBranch);
            if (msgs == null)
                msgs = notification;
            else
                msgs.add(notification);
        }
        return msgs;
    }

    /**
     * Replaces the contained on-reject branch, maintaining inverse references
     * and dispatching accumulated notifications.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public void setOnRejectBranch(ThrottleOnRejectBranch newOnRejectBranch) {
        if (newOnRejectBranch != onRejectBranch) {
            NotificationChain msgs = null;
            if (onRejectBranch != null)
                msgs = ((InternalEObject) onRejectBranch).eInverseRemove(this,
                        EOPPOSITE_FEATURE_BASE - EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_BRANCH, null, msgs);
            if (newOnRejectBranch != null)
                msgs = ((InternalEObject) newOnRejectBranch).eInverseAdd(this,
                        EOPPOSITE_FEATURE_BASE - EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_BRANCH, null, msgs);
            msgs = basicSetOnRejectBranch(newOnRejectBranch, msgs);
            if (msgs != null)
                msgs.dispatch();
        } else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_BRANCH,
                    newOnRejectBranch, newOnRejectBranch));
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public ThrottleMediatorInputConnector getInputConnector() {
        return inputConnector;
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetInputConnector(ThrottleMediatorInputConnector newInputConnector,
            NotificationChain msgs) {
        ThrottleMediatorInputConnector oldInputConnector = inputConnector;
        inputConnector = newInputConnector;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET,
                    EsbPackage.THROTTLE_MEDIATOR__INPUT_CONNECTOR, oldInputConnector, newInputConnector);
            if (msgs == null)
                msgs = notification;
            else
                msgs.add(notification);
        }
        return msgs;
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public void setInputConnector(ThrottleMediatorInputConnector newInputConnector) {
        if (newInputConnector != inputConnector) {
            NotificationChain msgs = null;
            if (inputConnector != null)
                msgs = ((InternalEObject) inputConnector).eInverseRemove(this,
                        EOPPOSITE_FEATURE_BASE - EsbPackage.THROTTLE_MEDIATOR__INPUT_CONNECTOR, null, msgs);
            if (newInputConnector != null)
                msgs = ((InternalEObject) newInputConnector).eInverseAdd(this,
                        EOPPOSITE_FEATURE_BASE - EsbPackage.THROTTLE_MEDIATOR__INPUT_CONNECTOR, null, msgs);
            msgs = basicSetInputConnector(newInputConnector, msgs);
            if (msgs != null)
                msgs.dispatch();
        } else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.THROTTLE_MEDIATOR__INPUT_CONNECTOR,
                    newInputConnector, newInputConnector));
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public ThrottleMediatorOutputConnector getOutputConnector() {
        return outputConnector;
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetOutputConnector(ThrottleMediatorOutputConnector newOutputConnector,
            NotificationChain msgs) {
        ThrottleMediatorOutputConnector oldOutputConnector = outputConnector;
        outputConnector = newOutputConnector;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET,
                    EsbPackage.THROTTLE_MEDIATOR__OUTPUT_CONNECTOR, oldOutputConnector, newOutputConnector);
            if (msgs == null)
                msgs = notification;
            else
                msgs.add(notification);
        }
        return msgs;
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public void setOutputConnector(ThrottleMediatorOutputConnector newOutputConnector) {
        if (newOutputConnector != outputConnector) {
            NotificationChain msgs = null;
            if (outputConnector != null)
                msgs = ((InternalEObject) outputConnector).eInverseRemove(this,
                        EOPPOSITE_FEATURE_BASE - EsbPackage.THROTTLE_MEDIATOR__OUTPUT_CONNECTOR, null, msgs);
            if (newOutputConnector != null)
                msgs = ((InternalEObject) newOutputConnector).eInverseAdd(this,
                        EOPPOSITE_FEATURE_BASE - EsbPackage.THROTTLE_MEDIATOR__OUTPUT_CONNECTOR, null, msgs);
            msgs = basicSetOutputConnector(newOutputConnector, msgs);
            if (msgs != null)
                msgs.dispatch();
        } else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.THROTTLE_MEDIATOR__OUTPUT_CONNECTOR,
                    newOutputConnector, newOutputConnector));
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public ThrottleMediatorOnAcceptOutputConnector getOnAcceptOutputConnector() {
        return onAcceptOutputConnector;
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetOnAcceptOutputConnector(
            ThrottleMediatorOnAcceptOutputConnector newOnAcceptOutputConnector, NotificationChain msgs) {
        ThrottleMediatorOnAcceptOutputConnector oldOnAcceptOutputConnector = onAcceptOutputConnector;
        onAcceptOutputConnector = newOnAcceptOutputConnector;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET,
                    EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_OUTPUT_CONNECTOR, oldOnAcceptOutputConnector,
                    newOnAcceptOutputConnector);
            if (msgs == null)
                msgs = notification;
            else
                msgs.add(notification);
        }
        return msgs;
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public void setOnAcceptOutputConnector(ThrottleMediatorOnAcceptOutputConnector newOnAcceptOutputConnector) {
        if (newOnAcceptOutputConnector != onAcceptOutputConnector) {
            NotificationChain msgs = null;
            if (onAcceptOutputConnector != null)
                msgs = ((InternalEObject) onAcceptOutputConnector).eInverseRemove(this,
                        EOPPOSITE_FEATURE_BASE - EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_OUTPUT_CONNECTOR, null, msgs);
            if (newOnAcceptOutputConnector != null)
                msgs = ((InternalEObject) newOnAcceptOutputConnector).eInverseAdd(this,
                        EOPPOSITE_FEATURE_BASE - EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_OUTPUT_CONNECTOR, null, msgs);
            msgs = basicSetOnAcceptOutputConnector(newOnAcceptOutputConnector, msgs);
            if (msgs != null)
                msgs.dispatch();
        } else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET,
                    EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_OUTPUT_CONNECTOR, newOnAcceptOutputConnector,
                    newOnAcceptOutputConnector));
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public ThrottleMediatorOnRejectOutputConnector getOnRejectOutputConnector() {
        return onRejectOutputConnector;
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetOnRejectOutputConnector(
            ThrottleMediatorOnRejectOutputConnector newOnRejectOutputConnector, NotificationChain msgs) {
        ThrottleMediatorOnRejectOutputConnector oldOnRejectOutputConnector = onRejectOutputConnector;
        onRejectOutputConnector = newOnRejectOutputConnector;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET,
                    EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_OUTPUT_CONNECTOR, oldOnRejectOutputConnector,
                    newOnRejectOutputConnector);
            if (msgs == null)
                msgs = notification;
            else
                msgs.add(notification);
        }
        return msgs;
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public void setOnRejectOutputConnector(ThrottleMediatorOnRejectOutputConnector newOnRejectOutputConnector) {
        if (newOnRejectOutputConnector != onRejectOutputConnector) {
            NotificationChain msgs = null;
            if (onRejectOutputConnector != null)
                msgs = ((InternalEObject) onRejectOutputConnector).eInverseRemove(this,
                        EOPPOSITE_FEATURE_BASE - EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_OUTPUT_CONNECTOR, null, msgs);
            if (newOnRejectOutputConnector != null)
                msgs = ((InternalEObject) newOnRejectOutputConnector).eInverseAdd(this,
                        EOPPOSITE_FEATURE_BASE - EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_OUTPUT_CONNECTOR, null, msgs);
            msgs = basicSetOnRejectOutputConnector(newOnRejectOutputConnector, msgs);
            if (msgs != null)
                msgs.dispatch();
        } else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET,
                    EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_OUTPUT_CONNECTOR, newOnRejectOutputConnector,
                    newOnRejectOutputConnector));
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public ThrottleContainer getThrottleContainer() {
        return throttleContainer;
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetThrottleContainer(ThrottleContainer newThrottleContainer, NotificationChain msgs) {
        ThrottleContainer oldThrottleContainer = throttleContainer;
        throttleContainer = newThrottleContainer;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET,
                    EsbPackage.THROTTLE_MEDIATOR__THROTTLE_CONTAINER, oldThrottleContainer, newThrottleContainer);
            if (msgs == null)
                msgs = notification;
            else
                msgs.add(notification);
        }
        return msgs;
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public void setThrottleContainer(ThrottleContainer newThrottleContainer) {
        if (newThrottleContainer != throttleContainer) {
            NotificationChain msgs = null;
            if (throttleContainer != null)
                msgs = ((InternalEObject) throttleContainer).eInverseRemove(this,
                        EOPPOSITE_FEATURE_BASE - EsbPackage.THROTTLE_MEDIATOR__THROTTLE_CONTAINER, null, msgs);
            if (newThrottleContainer != null)
                msgs = ((InternalEObject) newThrottleContainer).eInverseAdd(this,
                        EOPPOSITE_FEATURE_BASE - EsbPackage.THROTTLE_MEDIATOR__THROTTLE_CONTAINER, null, msgs);
            msgs = basicSetThrottleContainer(newThrottleContainer, msgs);
            if (msgs != null)
                msgs.dispatch();
        } else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.THROTTLE_MEDIATOR__THROTTLE_CONTAINER,
                    newThrottleContainer, newThrottleContainer));
    }

    /**
   * Returns the on-accept branch sequence type attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public ThrottleSequenceType getOnAcceptBranchsequenceType() {
    return onAcceptBranchsequenceType;
  }

  /**
   * Sets the on-accept branch sequence type; a {@code null} argument resets the
   * attribute to its generated default.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setOnAcceptBranchsequenceType(ThrottleSequenceType newOnAcceptBranchsequenceType) {
    ThrottleSequenceType oldOnAcceptBranchsequenceType = onAcceptBranchsequenceType;
    onAcceptBranchsequenceType = newOnAcceptBranchsequenceType == null ? ON_ACCEPT_BRANCHSEQUENCE_TYPE_EDEFAULT : newOnAcceptBranchsequenceType;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_BRANCHSEQUENCE_TYPE, oldOnAcceptBranchsequenceType, onAcceptBranchsequenceType));
  }

  /**
   * Returns the on-accept branch sequence registry key, or {@code null} if unset.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public RegistryKeyProperty getOnAcceptBranchsequenceKey() {
    return onAcceptBranchsequenceKey;
  }

  /**
   * Stores the new on-accept branch sequence key and queues a SET notification
   * (EMF "basicSet" contract — no inverse handling here).
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public NotificationChain basicSetOnAcceptBranchsequenceKey(RegistryKeyProperty newOnAcceptBranchsequenceKey, NotificationChain msgs) {
    RegistryKeyProperty oldOnAcceptBranchsequenceKey = onAcceptBranchsequenceKey;
    onAcceptBranchsequenceKey = newOnAcceptBranchsequenceKey;
    if (eNotificationRequired()) {
      ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_BRANCHSEQUENCE_KEY, oldOnAcceptBranchsequenceKey, newOnAcceptBranchsequenceKey);
      if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
  }

  /**
   * Public setter for the on-accept branch sequence key; re-wires inverse
   * references and dispatches the resulting notification chain.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setOnAcceptBranchsequenceKey(RegistryKeyProperty newOnAcceptBranchsequenceKey) {
    if (newOnAcceptBranchsequenceKey != onAcceptBranchsequenceKey) {
      NotificationChain msgs = null;
      if (onAcceptBranchsequenceKey != null)
        msgs = ((InternalEObject)onAcceptBranchsequenceKey).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_BRANCHSEQUENCE_KEY, null, msgs);
      if (newOnAcceptBranchsequenceKey != null)
        msgs = ((InternalEObject)newOnAcceptBranchsequenceKey).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_BRANCHSEQUENCE_KEY, null, msgs);
      msgs = basicSetOnAcceptBranchsequenceKey(newOnAcceptBranchsequenceKey, msgs);
      if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_BRANCHSEQUENCE_KEY, newOnAcceptBranchsequenceKey, newOnAcceptBranchsequenceKey));
  }

  /**
   * Returns the on-reject branch sequence type attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public ThrottleSequenceType getOnRejectBranchsequenceType() {
    return onRejectBranchsequenceType;
  }

  /**
   * Sets the on-reject branch sequence type; a {@code null} argument resets the
   * attribute to its generated default.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setOnRejectBranchsequenceType(ThrottleSequenceType newOnRejectBranchsequenceType) {
    ThrottleSequenceType oldOnRejectBranchsequenceType = onRejectBranchsequenceType;
    onRejectBranchsequenceType = newOnRejectBranchsequenceType == null ? ON_REJECT_BRANCHSEQUENCE_TYPE_EDEFAULT : newOnRejectBranchsequenceType;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_BRANCHSEQUENCE_TYPE, oldOnRejectBranchsequenceType, onRejectBranchsequenceType));
  }

  /**
   * Returns the on-reject branch sequence registry key, or {@code null} if unset.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public RegistryKeyProperty getOnRejectBranchsequenceKey() {
    return onRejectBranchsequenceKey;
  }

  /**
   * Stores the new on-reject branch sequence key and queues a SET notification
   * (EMF "basicSet" contract — no inverse handling here).
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public NotificationChain basicSetOnRejectBranchsequenceKey(RegistryKeyProperty newOnRejectBranchsequenceKey, NotificationChain msgs) {
    RegistryKeyProperty oldOnRejectBranchsequenceKey = onRejectBranchsequenceKey;
    onRejectBranchsequenceKey = newOnRejectBranchsequenceKey;
    if (eNotificationRequired()) {
      ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_BRANCHSEQUENCE_KEY, oldOnRejectBranchsequenceKey, newOnRejectBranchsequenceKey);
      if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
  }

  /**
   * Public setter for the on-reject branch sequence key; re-wires inverse
   * references and dispatches the resulting notification chain.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setOnRejectBranchsequenceKey(RegistryKeyProperty newOnRejectBranchsequenceKey) {
    if (newOnRejectBranchsequenceKey != onRejectBranchsequenceKey) {
      NotificationChain msgs = null;
      if (onRejectBranchsequenceKey != null)
        msgs = ((InternalEObject)onRejectBranchsequenceKey).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_BRANCHSEQUENCE_KEY, null, msgs);
      if (newOnRejectBranchsequenceKey != null)
        msgs = ((InternalEObject)newOnRejectBranchsequenceKey).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_BRANCHSEQUENCE_KEY, null, msgs);
      msgs = basicSetOnRejectBranchsequenceKey(newOnRejectBranchsequenceKey, msgs);
      if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET,
          EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_BRANCHSEQUENCE_KEY, newOnRejectBranchsequenceKey, newOnRejectBranchsequenceKey));
  }

  /**
   * Detaches a contained child identified by {@code featureID}, delegating to
   * the matching generated basicSet/basicRemove so notifications accumulate in
   * {@code msgs}.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
    switch (featureID) {
      case EsbPackage.THROTTLE_MEDIATOR__POLICY_KEY:
        return basicSetPolicyKey(null, msgs);
      case EsbPackage.THROTTLE_MEDIATOR__POLICY_ENTRIES:
        return ((InternalEList<?>)getPolicyEntries()).basicRemove(otherEnd, msgs);
      case EsbPackage.THROTTLE_MEDIATOR__POLICY_CONFIGURATION:
        return basicSetPolicyConfiguration(null, msgs);
      case EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_BRANCH:
        return basicSetOnAcceptBranch(null, msgs);
      case EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_BRANCH:
        return basicSetOnRejectBranch(null, msgs);
      case EsbPackage.THROTTLE_MEDIATOR__INPUT_CONNECTOR:
        return basicSetInputConnector(null, msgs);
      case EsbPackage.THROTTLE_MEDIATOR__OUTPUT_CONNECTOR:
        return basicSetOutputConnector(null, msgs);
      case EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_OUTPUT_CONNECTOR:
        return basicSetOnAcceptOutputConnector(null, msgs);
      case EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_OUTPUT_CONNECTOR:
        return basicSetOnRejectOutputConnector(null, msgs);
      case EsbPackage.THROTTLE_MEDIATOR__THROTTLE_CONTAINER:
        return basicSetThrottleContainer(null, msgs);
      case EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_BRANCHSEQUENCE_KEY:
        return basicSetOnAcceptBranchsequenceKey(null, msgs);
      case EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_BRANCHSEQUENCE_KEY:
        return basicSetOnRejectBranchsequenceKey(null, msgs);
    }
    return super.eInverseRemove(otherEnd, featureID, msgs);
  }

  /**
   * Reflective read access: routes a feature ID to its typed getter.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public Object eGet(int featureID, boolean resolve, boolean coreType) {
    switch (featureID) {
      case EsbPackage.THROTTLE_MEDIATOR__GROUP_ID:
        return getGroupId();
      case EsbPackage.THROTTLE_MEDIATOR__POLICY_TYPE:
        return getPolicyType();
      case EsbPackage.THROTTLE_MEDIATOR__POLICY_KEY:
        return getPolicyKey();
      case EsbPackage.THROTTLE_MEDIATOR__MAX_CONCURRENT_ACCESS_COUNT:
        return getMaxConcurrentAccessCount();
      case EsbPackage.THROTTLE_MEDIATOR__POLICY_ENTRIES:
        return getPolicyEntries();
      case EsbPackage.THROTTLE_MEDIATOR__POLICY_CONFIGURATION:
        return getPolicyConfiguration();
      case EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_BRANCH:
        return getOnAcceptBranch();
      case EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_BRANCH:
        return getOnRejectBranch();
      case EsbPackage.THROTTLE_MEDIATOR__INPUT_CONNECTOR:
        return getInputConnector();
      case EsbPackage.THROTTLE_MEDIATOR__OUTPUT_CONNECTOR:
        return getOutputConnector();
      case EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_OUTPUT_CONNECTOR:
        return getOnAcceptOutputConnector();
      case EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_OUTPUT_CONNECTOR:
        return getOnRejectOutputConnector();
      case EsbPackage.THROTTLE_MEDIATOR__THROTTLE_CONTAINER:
        return getThrottleContainer();
      case EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_BRANCHSEQUENCE_TYPE:
        return getOnAcceptBranchsequenceType();
      case EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_BRANCHSEQUENCE_KEY:
        return getOnAcceptBranchsequenceKey();
      case EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_BRANCHSEQUENCE_TYPE:
        return getOnRejectBranchsequenceType();
      case EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_BRANCHSEQUENCE_KEY:
        return getOnRejectBranchsequenceKey();
    }
    return super.eGet(featureID, resolve, coreType);
  }

  /**
   * Reflective write access: routes a feature ID to its typed setter, casting
   * {@code newValue} to the feature's declared type.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @SuppressWarnings("unchecked")
  @Override
  public void eSet(int featureID, Object newValue) {
    switch (featureID) {
      case EsbPackage.THROTTLE_MEDIATOR__GROUP_ID:
        setGroupId((String)newValue);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__POLICY_TYPE:
        setPolicyType((ThrottlePolicyType)newValue);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__POLICY_KEY:
        setPolicyKey((RegistryKeyProperty)newValue);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__MAX_CONCURRENT_ACCESS_COUNT:
        setMaxConcurrentAccessCount((Integer)newValue);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__POLICY_ENTRIES:
        // Many-valued feature: replace the whole list contents.
        getPolicyEntries().clear();
        getPolicyEntries().addAll((Collection<? extends ThrottlePolicyEntry>)newValue);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__POLICY_CONFIGURATION:
        setPolicyConfiguration((ThrottlePolicyConfiguration)newValue);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_BRANCH:
        setOnAcceptBranch((ThrottleOnAcceptBranch)newValue);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_BRANCH:
        setOnRejectBranch((ThrottleOnRejectBranch)newValue);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__INPUT_CONNECTOR:
        setInputConnector((ThrottleMediatorInputConnector)newValue);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__OUTPUT_CONNECTOR:
        setOutputConnector((ThrottleMediatorOutputConnector)newValue);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_OUTPUT_CONNECTOR:
        setOnAcceptOutputConnector((ThrottleMediatorOnAcceptOutputConnector)newValue);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_OUTPUT_CONNECTOR:
        setOnRejectOutputConnector((ThrottleMediatorOnRejectOutputConnector)newValue);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__THROTTLE_CONTAINER:
        setThrottleContainer((ThrottleContainer)newValue);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_BRANCHSEQUENCE_TYPE:
        setOnAcceptBranchsequenceType((ThrottleSequenceType)newValue);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_BRANCHSEQUENCE_KEY:
        setOnAcceptBranchsequenceKey((RegistryKeyProperty)newValue);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_BRANCHSEQUENCE_TYPE:
        setOnRejectBranchsequenceType((ThrottleSequenceType)newValue);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_BRANCHSEQUENCE_KEY:
        setOnRejectBranchsequenceKey((RegistryKeyProperty)newValue);
        return;
    }
    super.eSet(featureID, newValue);
  }

  /**
   * Reflective unset: restores each feature to its generated default
   * ({@code *_EDEFAULT} for attributes, {@code null}/empty for references).
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eUnset(int featureID) {
    switch (featureID) {
      case EsbPackage.THROTTLE_MEDIATOR__GROUP_ID:
        setGroupId(GROUP_ID_EDEFAULT);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__POLICY_TYPE:
        setPolicyType(POLICY_TYPE_EDEFAULT);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__POLICY_KEY:
        setPolicyKey((RegistryKeyProperty)null);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__MAX_CONCURRENT_ACCESS_COUNT:
        setMaxConcurrentAccessCount(MAX_CONCURRENT_ACCESS_COUNT_EDEFAULT);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__POLICY_ENTRIES:
        getPolicyEntries().clear();
        return;
      case EsbPackage.THROTTLE_MEDIATOR__POLICY_CONFIGURATION:
        setPolicyConfiguration((ThrottlePolicyConfiguration)null);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_BRANCH:
        setOnAcceptBranch((ThrottleOnAcceptBranch)null);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_BRANCH:
        setOnRejectBranch((ThrottleOnRejectBranch)null);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__INPUT_CONNECTOR:
        setInputConnector((ThrottleMediatorInputConnector)null);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__OUTPUT_CONNECTOR:
        setOutputConnector((ThrottleMediatorOutputConnector)null);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_OUTPUT_CONNECTOR:
        setOnAcceptOutputConnector((ThrottleMediatorOnAcceptOutputConnector)null);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_OUTPUT_CONNECTOR:
        setOnRejectOutputConnector((ThrottleMediatorOnRejectOutputConnector)null);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__THROTTLE_CONTAINER:
        setThrottleContainer((ThrottleContainer)null);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_BRANCHSEQUENCE_TYPE:
        setOnAcceptBranchsequenceType(ON_ACCEPT_BRANCHSEQUENCE_TYPE_EDEFAULT);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_BRANCHSEQUENCE_KEY:
        setOnAcceptBranchsequenceKey((RegistryKeyProperty)null);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_BRANCHSEQUENCE_TYPE:
        setOnRejectBranchsequenceType(ON_REJECT_BRANCHSEQUENCE_TYPE_EDEFAULT);
        return;
      case EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_BRANCHSEQUENCE_KEY:
        setOnRejectBranchsequenceKey((RegistryKeyProperty)null);
        return;
    }
    super.eUnset(featureID);
  }

  /**
   * Reflective "is set" check: a feature is set when it differs from its
   * generated default (non-null for references, non-empty for lists).
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public boolean eIsSet(int featureID) {
    switch (featureID) {
      case EsbPackage.THROTTLE_MEDIATOR__GROUP_ID:
        return GROUP_ID_EDEFAULT == null ? groupId != null : !GROUP_ID_EDEFAULT.equals(groupId);
      case EsbPackage.THROTTLE_MEDIATOR__POLICY_TYPE:
        return policyType != POLICY_TYPE_EDEFAULT;
      case EsbPackage.THROTTLE_MEDIATOR__POLICY_KEY:
        return policyKey != null;
      case EsbPackage.THROTTLE_MEDIATOR__MAX_CONCURRENT_ACCESS_COUNT:
        return maxConcurrentAccessCount != MAX_CONCURRENT_ACCESS_COUNT_EDEFAULT;
      case EsbPackage.THROTTLE_MEDIATOR__POLICY_ENTRIES:
        return policyEntries != null && !policyEntries.isEmpty();
      case EsbPackage.THROTTLE_MEDIATOR__POLICY_CONFIGURATION:
        return policyConfiguration != null;
      case EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_BRANCH:
        return onAcceptBranch != null;
      case EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_BRANCH:
        return onRejectBranch != null;
      case EsbPackage.THROTTLE_MEDIATOR__INPUT_CONNECTOR:
        return inputConnector != null;
      case EsbPackage.THROTTLE_MEDIATOR__OUTPUT_CONNECTOR:
        return outputConnector != null;
      case EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_OUTPUT_CONNECTOR:
        return onAcceptOutputConnector != null;
      case EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_OUTPUT_CONNECTOR:
        return onRejectOutputConnector != null;
      case EsbPackage.THROTTLE_MEDIATOR__THROTTLE_CONTAINER:
        return throttleContainer != null;
      case EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_BRANCHSEQUENCE_TYPE:
        return onAcceptBranchsequenceType != ON_ACCEPT_BRANCHSEQUENCE_TYPE_EDEFAULT;
      case EsbPackage.THROTTLE_MEDIATOR__ON_ACCEPT_BRANCHSEQUENCE_KEY:
        return onAcceptBranchsequenceKey != null;
      case EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_BRANCHSEQUENCE_TYPE:
        return onRejectBranchsequenceType != ON_REJECT_BRANCHSEQUENCE_TYPE_EDEFAULT;
      case EsbPackage.THROTTLE_MEDIATOR__ON_REJECT_BRANCHSEQUENCE_KEY:
        return onRejectBranchsequenceKey != null;
    }
    return super.eIsSet(featureID);
  }

  /**
   * Debug string listing the attribute values only (contained references are
   * omitted by the generator).
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public String toString() {
    if (eIsProxy()) return super.toString();
    StringBuffer result = new StringBuffer(super.toString());
    result.append(" (groupId: ");
    result.append(groupId);
    result.append(", policyType: ");
    result.append(policyType);
    result.append(", maxConcurrentAccessCount: ");
    result.append(maxConcurrentAccessCount);
    result.append(", OnAcceptBranchsequenceType: ");
    result.append(onAcceptBranchsequenceType);
    result.append(", OnRejectBranchsequenceType: ");
    result.append(onRejectBranchsequenceType);
    result.append(')');
    return result.toString();
  }

} //ThrottleMediatorImpl
/* * Copyright 2012-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.configurationprocessor; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; import javax.annotation.processing.Messager; import javax.annotation.processing.ProcessingEnvironment; import javax.lang.model.element.AnnotationMirror; import javax.lang.model.element.AnnotationValue; import javax.lang.model.element.Element; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.TypeElement; import javax.lang.model.element.VariableElement; import javax.lang.model.type.TypeKind; import javax.lang.model.type.TypeMirror; import javax.lang.model.util.Elements; import org.springframework.boot.configurationprocessor.fieldvalues.FieldValuesParser; import org.springframework.boot.configurationprocessor.fieldvalues.javac.JavaCompilerFieldValuesParser; import org.springframework.boot.configurationprocessor.metadata.ItemDeprecation; /** * Provide utilities to detect and validate configuration properties. 
 *
 * @author Stephane Nicoll
 */
class MetadataGenerationEnvironment {

	private static final String NULLABLE_ANNOTATION = "org.springframework.lang.Nullable";

	// Property types never treated as nested configuration — third-party
	// callback/infrastructure interfaces that would otherwise be introspected.
	private static final Set<String> TYPE_EXCLUDES;

	static {
		Set<String> excludes = new HashSet<>();
		excludes.add("com.zaxxer.hikari.IConnectionCustomizer");
		excludes.add("groovy.lang.MetaClass");
		excludes.add("groovy.text.markup.MarkupTemplateEngine");
		excludes.add("java.io.Writer");
		excludes.add("java.io.PrintWriter");
		excludes.add("java.lang.ClassLoader");
		excludes.add("java.util.concurrent.ThreadFactory");
		excludes.add("javax.jms.XAConnectionFactory");
		excludes.add("javax.sql.DataSource");
		excludes.add("javax.sql.XADataSource");
		excludes.add("org.apache.tomcat.jdbc.pool.PoolConfiguration");
		excludes.add("org.apache.tomcat.jdbc.pool.Validator");
		excludes.add("org.flywaydb.core.api.callback.FlywayCallback");
		excludes.add("org.flywaydb.core.api.resolver.MigrationResolver");
		TYPE_EXCLUDES = Collections.unmodifiableSet(excludes);
	}

	private final TypeUtils typeUtils;

	private final Elements elements;

	private final Messager messager;

	private final FieldValuesParser fieldValuesParser;

	// Cache of field default values per type, populated lazily by getFieldDefaultValue.
	private final Map<TypeElement, Map<String, Object>> defaultValues = new HashMap<>();

	// Fully-qualified annotation names are passed in as strings so the processor
	// does not need the Spring annotations on its own compile classpath.
	private final String configurationPropertiesAnnotation;

	private final String nestedConfigurationPropertyAnnotation;

	private final String deprecatedConfigurationPropertyAnnotation;

	private final String constructorBindingAnnotation;

	private final String defaultValueAnnotation;

	private final Set<String> endpointAnnotations;

	private final String readOperationAnnotation;

	private final String nameAnnotation;

	MetadataGenerationEnvironment(ProcessingEnvironment environment, String configurationPropertiesAnnotation,
			String nestedConfigurationPropertyAnnotation, String deprecatedConfigurationPropertyAnnotation,
			String constructorBindingAnnotation, String defaultValueAnnotation, Set<String> endpointAnnotations,
			String readOperationAnnotation, String nameAnnotation) {
		this.typeUtils = new TypeUtils(environment);
		this.elements = environment.getElementUtils();
		this.messager = environment.getMessager();
		this.fieldValuesParser = resolveFieldValuesParser(environment);
		this.configurationPropertiesAnnotation = configurationPropertiesAnnotation;
		this.nestedConfigurationPropertyAnnotation = nestedConfigurationPropertyAnnotation;
		this.deprecatedConfigurationPropertyAnnotation = deprecatedConfigurationPropertyAnnotation;
		this.constructorBindingAnnotation = constructorBindingAnnotation;
		this.defaultValueAnnotation = defaultValueAnnotation;
		this.endpointAnnotations = endpointAnnotations;
		this.readOperationAnnotation = readOperationAnnotation;
		this.nameAnnotation = nameAnnotation;
	}

	// The javac-specific parser is optional: fall back to a no-op parser when the
	// compiler internals are unavailable (e.g. running under a different compiler).
	private static FieldValuesParser resolveFieldValuesParser(ProcessingEnvironment env) {
		try {
			return new JavaCompilerFieldValuesParser(env);
		}
		catch (Throwable ex) {
			return FieldValuesParser.NONE;
		}
	}

	TypeUtils getTypeUtils() {
		return this.typeUtils;
	}

	Messager getMessager() {
		return this.messager;
	}

	/**
	 * Return the default value of the field with the specified {@code name}.
	 * @param type the type to consider
	 * @param name the name of the field
	 * @return the default value or {@code null} if the field does not exist or no default
	 * value has been detected
	 */
	Object getFieldDefaultValue(TypeElement type, String name) {
		return this.defaultValues.computeIfAbsent(type, this::resolveFieldValues).get(name);
	}

	// True when the (array-stripped) type name is on the exclusion list above.
	boolean isExcluded(TypeMirror type) {
		if (type == null) {
			return false;
		}
		String typeName = type.toString();
		if (typeName.endsWith("[]")) {
			typeName = typeName.substring(0, typeName.length() - 2);
		}
		return TYPE_EXCLUDES.contains(typeName);
	}

	// An element is deprecated if it, or (for fields/methods) its enclosing
	// element, carries @Deprecated or the Spring deprecation annotation.
	boolean isDeprecated(Element element) {
		if (isElementDeprecated(element)) {
			return true;
		}
		if (element instanceof VariableElement || element instanceof ExecutableElement) {
			return isElementDeprecated(element.getEnclosingElement());
		}
		return false;
	}

	// Extracts reason/replacement from @DeprecatedConfigurationProperty, mapping
	// empty strings to null so the metadata omits blank attributes.
	ItemDeprecation resolveItemDeprecation(Element element) {
		AnnotationMirror annotation = getAnnotation(element, this.deprecatedConfigurationPropertyAnnotation);
		String reason = null;
		String replacement = null;
		if (annotation != null) {
			Map<String, Object> elementValues = getAnnotationElementValues(annotation);
			reason = (String) elementValues.get("reason");
			replacement = (String) elementValues.get("replacement");
		}
		reason = (reason == null || reason.isEmpty()) ? null : reason;
		replacement = (replacement == null || replacement.isEmpty()) ? null : replacement;
		return new ItemDeprecation(reason, replacement);
	}

	// Type-level check is recursive over meta-annotations; the executable-level
	// check below is a direct presence test only.
	boolean hasConstructorBindingAnnotation(TypeElement typeElement) {
		return hasAnnotationRecursive(typeElement, this.constructorBindingAnnotation);
	}

	boolean hasConstructorBindingAnnotation(ExecutableElement element) {
		return hasAnnotation(element, this.constructorBindingAnnotation);
	}

	boolean hasAnnotation(Element element, String type) {
		return getAnnotation(element, type) != null;
	}

	// Returns the mirror whose annotation type's qualified name matches
	// {@code type}, or null; comparison is by toString of the declared type.
	AnnotationMirror getAnnotation(Element element, String type) {
		if (element != null) {
			for (AnnotationMirror annotation : element.getAnnotationMirrors()) {
				if (type.equals(annotation.getAnnotationType().toString())) {
					return annotation;
				}
			}
		}
		return null;
	}

	/**
	 * Collect the annotations that are annotated or meta-annotated with the specified
	 * {@link TypeElement annotation}.
	 * @param element the element to inspect
	 * @param annotationType the annotation to discover
	 * @return the annotations that are annotated or meta-annotated with this annotation
	 */
	List<Element> getElementsAnnotatedOrMetaAnnotatedWith(Element element, TypeElement annotationType) {
		LinkedList<Element> stack = new LinkedList<>();
		stack.push(element);
		collectElementsAnnotatedOrMetaAnnotatedWith(annotationType, stack);
		// Drop the seed element itself — only the annotation path is returned.
		stack.removeFirst();
		return Collections.unmodifiableList(stack);
	}

	private boolean hasAnnotationRecursive(Element element, String type) {
		return !getElementsAnnotatedOrMetaAnnotatedWith(element, this.elements.getTypeElement(type)).isEmpty();
	}

	// Depth-first search through the meta-annotation graph. The stack doubles as
	// the current path and the cycle guard (stack.contains); on success the path
	// to the target annotation is left on the stack, on failure the probed
	// element is popped again.
	private boolean collectElementsAnnotatedOrMetaAnnotatedWith(TypeElement annotationType, LinkedList<Element> stack) {
		Element element = stack.peekLast();
		for (AnnotationMirror annotation : this.elements.getAllAnnotationMirrors(element)) {
			Element annotationElement = annotation.getAnnotationType().asElement();
			if (!stack.contains(annotationElement)) {
				stack.addLast(annotationElement);
				if (annotationElement.equals(annotationType)) {
					return true;
				}
				if (!collectElementsAnnotatedOrMetaAnnotatedWith(annotationType, stack)) {
					stack.removeLast();
				}
			}
		}
		return false;
	}

	// Flattens an annotation's explicit element values into a simple-name -> value map.
	Map<String, Object> getAnnotationElementValues(AnnotationMirror annotation) {
		Map<String, Object> values = new LinkedHashMap<>();
		annotation.getElementValues()
				.forEach((name, value) -> values.put(name.getSimpleName().toString(), getAnnotationValue(value)));
		return values;
	}

	// Unwraps AnnotationValue; array members are unwrapped element-wise into a List.
	private Object getAnnotationValue(AnnotationValue annotationValue) {
		Object value = annotationValue.getValue();
		if (value instanceof List) {
			List<Object> values = new ArrayList<>();
			((List<?>) value).forEach((v) -> values.add(((AnnotationValue) v).getValue()));
			return values;
		}
		return value;
	}

	TypeElement getConfigurationPropertiesAnnotationElement() {
		return this.elements.getTypeElement(this.configurationPropertiesAnnotation);
	}

	AnnotationMirror getConfigurationPropertiesAnnotation(Element element) {
		return getAnnotation(element, this.configurationPropertiesAnnotation);
	}

	AnnotationMirror getNestedConfigurationPropertyAnnotation(Element element) {
		return getAnnotation(element, this.nestedConfigurationPropertyAnnotation);
	}

	AnnotationMirror getDefaultValueAnnotation(Element element) {
		return getAnnotation(element, this.defaultValueAnnotation);
	}

	// Resolves the configured endpoint annotation names to type elements,
	// silently skipping names not present on the compilation classpath.
	Set<TypeElement> getEndpointAnnotationElements() {
		return this.endpointAnnotations.stream().map(this.elements::getTypeElement).filter(Objects::nonNull)
				.collect(Collectors.toSet());
	}

	AnnotationMirror getReadOperationAnnotation(Element element) {
		return getAnnotation(element, this.readOperationAnnotation);
	}

	AnnotationMirror getNameAnnotation(Element element) {
		return getAnnotation(element, this.nameAnnotation);
	}

	boolean hasNullableAnnotation(Element element) {
		return getAnnotation(element, NULLABLE_ANNOTATION) != null;
	}

	private boolean isElementDeprecated(Element element) {
		return hasAnnotation(element, "java.lang.Deprecated")
				|| hasAnnotation(element, this.deprecatedConfigurationPropertyAnnotation);
	}

	private Map<String, Object> resolveFieldValues(TypeElement element) {
		Map<String, Object> values = new LinkedHashMap<>();
		resolveFieldValuesFor(values, element);
		return values;
	}

	// Walks up the superclass chain; subclass values win because containsKey
	// guards against overwriting an already-collected name.
	private void resolveFieldValuesFor(Map<String, Object> values, TypeElement element) {
		try {
			this.fieldValuesParser.getFieldValues(element).forEach((name, value) -> {
				if (!values.containsKey(name)) {
					values.put(name, value);
				}
			});
		}
		catch (Exception ex) {
			// continue — field-value detection is best-effort; metadata is still
			// generated without defaults when the parser fails.
		}
		Element superType = this.typeUtils.asElement(element.getSuperclass());
		if (superType instanceof TypeElement && superType.asType().getKind() != TypeKind.NONE) {
			resolveFieldValuesFor(values, (TypeElement) superType);
		}
	}

}
package org.nextrtc.signalingserver.domain;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.nextrtc.signalingserver.BaseTest;
import org.nextrtc.signalingserver.cases.JoinConversation;
import org.nextrtc.signalingserver.repository.Conversations;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;

import java.util.List;

import static org.awaitility.Awaitility.await;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.*;

/**
 * End-to-end tests for {@link Server}: drives {@link TestClientActor} clients
 * through create/join/leave flows in MESH and BROADCAST conversations and
 * verifies signal exchange, custom-signal registration and error-free runs.
 */
@ContextConfiguration(classes = {ServerEventCheck.class, LocalStreamCreated2.class})
public class ServerActorTest extends BaseTest {

    @Autowired
    protected ServerEventCheck eventCheckerCall;

    @Autowired
    protected LocalStreamCreated2 eventLocalStream;

    @Autowired
    private Server server;

    @Autowired
    private MessageSender sender;

    @Autowired
    private JoinConversation joinConversation;

    @Autowired
    private Conversations conversations;

    @Autowired
    private SignalResolver resolver;

    /** MESH conversation: members join/leave and the conversation is disposed when empty. */
    @Test
    public void shouldExchangeSignalsBetweenActors() throws Exception {
        // given
        TestClientActor john = new TestClientActor("John", server);
        TestClientActor bob = new TestClientActor("Bob", server);

        // when
        john.openSocket();
        john.create("AAA", "MESH");

        // then
        await().until(() -> conversations.findBy("AAA").isPresent());
        assertTrue(conversations.findBy("AAA").isPresent());
        Conversation conversation = conversations.findBy("AAA").get();
        assertTrue(conversation.has(john.asMember()));

        // when
        bob.openSocket();
        bob.join("AAA");
        await().until(() -> conversation.has(bob.asMember()));

        // then
        assertTrue(conversation.has(john.asMember()));
        assertTrue(conversation.has(bob.asMember()));

        // when
        bob.closeSocket();
        await().until(() -> !conversation.has(bob.asMember()));

        // then
        assertFalse(conversation.has(bob.asMember()));

        // when
        john.closeSocket();
        await().until(() -> !conversation.has(john.asMember()));

        // then
        assertFalse(conversation.has(john.asMember()));
        assertFalse(conversations.findBy("AAA").isPresent());

        assertNoErrors(john);
        assertNoErrors(bob);
    }

    /** BROADCAST conversation: same lifecycle as the MESH test. */
    @Test
    public void shouldExchangeSignalsBetweenActors_Broadcast() throws Exception {
        // given
        TestClientActor john = new TestClientActor("John", server);
        TestClientActor bob = new TestClientActor("Bob", server);

        // when
        john.openSocket();
        john.create("AAA", "BROADCAST");

        // then
        // Fix: synchronize on the async server state (the MESH variant already
        // did this) instead of asserting immediately — avoids flaky failures.
        await().until(() -> conversations.findBy("AAA").isPresent());
        assertTrue(conversations.findBy("AAA").isPresent());
        Conversation conversation = conversations.findBy("AAA").get();
        assertTrue(conversation.has(john.asMember()));

        // when
        bob.openSocket();
        bob.join("AAA");
        await().until(() -> conversation.has(bob.asMember()));

        // then
        assertTrue(conversation.has(john.asMember()));
        assertTrue(conversation.has(bob.asMember()));

        // when
        bob.closeSocket();
        await().until(() -> !conversation.has(bob.asMember()));

        // then
        assertFalse(conversation.has(bob.asMember()));

        // when
        john.closeSocket();
        await().until(() -> !conversation.has(john.asMember()));

        // then
        assertFalse(conversation.has(john.asMember()));
        assertFalse(conversations.findBy("AAA").isPresent());

        assertNoErrors(john);
        assertNoErrors(bob);
    }

    /** Broadcaster leaving first must end the conversation and notify the audience. */
    @Test
    public void shouldCheckBehaviorWhenBroadcasterWillEndConnectionFirst() throws Exception {
        // given
        TestClientActor john = new TestClientActor("John", server);
        TestClientActor bob = new TestClientActor("Bob", server);

        // when
        john.openSocket();
        john.create("AAA", "BROADCAST");
        Conversation conversation = conversations.findBy("AAA").get();
        bob.openSocket();
        bob.join("AAA");
        john.closeSocket();
        bob.closeSocket();

        // then
        assertFalse(conversation.has(bob.asMember()));
        assertFalse(conversations.findBy("AAA").isPresent());
        final Message message = bob.getMessages().stream()
                .filter(m -> m.getSignal().equals(Signals.END))
                .findFirst()
                .get();
        assertThat(message.getContent(), is("AAA"));
        assertThat(message.getSignal(), is(Signals.END));

        assertNoErrors(john);
        assertNoErrors(bob);
    }

    /** All audience members of a broadcast must observe the same signal sequence. */
    @Test
    public void shouldCheckSignalExchangeForThreeMembers() throws Exception {
        // given
        TestClientActor john = new TestClientActor("John", server);
        TestClientActor bob = new TestClientActor("Bob", server);
        TestClientActor alice = new TestClientActor("Alice", server);
        TestClientActor mike = new TestClientActor("Mike", server);

        // when
        alice.openSocket();
        mike.openSocket();
        john.openSocket();
        john.create("AAA", "BROADCAST");
        bob.openSocket();
        bob.join("AAA");
        alice.join("AAA");
        mike.join("AAA");
        john.closeSocket();
        bob.closeSocket();
        alice.closeSocket();
        mike.closeSocket();

        // then
        assertThat(bob.getMessages().size(), is(alice.getMessages().size()));
        List<Message> bobMessages = bob.getMessages();
        List<Message> aliceMessages = alice.getMessages();
        List<Message> mikeMessages = mike.getMessages();
        for (int i = 0; i < bobMessages.size(); i++) {
            Message bobMsg = bobMessages.get(i);
            Message aliceMsg = aliceMessages.get(i);
            Message mikeMsg = mikeMessages.get(i);
            assertTrue(bobMsg.getSignal().equals(aliceMsg.getSignal()));
            assertTrue(mikeMsg.getSignal().equals(aliceMsg.getSignal()));
        }
        assertNoErrors(john);
        assertNoErrors(bob);
        assertNoErrors(alice);
        assertNoErrors(mike);
    }

    /** A TEXT signal addressed to one member must reach only that member (MESH). */
    @Test
    public void shouldSendTextMessageToOtherAudience() throws Exception {
        // given
        TestClientActor john = new TestClientActor("John", server);
        TestClientActor bob = new TestClientActor("Bob", server);
        TestClientActor alice = new TestClientActor("Alice", server);
        alice.openSocket();
        bob.openSocket();
        john.openSocket();
        john.create("AAA", "MESH");
        // .get() doubles as a check that the conversation was created.
        Conversation conversation = conversations.findBy("AAA").get();
        bob.join("AAA");
        alice.join("AAA");

        // when
        john.sendToServer(Message.create()
                .to(bob.asMember().getId())
                .signal(Signals.TEXT)
                .content("Hello")
                .build());

        // then
        List<Message> messages = bob.getMessages();
        Message message = messages.get(messages.size() - 1);
        // Bug fix: the original computed equals("Hello") and discarded the
        // result, so the content was never actually asserted.
        assertThat(message.getContent(), is("Hello"));
        assertNoErrors(john);
        assertNoErrors(bob);
        assertNoErrors(alice);
    }

    /** A TEXT signal addressed to one member must reach only that member (BROADCAST). */
    @Test
    public void shouldSendTextMessageToOtherAudience_BROADCAST() throws Exception {
        // given
        TestClientActor john = new TestClientActor("John", server);
        TestClientActor bob = new TestClientActor("Bob", server);
        TestClientActor alice = new TestClientActor("Alice", server);
        alice.openSocket();
        bob.openSocket();
        john.openSocket();
        john.create("AAA", "BROADCAST");
        // .get() doubles as a check that the conversation was created.
        Conversation conversation = conversations.findBy("AAA").get();
        bob.join("AAA");
        alice.join("AAA");

        // when
        john.sendToServer(Message.create()
                .to(bob.asMember().getId())
                .signal(Signals.TEXT)
                .content("Hello")
                .build());

        // then
        List<Message> messages = bob.getMessages();
        Message message = messages.get(messages.size() - 1);
        // Bug fix: the original computed equals("Hello") and discarded the
        // result, so the content was never actually asserted.
        assertThat(message.getContent(), is("Hello"));
        assertNoErrors(john);
        assertNoErrors(bob);
        assertNoErrors(alice);
    }

    /** A freshly registered custom signal must be dispatched to its handler. */
    @Test
    public void shouldBeAbleToHandleCustomSignal() throws Exception {
        // given
        resolver.addCustomSignal(Signal.fromString("upperCase"), (message) ->
                sender.send(InternalMessage.create()//
                        .to(message.getFrom())
                        .content(message.getContent().toUpperCase())
                        .signal(Signal.fromString("upperCase"))
                        .build()
                ));
        TestClientActor john = new TestClientActor("John", server);
        john.openSocket();

        // when
        john.sendToServer(Message.create()
                .signal("upperCase")
                .content("Hello")
                .build());

        // then
        assertThat(john.getMessages().get(0).getContent(), is("HELLO"));
        assertNoErrors(john);
    }

    /** Registering a handler under a built-in signal name must override it. */
    @Test
    public void shouldOverrideExistingSignal() throws Exception {
        // given
        resolver.addCustomSignal(Signal.fromString("join"), (message) ->
                sender.send(InternalMessage.create()//
                        .to(message.getFrom())
                        .content(message.getContent().toUpperCase())
                        .signal(Signal.fromString("upperCase"))
                        .build()
                ));
        TestClientActor john = new TestClientActor("John", server);
        john.openSocket();

        // when
        john.sendToServer(Message.create()
                .signal("join")
                .content("Hello")
                .build());

        // then
        assertThat(john.getMessages().get(0).getContent(), is("HELLO"));
        assertNoErrors(john);
    }

    /** Restores the default JOIN handler so the override test cannot leak into others. */
    @After
    public void removeOverrides() {
        resolver.addCustomSignal(Signal.JOIN, joinConversation);
    }

    // Asserts the actor never received an ERROR signal during the scenario.
    private void assertNoErrors(TestClientActor actor) {
        // noneMatch is the idiomatic form of allMatch(m -> !...)
        assertTrue(actor.getMessages().stream().noneMatch(m -> m.getSignal().equals(Signals.ERROR)));
    }

    @Before
    public void resetObjects() {
        eventCheckerCall.reset();
        eventLocalStream.reset();
    }
}
/*
 * Copyright (c) 2004, 2008, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package sun.tools.jconsole.inspector;

import java.io.IOException;
import java.util.*;
import javax.management.*;
import javax.swing.*;
import javax.swing.tree.*;
import sun.tools.jconsole.JConsole;
import sun.tools.jconsole.MBeansTab;
import sun.tools.jconsole.Resources;
import sun.tools.jconsole.inspector.XNodeInfo;
import static sun.tools.jconsole.inspector.XNodeInfo.Type;

/**
 * JTree specialization that displays the MBean hierarchy in the JConsole
 * MBeans tab. MBean ObjectNames are split into domain / key-property tokens
 * (see the Dn and Token helper classes below) and mapped to tree nodes; a
 * hash-key map allows nodes to be looked up and updated incrementally.
 * Methods marked "Call on EDT" mutate the Swing tree model and must only be
 * invoked on the event dispatch thread.
 */
@SuppressWarnings("serial")
public class XTree extends JTree {

    // Key-property ordering used when building node labels; configurable via
    // the "com.sun.tools.jconsole.mbeans.keyPropertyList" system property,
    // defaulting to "type" then "j2eeType".
    private static final List<String> orderedKeyPropertyList =
            new ArrayList<String>();

    static {
        String keyPropertyList =
                System.getProperty("com.sun.tools.jconsole.mbeans.keyPropertyList");
        if (keyPropertyList == null) {
            orderedKeyPropertyList.add("type");
            orderedKeyPropertyList.add("j2eeType");
        } else {
            StringTokenizer st = new StringTokenizer(keyPropertyList, ",");
            while (st.hasMoreTokens()) {
                orderedKeyPropertyList.add(st.nextToken());
            }
        }
    }
    private MBeansTab mbeansTab;
    // Maps a Dn hash key to the tree node that represents it.
    private Map<String, DefaultMutableTreeNode> nodes =
            new HashMap<String, DefaultMutableTreeNode>();

    public XTree(MBeansTab mbeansTab) {
        this(new DefaultMutableTreeNode("MBeanTreeRootNode"), mbeansTab);
    }

    public XTree(TreeNode root, MBeansTab mbeansTab) {
        super(root, true);
        this.mbeansTab = mbeansTab;
        setRootVisible(false);
        setShowsRootHandles(true);
        ToolTipManager.sharedInstance().registerComponent(this);
    }

    /**
     * This method removes the node from its parent
     */
    // Call on EDT
    private synchronized void removeChildNode(DefaultMutableTreeNode child) {
        DefaultTreeModel model = (DefaultTreeModel) getModel();
        model.removeNodeFromParent(child);
    }

    /**
     * This method adds the child to the specified parent node
     * at specific index.
     */
    // Call on EDT
    private synchronized void addChildNode(
            DefaultMutableTreeNode parent,
            DefaultMutableTreeNode child,
            int index) {
        DefaultTreeModel model = (DefaultTreeModel) getModel();
        model.insertNodeInto(child, parent, index);
    }

    /**
     * This method adds the child to the specified parent node.
     * The index where the child is to be added depends on the
     * child node being Comparable or not. If the child node is
     * not Comparable then it is added at the end, i.e. right
     * after the current parent's children.
     */
    // Call on EDT
    private synchronized void addChildNode(
            DefaultMutableTreeNode parent, DefaultMutableTreeNode child) {
        int childCount = parent.getChildCount();
        if (childCount == 0) {
            addChildNode(parent, child, 0);
            return;
        }
        if (child instanceof ComparableDefaultMutableTreeNode) {
            ComparableDefaultMutableTreeNode comparableChild =
                    (ComparableDefaultMutableTreeNode) child;
            // Walk siblings from the end to find the insertion point that
            // keeps the children sorted.
            for (int i = childCount - 1; i >= 0; i--) {
                DefaultMutableTreeNode brother =
                        (DefaultMutableTreeNode) parent.getChildAt(i);
                // expr1: child node must be inserted after metadata nodes
                // - OR -
                // expr2: "child >= brother"
                if ((i <= 2 && isMetadataNode(brother)) ||
                        comparableChild.compareTo(brother) >= 0) {
                    addChildNode(parent, child, i + 1);
                    return;
                }
            }
            // "child < all brothers", add at the beginning
            addChildNode(parent, child, 0);
            return;
        }
        // "child not comparable", add at the end
        addChildNode(parent, child, childCount);
    }

    /**
     * This method removes all the displayed nodes from the tree,
     * but does not affect actual MBeanServer contents.
     */
    // Call on EDT
    @Override
    public synchronized void removeAll() {
        DefaultTreeModel model = (DefaultTreeModel) getModel();
        DefaultMutableTreeNode root = (DefaultMutableTreeNode) model.getRoot();
        root.removeAllChildren();
        model.nodeStructureChanged(root);
        nodes.clear();
    }

    // Call on EDT
    public synchronized void removeMBeanFromView(ObjectName mbean) {
        // We assume here that MBeans are removed one by one (on MBean
        // unregistered notification). Deletes the tree node associated
        // with the given MBean and recursively all the node parents
        // which are leaves and non XMBean.
        //
        DefaultMutableTreeNode node = null;
        Dn dn = new Dn(mbean);
        if (dn.getTokenCount() > 0) {
            DefaultTreeModel model = (DefaultTreeModel) getModel();
            Token token = dn.getToken(0);
            String hashKey = dn.getHashKey(token);
            node = nodes.get(hashKey);
            if ((node != null) && (!node.isRoot())) {
                if (hasNonMetadataNodes(node)) {
                    // The node still has non-metadata children: demote it to a
                    // plain (non-MBean) intermediate node instead of deleting.
                    removeMetadataNodes(node);
                    String label = token.getValue();
                    XNodeInfo userObject = new XNodeInfo(
                            Type.NONMBEAN, label, label, token.getTokenValue());
                    changeNodeValue(node, userObject);
                } else {
                    DefaultMutableTreeNode parent =
                            (DefaultMutableTreeNode) node.getParent();
                    model.removeNodeFromParent(node);
                    nodes.remove(hashKey);
                    removeParentFromView(dn, 1, parent);
                }
            }
        }
    }

    /**
     * Returns true if any of the children nodes is a non MBean metadata node.
     */
    private boolean hasNonMetadataNodes(DefaultMutableTreeNode node) {
        for (Enumeration e = node.children(); e.hasMoreElements();) {
            DefaultMutableTreeNode n = (DefaultMutableTreeNode) e.nextElement();
            Object uo = n.getUserObject();
            if (uo instanceof XNodeInfo) {
                switch (((XNodeInfo) uo).getType()) {
                    case ATTRIBUTES:
                    case NOTIFICATIONS:
                    case OPERATIONS:
                        break;
                    default:
                        return true;
                }
            } else {
                // A child without XNodeInfo user object counts as non-metadata.
                return true;
            }
        }
        return false;
    }

    /**
     * Returns true if any of the children nodes is an MBean metadata node.
     */
    public boolean hasMetadataNodes(DefaultMutableTreeNode node) {
        for (Enumeration e = node.children(); e.hasMoreElements();) {
            DefaultMutableTreeNode n = (DefaultMutableTreeNode) e.nextElement();
            Object uo = n.getUserObject();
            if (uo instanceof XNodeInfo) {
                switch (((XNodeInfo) uo).getType()) {
                    case ATTRIBUTES:
                    case NOTIFICATIONS:
                    case OPERATIONS:
                        return true;
                    default:
                        break;
                }
            } else {
                // NOTE(review): returns false on the first child without an
                // XNodeInfo instead of continuing the scan — asymmetric with
                // hasNonMetadataNodes above; confirm this is intentional.
                return false;
            }
        }
        return false;
    }

    /**
     * Returns true if the given node is an MBean metadata node.
     */
    public boolean isMetadataNode(DefaultMutableTreeNode node) {
        Object uo = node.getUserObject();
        if (uo instanceof XNodeInfo) {
            switch (((XNodeInfo) uo).getType()) {
                case ATTRIBUTES:
                case NOTIFICATIONS:
                case OPERATIONS:
                    return true;
                default:
                    return false;
            }
        } else {
            return false;
        }
    }

    /**
     * Remove the metadata nodes associated with a given MBean node.
     */
    // Call on EDT
    private void removeMetadataNodes(DefaultMutableTreeNode node) {
        // Collect first, then remove: removing while iterating the
        // children() enumeration would invalidate it.
        Set<DefaultMutableTreeNode> metadataNodes =
                new HashSet<DefaultMutableTreeNode>();
        DefaultTreeModel model = (DefaultTreeModel) getModel();
        for (Enumeration e = node.children(); e.hasMoreElements();) {
            DefaultMutableTreeNode n = (DefaultMutableTreeNode) e.nextElement();
            Object uo = n.getUserObject();
            if (uo instanceof XNodeInfo) {
                switch (((XNodeInfo) uo).getType()) {
                    case ATTRIBUTES:
                    case NOTIFICATIONS:
                    case OPERATIONS:
                        metadataNodes.add(n);
                        break;
                    default:
                        break;
                }
            }
        }
        for (DefaultMutableTreeNode n : metadataNodes) {
            model.removeNodeFromParent(n);
        }
    }

    /**
     * Removes only the parent nodes which are non MBean and leaf.
     * This method assumes the child nodes have been removed before.
     */
    // Call on EDT
    private DefaultMutableTreeNode removeParentFromView(
            Dn dn, int index, DefaultMutableTreeNode node) {
        if ((!node.isRoot()) && node.isLeaf() &&
                (!(((XNodeInfo) node.getUserObject()).getType().equals(Type.MBEAN)))) {
            DefaultMutableTreeNode parent =
                    (DefaultMutableTreeNode) node.getParent();
            removeChildNode(node);
            String hashKey = dn.getHashKey(dn.getToken(index));
            nodes.remove(hashKey);
            // Recurse upwards until a non-empty or MBean ancestor is found.
            removeParentFromView(dn, index + 1, parent);
        }
        return node;
    }

    // Call on EDT
    public synchronized void addMBeansToView(Set<ObjectName> mbeans) {
        // TreeSet orders the Dns so parents are created deterministically.
        Set<Dn> dns = new TreeSet<Dn>();
        for (ObjectName mbean : mbeans) {
            Dn dn = new Dn(mbean);
            dns.add(dn);
        }
        for (Dn dn : dns) {
            ObjectName mbean = dn.getObjectName();
            XMBean xmbean = new XMBean(mbean, mbeansTab);
            addMBeanToView(mbean, xmbean, dn);
        }
    }

    // Call on EDT
    public synchronized void addMBeanToView(ObjectName mbean) {
        // Build XMBean for the given MBean
        //
        XMBean xmbean = new XMBean(mbean, mbeansTab);
        // Build Dn for the given MBean
        //
        Dn dn = new Dn(mbean);
        // Add the new nodes to the MBean tree from leaf to root
        //
        addMBeanToView(mbean, xmbean, dn);
    }

    // Call on EDT
    private synchronized void addMBeanToView(
            ObjectName mbean, XMBean xmbean, Dn dn) {
        DefaultMutableTreeNode childNode = null;
        DefaultMutableTreeNode parentNode = null;
        // Add the node or replace its user object if already added
        //
        Token token = dn.getToken(0);
        String hashKey = dn.getHashKey(token);
        if (nodes.containsKey(hashKey)) {
            // Found existing node previously created when adding another node
            //
            childNode = nodes.get(hashKey);
            // Replace user object to reflect that this node is an MBean
            //
            Object data = createNodeValue(xmbean, token);
            String label = data.toString();
            XNodeInfo userObject =
                    new XNodeInfo(Type.MBEAN, data, label, mbean.toString());
            changeNodeValue(childNode, userObject);
            return;
        }
        // Create new leaf node
        //
        childNode = createDnNode(dn, token, xmbean);
        nodes.put(hashKey, childNode);
        // Add intermediate non MBean nodes
        //
        for (int i = 1; i < dn.getTokenCount(); i++) {
            token = dn.getToken(i);
            hashKey = dn.getHashKey(token);
            if (nodes.containsKey(hashKey)) {
                // Intermediate node already present, add new node as child
                //
                parentNode = nodes.get(hashKey);
                addChildNode(parentNode, childNode);
                return;
            } else {
                // Create new intermediate node
                //
                if ("domain".equals(token.getTokenType())) {
                    parentNode = createDomainNode(dn, token);
                    DefaultMutableTreeNode root =
                            (DefaultMutableTreeNode) getModel().getRoot();
                    addChildNode(root, parentNode);
                } else {
                    parentNode = createSubDnNode(dn, token);
                }
                nodes.put(hashKey, parentNode);
                addChildNode(parentNode, childNode);
            }
            // Continue climbing: the node just created becomes the child of
            // the next (outer) token's node.
            childNode = parentNode;
        }
    }

    // Call on EDT
    private synchronized void changeNodeValue(
            DefaultMutableTreeNode node, XNodeInfo nodeValue) {
        if (node instanceof ComparableDefaultMutableTreeNode) {
            // should it stay at the same place?
            DefaultMutableTreeNode clone =
                    (DefaultMutableTreeNode) node.clone();
            clone.setUserObject(nodeValue);
            if (((ComparableDefaultMutableTreeNode) node).compareTo(clone) == 0) {
                // the order in the tree didn't change
                node.setUserObject(nodeValue);
                DefaultTreeModel model = (DefaultTreeModel) getModel();
                model.nodeChanged(node);
            } else {
                // delete the node and re-order it in case the
                // node value modifies the order in the tree
                DefaultMutableTreeNode parent =
                        (DefaultMutableTreeNode) node.getParent();
                removeChildNode(node);
                node.setUserObject(nodeValue);
                addChildNode(parent, node);
            }
        } else {
            // not comparable stays at the same place
            node.setUserObject(nodeValue);
            DefaultTreeModel model = (DefaultTreeModel) getModel();
            model.nodeChanged(node);
        }
        // Load the MBean metadata if type is MBEAN
        if (nodeValue.getType().equals(Type.MBEAN)) {
            removeMetadataNodes(node);
            TreeNode[] treeNodes = node.getPath();
            TreePath path = new TreePath(treeNodes);
            if (isExpanded(path)) {
                addMetadataNodes(node);
            }
        }
        // Clear the current selection and set it
        // again so valueChanged() gets called
        if (node == getLastSelectedPathComponent()) {
            TreePath selectionPath = getSelectionPath();
            clearSelection();
            setSelectionPath(selectionPath);
        }
    }

    /**
     * Creates the domain node.
     */
    private DefaultMutableTreeNode createDomainNode(Dn dn, Token token) {
        DefaultMutableTreeNode node = new ComparableDefaultMutableTreeNode();
        String label = dn.getDomain();
        XNodeInfo userObject =
                new XNodeInfo(Type.NONMBEAN, label, label, label);
        node.setUserObject(userObject);
        return node;
    }

    /**
     * Creates the node corresponding to the whole Dn, i.e. an MBean.
     */
    private DefaultMutableTreeNode createDnNode(
            Dn dn, Token token, XMBean xmbean) {
        DefaultMutableTreeNode node = new ComparableDefaultMutableTreeNode();
        Object data = createNodeValue(xmbean, token);
        String label = data.toString();
        XNodeInfo userObject = new XNodeInfo(Type.MBEAN, data, label,
                xmbean.getObjectName().toString());
        node.setUserObject(userObject);
        return node;
    }

    /**
     * Creates the node corresponding to a subDn, i.e. a non-MBean
     * intermediate node.
     */
    private DefaultMutableTreeNode createSubDnNode(Dn dn, Token token) {
        DefaultMutableTreeNode node = new ComparableDefaultMutableTreeNode();
        String label =
                isKeyValueView() ? token.getTokenValue() : token.getValue();
        XNodeInfo userObject = new XNodeInfo(
                Type.NONMBEAN, label, label, token.getTokenValue());
        node.setUserObject(userObject);
        return node;
    }

    private Object createNodeValue(XMBean xmbean, Token token) {
        String label =
                isKeyValueView() ? token.getTokenValue() : token.getValue();
        xmbean.setText(label);
        return xmbean;
    }

    /**
     * Parses the MBean ObjectName comma-separated properties string and puts
     * the individual key/value pairs into the map. Key order in the properties
     * string is preserved by the map.
     */
    private static Map<String, String> extractKeyValuePairs(
            String props, ObjectName mbean) {
        Map<String, String> map = new LinkedHashMap<String, String>();
        int eq = props.indexOf("=");
        while (eq != -1) {
            String key = props.substring(0, eq);
            // The key came from this mbean's own property string, so the
            // lookup is expected to succeed (non-null value).
            String value = mbean.getKeyProperty(key);
            map.put(key, value);
            props = props.substring(key.length() + 1 + value.length());
            if (props.startsWith(",")) {
                props = props.substring(1);
            }
            eq = props.indexOf("=");
        }
        return map;
    }

    /**
     * Returns the ordered key property list that will be used to build the
     * MBean tree. If the "com.sun.tools.jconsole.mbeans.keyPropertyList" system
     * property is not specified, then the ordered key property list used
     * to build the MBean tree will be the one returned by the method
     * ObjectName.getKeyPropertyListString() with "type" as first key,
     * and "j2eeType" as second key, if present. If any of the keys specified
     * in the comma-separated key property list does not apply to the given
     * MBean then it will be discarded.
     */
    private static String getKeyPropertyListString(ObjectName mbean) {
        String props = mbean.getKeyPropertyListString();
        Map<String, String> map = extractKeyValuePairs(props, mbean);
        StringBuilder sb = new StringBuilder();
        // Add the key/value pairs to the buffer following the
        // key order defined by the "orderedKeyPropertyList"
        for (String key : orderedKeyPropertyList) {
            if (map.containsKey(key)) {
                sb.append(key + "=" + map.get(key) + ",");
                map.remove(key);
            }
        }
        // Add the remaining key/value pairs to the buffer
        for (Map.Entry<String, String> entry : map.entrySet()) {
            sb.append(entry.getKey() + "=" + entry.getValue() + ",");
        }
        // Drop the trailing ',' (assumes at least one key property).
        String orderedKeyPropertyListString = sb.toString();
        orderedKeyPropertyListString = orderedKeyPropertyListString.substring(
                0, orderedKeyPropertyListString.length() - 1);
        return orderedKeyPropertyListString;
    }

    // Call on EDT
    public void addMetadataNodes(DefaultMutableTreeNode node) {
        XMBean mbean = (XMBean) ((XNodeInfo) node.getUserObject()).getData();
        DefaultTreeModel model = (DefaultTreeModel) getModel();
        MBeanInfoNodesSwingWorker sw =
                new MBeanInfoNodesSwingWorker(model, node, mbean);
        // NOTE(review): sw can never be null immediately after construction;
        // this guard is dead code.
        if (sw != null) {
            sw.execute();
        }
    }

    /**
     * Background worker that fetches an MBean's MBeanInfo off the EDT and
     * then, back on the EDT (done()), inserts the Attributes / Operations /
     * Notifications metadata child nodes.
     */
    private static class MBeanInfoNodesSwingWorker
            extends SwingWorker<Object[], Void> {

        private final DefaultTreeModel model;
        private final DefaultMutableTreeNode node;
        private final XMBean mbean;

        public MBeanInfoNodesSwingWorker(
                DefaultTreeModel model,
                DefaultMutableTreeNode node,
                XMBean mbean) {
            this.model = model;
            this.node = node;
            this.mbean = mbean;
        }

        @Override
        public Object[] doInBackground()
                throws InstanceNotFoundException, IntrospectionException,
                ReflectionException, IOException {
            Object result[] = new Object[2];
            // Retrieve MBeanInfo for this MBean
            result[0] = mbean.getMBeanInfo();
            // Check if this MBean is a notification emitter
            result[1] = mbean.isBroadcaster();
            return result;
        }

        @Override
        protected void done() {
            try {
                Object result[] = get();
                MBeanInfo mbeanInfo = (MBeanInfo) result[0];
                Boolean isBroadcaster = (Boolean) result[1];
                if (mbeanInfo != null) {
                    addMBeanInfoNodes(model, node, mbean, mbeanInfo, isBroadcaster);
                }
            } catch (Exception e) {
                // Best-effort: failures to load metadata are only surfaced in
                // debug mode.
                Throwable t = Utils.getActualException(e);
                if (JConsole.isDebug()) {
                    t.printStackTrace();
                }
            }
        }

        // Call on EDT
        // NOTE(review): the "tree" parameter is unused; the body uses the
        // "model" field instead — confirm before relying on the parameter.
        private void addMBeanInfoNodes(
                DefaultTreeModel tree, DefaultMutableTreeNode node,
                XMBean mbean, MBeanInfo mbeanInfo, Boolean isBroadcaster) {
            MBeanAttributeInfo[] ai = mbeanInfo.getAttributes();
            MBeanOperationInfo[] oi = mbeanInfo.getOperations();
            MBeanNotificationInfo[] ni = mbeanInfo.getNotifications();

            // Insert the Attributes/Operations/Notifications metadata nodes as
            // the three first children of this MBean node. This is only useful
            // when this MBean node denotes an MBean but it's not a leaf in the
            // MBean tree
            //
            int childIndex = 0;

            // MBeanAttributeInfo node
            //
            if (ai != null && ai.length > 0) {
                DefaultMutableTreeNode attributes = new DefaultMutableTreeNode();
                XNodeInfo attributesUO = new XNodeInfo(Type.ATTRIBUTES, mbean,
                        Resources.getText("Attributes"), null);
                attributes.setUserObject(attributesUO);
                node.insert(attributes, childIndex++);
                for (MBeanAttributeInfo mbai : ai) {
                    DefaultMutableTreeNode attribute = new DefaultMutableTreeNode();
                    XNodeInfo attributeUO = new XNodeInfo(Type.ATTRIBUTE,
                            new Object[]{mbean, mbai}, mbai.getName(), null);
                    attribute.setUserObject(attributeUO);
                    attribute.setAllowsChildren(false);
                    attributes.add(attribute);
                }
            }

            // MBeanOperationInfo node
            //
            if (oi != null && oi.length > 0) {
                DefaultMutableTreeNode operations = new DefaultMutableTreeNode();
                XNodeInfo operationsUO = new XNodeInfo(Type.OPERATIONS, mbean,
                        Resources.getText("Operations"), null);
                operations.setUserObject(operationsUO);
                node.insert(operations, childIndex++);
                for (MBeanOperationInfo mboi : oi) {
                    // Compute the operation's tool tip text:
                    // "operationname(param1type,param2type,...)"
                    //
                    StringBuilder sb = new StringBuilder();
                    for (MBeanParameterInfo mbpi : mboi.getSignature()) {
                        sb.append(mbpi.getType() + ",");
                    }
                    String signature = sb.toString();
                    if (signature.length() > 0) {
                        // Remove the trailing ','
                        //
                        signature = signature.substring(0, signature.length() - 1);
                    }
                    String toolTipText = mboi.getName() + "(" + signature + ")";
                    // Create operation node
                    //
                    DefaultMutableTreeNode operation = new DefaultMutableTreeNode();
                    XNodeInfo operationUO = new XNodeInfo(Type.OPERATION,
                            new Object[]{mbean, mboi}, mboi.getName(), toolTipText);
                    operation.setUserObject(operationUO);
                    operation.setAllowsChildren(false);
                    operations.add(operation);
                }
            }

            // MBeanNotificationInfo node
            //
            if (isBroadcaster != null && isBroadcaster.booleanValue()) {
                DefaultMutableTreeNode notifications = new DefaultMutableTreeNode();
                XNodeInfo notificationsUO = new XNodeInfo(Type.NOTIFICATIONS, mbean,
                        Resources.getText("Notifications"), null);
                notifications.setUserObject(notificationsUO);
                node.insert(notifications, childIndex++);
                if (ni != null && ni.length > 0) {
                    for (MBeanNotificationInfo mbni : ni) {
                        DefaultMutableTreeNode notification =
                                new DefaultMutableTreeNode();
                        XNodeInfo notificationUO = new XNodeInfo(Type.NOTIFICATION,
                                mbni, mbni.getName(), null);
                        notification.setUserObject(notificationUO);
                        notification.setAllowsChildren(false);
                        notifications.add(notification);
                    }
                }
            }

            // Update tree model
            //
            model.reload(node);
        }
    }

    //
    // Tree preferences
    //
    private static boolean treeView;
    private static boolean treeViewInit = false;

    // Lazily reads the "treeView" system property once (defaults to true).
    private static boolean isTreeView() {
        if (!treeViewInit) {
            treeView = getTreeViewValue();
            treeViewInit = true;
        }
        return treeView;
    }

    private static boolean getTreeViewValue() {
        String tv = System.getProperty("treeView");
        return ((tv == null) ? true : !(tv.equals("false")));
    }

    //
    // MBean key-value preferences
    //
    private boolean keyValueView = Boolean.getBoolean("keyValueView");

    private boolean isKeyValueView() {
        return keyValueView;
    }

    //
    // Utility classes
    //

    // Tree node ordered by the string form of its user object.
    private static class ComparableDefaultMutableTreeNode
            extends DefaultMutableTreeNode
            implements Comparable<DefaultMutableTreeNode> {

        public int compareTo(DefaultMutableTreeNode node) {
            return (this.toString().compareTo(node.toString()));
        }
    }

    /**
     * Decomposition of an ObjectName into an ordered token list
     * (leaf-to-root after reversal) plus a precomputed hash string used as
     * the lookup key in the XTree.nodes map.
     */
    private static class Dn implements Comparable<Dn> {

        private ObjectName mbean;
        private String domain;
        private String keyPropertyList;
        private String hashDn;
        private List<Token> tokens = new ArrayList<Token>();

        public Dn(ObjectName mbean) {
            this.mbean = mbean;
            this.domain = mbean.getDomain();
            this.keyPropertyList = getKeyPropertyListString(mbean);
            if (isTreeView()) {
                // Tree view
                Map<String, String> map =
                        extractKeyValuePairs(keyPropertyList, mbean);
                for (Map.Entry<String, String> entry : map.entrySet()) {
                    tokens.add(new Token("key",
                            entry.getKey() + "=" + entry.getValue()));
                }
            } else {
                // Flat view
                tokens.add(new Token("key", "properties=" + keyPropertyList));
            }
            // Add the domain as the first token in the Dn
            tokens.add(0, new Token("domain", "domain=" + domain));
            // Reverse the Dn (from leaf to root)
            Collections.reverse(tokens);
            // Compute hash for Dn
            computeHashDn();
        }

        public ObjectName getObjectName() {
            return mbean;
        }

        public String getDomain() {
            return domain;
        }

        public String getKeyPropertyList() {
            return keyPropertyList;
        }

        public Token getToken(int index) {
            return tokens.get(index);
        }

        public int getTokenCount() {
            return tokens.size();
        }

        public String getHashDn() {
            return hashDn;
        }

        // Hash key of a token = suffix of hashDn starting at that token's
        // value; shared suffixes give shared parent nodes.
        public String getHashKey(Token token) {
            final int begin = hashDn.indexOf(token.getTokenValue());
            return hashDn.substring(begin, hashDn.length());
        }

        private void computeHashDn() {
            if (tokens.isEmpty()) {
                return;
            }
            final StringBuilder hdn = new StringBuilder();
            for (int i = 0; i < tokens.size(); i++) {
                hdn.append(tokens.get(i).getTokenValue());
                hdn.append(",");
            }
            hashDn = hdn.substring(0, hdn.length() - 1);
        }

        @Override
        public String toString() {
            return domain + ":" + keyPropertyList;
        }

        public int compareTo(Dn dn) {
            return this.toString().compareTo(dn.toString());
        }
    }

    /**
     * One "type=value" element of a Dn: tokenType is "domain" or "key",
     * tokenValue is the raw "key=value" text, split into key and value.
     */
    private static class Token {

        private String tokenType;
        private String tokenValue;
        private String key;
        private String value;

        public Token(String tokenType, String tokenValue) {
            this.tokenType = tokenType;
            this.tokenValue = tokenValue;
            buildKeyValue();
        }

        public String getTokenType() {
            return tokenType;
        }

        public String getTokenValue() {
            return tokenValue;
        }

        public String getKey() {
            return key;
        }

        public String getValue() {
            return value;
        }

        private void buildKeyValue() {
            int index = tokenValue.indexOf("=");
            if (index < 0) {
                // No '=' separator: key and value both fall back to the raw text.
                key = tokenValue;
                value = tokenValue;
            } else {
                key = tokenValue.substring(0, index);
                value = tokenValue.substring(index + 1, tokenValue.length());
            }
        }
    }
}
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.2-hudson-jaxb-ri-2.2-63-
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2013.10.29 at 05:09:52 \uc624\ud6c4 KST
//

package net.ion.open.oadr2.model.v20b.ei;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
import org.jvnet.jaxb2_commons.lang.Equals;
import org.jvnet.jaxb2_commons.lang.EqualsStrategy;
import org.jvnet.jaxb2_commons.lang.HashCode;
import org.jvnet.jaxb2_commons.lang.HashCodeStrategy;
import org.jvnet.jaxb2_commons.lang.JAXBEqualsStrategy;
import org.jvnet.jaxb2_commons.lang.JAXBHashCodeStrategy;
import org.jvnet.jaxb2_commons.lang.JAXBToStringStrategy;
import org.jvnet.jaxb2_commons.lang.ToString;
import org.jvnet.jaxb2_commons.lang.ToStringStrategy;
import org.jvnet.jaxb2_commons.locator.ObjectLocator;
import org.jvnet.jaxb2_commons.locator.util.LocatorUtils;

/**
 * <p>Java class for eiEventSignalsType complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="eiEventSignalsType">
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence>
 *         &lt;element ref="{http://docs.oasis-open.org/ns/energyinterop/201110}eiEventSignal" maxOccurs="unbounded"/>
 *         &lt;element ref="{http://docs.oasis-open.org/ns/energyinterop/201110}eiEventBaseline" minOccurs="0"/>
 *       &lt;/sequence>
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 *
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "eiEventSignalsType", propOrder = {
    "eiEventSignals",
    "eiEventBaseline"
})
public class EiEventSignals
    implements Serializable, Equals, HashCode, ToString
{

    private final static long serialVersionUID = 1L;
    // Required, repeating eiEventSignal elements (at least one per schema).
    @XmlElement(name = "eiEventSignal", required = true)
    protected List<EiEventSignal> eiEventSignals;
    // Optional eiEventBaseline element (minOccurs="0").
    protected EiEventBaseline eiEventBaseline;

    /**
     * Default no-arg constructor
     *
     */
    public EiEventSignals() {
        super();
    }

    /**
     * Fully-initialising value constructor
     *
     */
    public EiEventSignals(final List<EiEventSignal> eiEventSignals, final EiEventBaseline eiEventBaseline) {
        this.eiEventSignals = eiEventSignals;
        this.eiEventBaseline = eiEventBaseline;
    }

    /**
     * Interval data for an event Gets the value of the eiEventSignals property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the eiEventSignals property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getEiEventSignals().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link EiEventSignal }
     *
     *
     */
    public List<EiEventSignal> getEiEventSignals() {
        // Lazily created so the getter never returns null (JAXB convention).
        if (eiEventSignals == null) {
            eiEventSignals = new ArrayList<EiEventSignal>();
        }
        return this.eiEventSignals;
    }

    /**
     * Interval data for a baseline
     *
     * @return
     *     possible object is
     *     {@link EiEventBaseline }
     *
     */
    public EiEventBaseline getEiEventBaseline() {
        return eiEventBaseline;
    }

    /**
     * Sets the value of the eiEventBaseline property.
     *
     * @param value
     *     allowed object is
     *     {@link EiEventBaseline }
     *
     */
    public void setEiEventBaseline(EiEventBaseline value) {
        this.eiEventBaseline = value;
    }

    // Renders this object via the JAXB2-basics ToString strategy.
    public String toString() {
        final ToStringStrategy strategy = JAXBToStringStrategy.INSTANCE;
        final StringBuilder buffer = new StringBuilder();
        append(null, buffer, strategy);
        return buffer.toString();
    }

    public StringBuilder append(ObjectLocator locator, StringBuilder buffer, ToStringStrategy strategy) {
        strategy.appendStart(locator, this, buffer);
        appendFields(locator, buffer, strategy);
        strategy.appendEnd(locator, this, buffer);
        return buffer;
    }

    public StringBuilder appendFields(ObjectLocator locator, StringBuilder buffer, ToStringStrategy strategy) {
        {
            List<EiEventSignal> theEiEventSignals;
            // Empty lists are treated as absent (null) by the strategies.
            theEiEventSignals = (((this.eiEventSignals!= null)&&(!this.eiEventSignals.isEmpty()))?this.getEiEventSignals():null);
            strategy.appendField(locator, this, "eiEventSignals", buffer, theEiEventSignals);
        }
        {
            EiEventBaseline theEiEventBaseline;
            theEiEventBaseline = this.getEiEventBaseline();
            strategy.appendField(locator, this, "eiEventBaseline", buffer, theEiEventBaseline);
        }
        return buffer;
    }

    // Strategy-driven structural equality over both properties.
    public boolean equals(ObjectLocator thisLocator, ObjectLocator thatLocator, Object object, EqualsStrategy strategy) {
        if (!(object instanceof EiEventSignals)) {
            return false;
        }
        if (this == object) {
            return true;
        }
        final EiEventSignals that = ((EiEventSignals) object);
        {
            List<EiEventSignal> lhsEiEventSignals;
            lhsEiEventSignals = (((this.eiEventSignals!= null)&&(!this.eiEventSignals.isEmpty()))?this.getEiEventSignals():null);
            List<EiEventSignal> rhsEiEventSignals;
            rhsEiEventSignals = (((that.eiEventSignals!= null)&&(!that.eiEventSignals.isEmpty()))?that.getEiEventSignals():null);
            if (!strategy.equals(LocatorUtils.property(thisLocator, "eiEventSignals", lhsEiEventSignals), LocatorUtils.property(thatLocator, "eiEventSignals", rhsEiEventSignals), lhsEiEventSignals, rhsEiEventSignals)) {
                return false;
            }
        }
        {
            EiEventBaseline lhsEiEventBaseline;
            lhsEiEventBaseline = this.getEiEventBaseline();
            EiEventBaseline rhsEiEventBaseline;
            rhsEiEventBaseline = that.getEiEventBaseline();
            if (!strategy.equals(LocatorUtils.property(thisLocator, "eiEventBaseline", lhsEiEventBaseline), LocatorUtils.property(thatLocator, "eiEventBaseline", rhsEiEventBaseline), lhsEiEventBaseline, rhsEiEventBaseline)) {
                return false;
            }
        }
        return true;
    }

    public boolean equals(Object object) {
        final EqualsStrategy strategy = JAXBEqualsStrategy.INSTANCE;
        return equals(null, null, object, strategy);
    }

    // Strategy-driven hash, consistent with equals above.
    public int hashCode(ObjectLocator locator, HashCodeStrategy strategy) {
        int currentHashCode = 1;
        {
            List<EiEventSignal> theEiEventSignals;
            theEiEventSignals = (((this.eiEventSignals!= null)&&(!this.eiEventSignals.isEmpty()))?this.getEiEventSignals():null);
            currentHashCode = strategy.hashCode(LocatorUtils.property(locator, "eiEventSignals", theEiEventSignals), currentHashCode, theEiEventSignals);
        }
        {
            EiEventBaseline theEiEventBaseline;
            theEiEventBaseline = this.getEiEventBaseline();
            currentHashCode = strategy.hashCode(LocatorUtils.property(locator, "eiEventBaseline", theEiEventBaseline), currentHashCode, theEiEventBaseline);
        }
        return currentHashCode;
    }

    public int hashCode() {
        final HashCodeStrategy strategy = JAXBHashCodeStrategy.INSTANCE;
        return this.hashCode(null, strategy);
    }

    // Fluent builder-style helpers generated by the "fluent API" plugin.
    public EiEventSignals withEiEventSignals(EiEventSignal... values) {
        if (values!= null) {
            for (EiEventSignal value: values) {
                getEiEventSignals().add(value);
            }
        }
        return this;
    }

    public EiEventSignals withEiEventSignals(Collection<EiEventSignal> values) {
        if (values!= null) {
            getEiEventSignals().addAll(values);
        }
        return this;
    }

    public EiEventSignals withEiEventBaseline(EiEventBaseline value) {
        setEiEventBaseline(value);
        return this;
    }

}
/*
 * Copyright 2000-2012 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.cvsSupport2.annotate;

import com.intellij.CvsBundle;
import com.intellij.cvsSupport2.CvsUtil;
import com.intellij.cvsSupport2.application.CvsEntriesManager;
import com.intellij.cvsSupport2.connections.CvsConnectionSettings;
import com.intellij.cvsSupport2.connections.CvsEnvironment;
import com.intellij.cvsSupport2.cvsExecution.CvsOperationExecutor;
import com.intellij.cvsSupport2.cvsExecution.CvsOperationExecutorCallback;
import com.intellij.cvsSupport2.cvshandlers.CommandCvsHandler;
import com.intellij.cvsSupport2.cvsoperations.cvsAnnotate.AnnotateOperation;
import com.intellij.cvsSupport2.cvsoperations.cvsAnnotate.Annotation;
import com.intellij.cvsSupport2.history.CvsHistoryProvider;
import com.intellij.cvsSupport2.history.CvsRevisionNumber;
import com.intellij.openapi.cvsIntegration.CvsResult;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vcs.FilePath;
import com.intellij.openapi.vcs.RepositoryLocation;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vcs.actions.VcsContextFactory;
import com.intellij.openapi.vcs.annotate.AnnotationProvider;
import com.intellij.openapi.vcs.annotate.FileAnnotation;
import com.intellij.openapi.vcs.history.VcsFileRevision;
import com.intellij.openapi.vcs.history.VcsRevisionNumber;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.ArrayUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.File;
import java.io.IOException;
import java.util.*;

/**
 * Produces line-by-line "annotate" (blame) data for files under CVS.
 * Results pair each line with its revision, author and date; where file
 * history is available it is used to restore full user names (CVS itself
 * truncates them to 8 characters in annotate output).
 */
public class CvsAnnotationProvider implements AnnotationProvider {
  // Server messages indicating that annotate with the binary flag is not
  // supported; seeing one triggers a single retry without the flag.
  @NonNls private static final String INVALID_OPTION_F = "invalid option -- F";
  @NonNls private static final String USAGE_CVSNTSRV_SERVER = "Usage: cvs";

  // CVS roots where binary annotation already failed once, so it is never
  // attempted again for the lifetime of the process.
  // NOTE(review): shared mutable static without synchronization — assumes
  // annotate requests are serialized; confirm before relying on it.
  private static final Collection<String> ourDoNotAnnotateBinaryRoots = new HashSet<String>();

  private final Project myProject;
  private final CvsHistoryProvider myCvsHistoryProvider;

  public CvsAnnotationProvider(final Project project, CvsHistoryProvider cvsHistoryProvider) {
    myProject = project;
    myCvsHistoryProvider = cvsHistoryProvider;
  }

  /**
   * Annotates the working copy of {@code virtualFile} at its checked-out
   * revision, enriching the per-line data with the file's history.
   */
  public FileAnnotation annotate(VirtualFile virtualFile) throws VcsException {
    final File ioFile = new File(virtualFile.getPath());
    final File lightweightFile = CvsUtil.getCvsLightweightFileForFile(ioFile);
    final String workingRevision = CvsUtil.getRevisionFor(ioFile);
    final CvsConnectionSettings connectionSettings =
      CvsEntriesManager.getInstance().getCvsConnectionSettingsFor(ioFile.getParentFile());
    final boolean binary = annotateBinary(virtualFile, connectionSettings);
    final AnnotateOperation operation =
      executeOperation(lightweightFile, workingRevision, connectionSettings, binary, true);
    final FilePath path = VcsContextFactory.SERVICE.getInstance().createFilePathOn(virtualFile);
    final List<VcsFileRevision> history = myCvsHistoryProvider.createRevisions(path);
    final Annotation[] annotations = operation.getLineAnnotations();
    adjustAnnotation(history, annotations);
    return new CvsFileAnnotation(operation.getContent(), annotations, history,
                                 virtualFile, workingRevision, myProject);
  }

  /** Annotates {@code file} at the given historical {@code revision}. */
  public FileAnnotation annotate(VirtualFile file, VcsFileRevision revision) throws VcsException {
    final CvsConnectionSettings settings =
      CvsEntriesManager.getInstance().getCvsConnectionSettingsFor(file.getParent());
    final String revisionNumber = revision.getRevisionNumber().asString();
    return annotate(file, revisionNumber, settings);
  }

  public boolean isAnnotationValid(VcsFileRevision rev) {
    return true;
  }

  /**
   * Annotates a file identified either by a local path (history view) or by
   * a CVS module name (repository browser, no file on disk).
   */
  public FileAnnotation annotate(VirtualFile cvsVirtualFile, String revision, CvsEnvironment environment)
    throws VcsException {
    // The VirtualFile has a full path if annotate is called from history (when we have a real
    // file on disk), and has the path equal to a CVS module name if annotate is called from the
    // CVS repository browser (when there's no real path).
    File cvsFile = new File(cvsVirtualFile.getPath());
    final boolean hasLocalFile = cvsFile.isAbsolute();
    if (hasLocalFile) {
      cvsFile = new File(CvsUtil.getModuleName(cvsVirtualFile));
    }
    final boolean binary = annotateBinary(cvsVirtualFile, environment);
    final AnnotateOperation operation = executeOperation(cvsFile, revision, environment, binary, true);
    final Annotation[] annotations = operation.getLineAnnotations();

    final List<VcsFileRevision> revisions;
    if (hasLocalFile) {
      final FilePath path = VcsContextFactory.SERVICE.getInstance().createFilePathOn(cvsVirtualFile);
      revisions = myCvsHistoryProvider.createRevisions(path);
      // CVS annotate output truncates user names to 8 characters; recover
      // the full names from the file history when we have it.
      adjustAnnotation(revisions, annotations);
    }
    else {
      // No history available — synthesize one revision entry per distinct
      // revision seen in the annotation lines.
      revisions = new ArrayList<VcsFileRevision>();
      final Set<String> seenRevisions = new HashSet<String>();
      for (Annotation annotation : annotations) {
        if (seenRevisions.add(annotation.getRevision())) {
          revisions.add(new RevisionPresentation(annotation.getRevision(),
                                                 annotation.getUserName(),
                                                 annotation.getDate()));
        }
      }
    }
    return new CvsFileAnnotation(operation.getContent(), annotations, revisions,
                                 cvsVirtualFile, revision, myProject);
  }

  // Whether the annotate command should pass the binary flag for this file:
  // true only for binary entries whose root has not been blacklisted.
  private static boolean annotateBinary(VirtualFile cvsVirtualFile, CvsEnvironment environment) {
    final String cvsRoot = environment.getCvsRootAsString();
    if (ourDoNotAnnotateBinaryRoots.contains(cvsRoot)) {
      return false;
    }
    return CvsEntriesManager.getInstance().getEntryFor(cvsVirtualFile).isBinary();
  }

  /**
   * Runs the annotate operation synchronously. When the server rejects the
   * binary flag (old cvs / cvsnt), the root is blacklisted and the command
   * is retried exactly once without the flag.
   */
  private AnnotateOperation executeOperation(File file, String revision, CvsEnvironment root,
                                             boolean binary, boolean retryOnFailure) throws VcsException {
    final AnnotateOperation operation = new AnnotateOperation(file, revision, root, binary);
    final CvsOperationExecutor executor = new CvsOperationExecutor(myProject);
    executor.performActionSync(new CommandCvsHandler(CvsBundle.getAnnotateOperationName(), operation),
                               CvsOperationExecutorCallback.EMPTY);
    final CvsResult result = executor.getResult();
    if (!result.hasErrors()) {
      return operation;
    }
    if (retryOnFailure && binaryAnnotateNotSupported(result)) {
      ourDoNotAnnotateBinaryRoots.add(root.getCvsRootAsString());
      return executeOperation(file, revision, root, false, false);
    }
    throw result.composeError();
  }

  // True when any error message indicates the server cannot handle the
  // binary annotate option.
  private static boolean binaryAnnotateNotSupported(CvsResult result) {
    for (VcsException error : result.getErrors()) {
      for (String message : error.getMessages()) {
        if (message.contains(INVALID_OPTION_F) || message.contains(USAGE_CVSNTSRV_SERVER)) {
          return true;
        }
      }
    }
    return false;
  }

  // Copies author and date from matching history revisions onto the
  // annotation lines; a no-op when no history is available.
  private static void adjustAnnotation(@Nullable List<VcsFileRevision> revisions,
                                       @NotNull Annotation[] lineAnnotations) {
    if (revisions == null) {
      return;
    }
    final Map<String, VcsFileRevision> revisionsByNumber = new HashMap<String, VcsFileRevision>();
    for (VcsFileRevision fileRevision : revisions) {
      revisionsByNumber.put(fileRevision.getRevisionNumber().asString(), fileRevision);
    }
    for (Annotation lineAnnotation : lineAnnotations) {
      final VcsFileRevision match = revisionsByNumber.get(lineAnnotation.getRevision());
      if (match != null) {
        lineAnnotation.setUser(match.getAuthor());
        lineAnnotation.setDate(match.getRevisionDate());
      }
    }
  }

  /**
   * Minimal {@link VcsFileRevision} built from a single annotation line,
   * used when no real file history exists (repository browser case).
   * Content is always empty; branch and commit message are unknown.
   */
  private static class RevisionPresentation implements VcsFileRevision {
    private final VcsRevisionNumber myNumber;
    private final String myAuthor;
    private final Date myDate;

    private RevisionPresentation(final String revision, final String author, final Date date) {
      myNumber = new CvsRevisionNumber(revision);
      myAuthor = author;
      myDate = date;
    }

    public VcsRevisionNumber getRevisionNumber() {
      return myNumber;
    }

    public String getBranchName() {
      return null;
    }

    public Date getRevisionDate() {
      return myDate;
    }

    public String getAuthor() {
      return myAuthor;
    }

    public String getCommitMessage() {
      return null;
    }

    @Nullable
    @Override
    public RepositoryLocation getChangedRepositoryPath() {
      return null;
    }

    public byte[] loadContent() throws IOException, VcsException {
      return getContent();
    }

    public byte[] getContent() throws IOException, VcsException {
      return ArrayUtil.EMPTY_BYTE_ARRAY;
    }
  }
}
/*
 * Copyright 2014, gRPC Authors All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.grpc.internal;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.verifyZeroInteractions;

import io.grpc.Codec;
import io.grpc.StreamTracer;
import io.grpc.internal.testing.TestStreamTracer.TestBaseStreamTracer;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;

/**
 * Tests for {@link MessageFramer}.
 */
@RunWith(JUnit4.class)
public class MessageFramerTest {
  @Mock
  private MessageFramer.Sink sink;
  // Records stream-tracer callbacks; checkStats() replays them afterwards.
  private final TestBaseStreamTracer tracer = new TestBaseStreamTracer();
  private MessageFramer framer;
  @Captor
  private ArgumentCaptor<ByteWritableBuffer> frameCaptor;
  @Captor
  private ArgumentCaptor<Long> wireSizeCaptor;
  @Captor
  private ArgumentCaptor<Long> uncompressedSizeCaptor;
  // Default allocator yields 1000-byte buffers; individual tests replace it
  // (and re-create the framer) to force specific buffer-splitting behavior.
  private BytesWritableBufferAllocator allocator =
      new BytesWritableBufferAllocator(1000, 1000);
  private StatsTraceContext statsTraceCtx;

  /** Set up for test. */
  @Before
  public void setUp() {
    MockitoAnnotations.initMocks(this);
    // MessageDeframerTest tests with a client-side StatsTraceContext, so here we test with a
    // server-side StatsTraceContext.
    statsTraceCtx = new StatsTraceContext(new StreamTracer[]{tracer});
    framer = new MessageFramer(sink, allocator, statsTraceCtx);
  }

  // A known-length message is buffered until flush, then delivered as one
  // frame: 5-byte header (flag + big-endian length) followed by the payload.
  @Test
  public void simplePayload() {
    writeKnownLength(framer, new byte[]{3, 14});
    verifyNoMoreInteractions(sink);
    framer.flush();

    verify(sink).deliverFrame(toWriteBuffer(new byte[] {0, 0, 0, 0, 2, 3, 14}), false, true);
    assertEquals(1, allocator.allocCount);
    verifyNoMoreInteractions(sink);
    checkStats(2, 2);
  }

  // Unknown-length input delivers the header and the payload as two frames.
  @Test
  public void simpleUnknownLengthPayload() {
    writeUnknownLength(framer, new byte[]{3, 14});
    framer.flush();
    // Header is written first, then payload
    verify(sink).deliverFrame(toWriteBuffer(new byte[] {0, 0, 0, 0, 2}), false, false);
    verify(sink).deliverFrame(toWriteBuffer(new byte[] {3, 14}), false, true);
    assertEquals(2, allocator.allocCount);
    verifyNoMoreInteractions(sink);
    checkStats(2, 2);
  }

  // Two small messages fit into one buffer and are sent as a single frame.
  @Test
  public void smallPayloadsShouldBeCombined() {
    writeKnownLength(framer, new byte[]{3});
    verifyNoMoreInteractions(sink);
    writeKnownLength(framer, new byte[]{14});
    verifyNoMoreInteractions(sink);
    framer.flush();
    verify(sink).deliverFrame(
        toWriteBuffer(new byte[] {0, 0, 0, 0, 1, 3, 0, 0, 0, 0, 1, 14}), false, true);
    verifyNoMoreInteractions(sink);
    assertEquals(1, allocator.allocCount);
    checkStats(1, 1, 1, 1);
  }

  // close() flushes the buffered frame with endOfStream=true.
  @Test
  public void closeCombinedWithFullSink() {
    writeKnownLength(framer, new byte[]{3, 14, 1, 5, 9, 2, 6});
    verifyNoMoreInteractions(sink);
    framer.close();
    verify(sink).deliverFrame(
        toWriteBuffer(new byte[] {0, 0, 0, 0, 7, 3, 14, 1, 5, 9, 2, 6}), true, true);
    verifyNoMoreInteractions(sink);
    assertEquals(1, allocator.allocCount);
    checkStats(7, 7);
  }

  // close() with nothing buffered still signals end-of-stream (null buffer).
  @Test
  public void closeWithoutBufferedFrameGivesNullBuffer() {
    framer.close();
    verify(sink).deliverFrame(null, true, true);
    verifyNoMoreInteractions(sink);
    assertEquals(0, allocator.allocCount);
    checkStats();
  }

  // A payload larger than one buffer spans two deliverFrame calls.
  @Test
  public void payloadSplitBetweenSinks() {
    allocator = new BytesWritableBufferAllocator(12, 12);
    framer = new MessageFramer(sink, allocator, statsTraceCtx);
    writeKnownLength(framer, new byte[]{3, 14, 1, 5, 9, 2, 6, 5});
    verify(sink).deliverFrame(
        toWriteBuffer(new byte[] {0, 0, 0, 0, 8, 3, 14, 1, 5, 9, 2, 6}), false, false);
    verifyNoMoreInteractions(sink);

    framer.flush();
    verify(sink).deliverFrame(toWriteBuffer(new byte[] {5}), false, true);
    verifyNoMoreInteractions(sink);
    assertEquals(2, allocator.allocCount);
    checkStats(8, 8);
  }

  // The second message's 5-byte header itself straddles the buffer boundary.
  @Test
  public void frameHeaderSplitBetweenSinks() {
    allocator = new BytesWritableBufferAllocator(12, 12);
    framer = new MessageFramer(sink, allocator, statsTraceCtx);
    writeKnownLength(framer, new byte[]{3, 14, 1});
    writeKnownLength(framer, new byte[]{3});
    verify(sink).deliverFrame(
        toWriteBuffer(new byte[] {0, 0, 0, 0, 3, 3, 14, 1, 0, 0, 0, 0}), false, false);
    verifyNoMoreInteractions(sink);

    framer.flush();
    verify(sink).deliverFrame(toWriteBufferWithMinSize(new byte[] {1, 3}, 12), false, true);
    verifyNoMoreInteractions(sink);
    assertEquals(2, allocator.allocCount);
    checkStats(3, 3, 1, 1);
  }

  // A zero-byte message still produces a header-only frame.
  @Test
  public void emptyPayloadYieldsFrame() throws Exception {
    writeKnownLength(framer, new byte[0]);
    framer.flush();

    verify(sink).deliverFrame(toWriteBuffer(new byte[] {0, 0, 0, 0, 0}), false, true);
    assertEquals(1, allocator.allocCount);
    checkStats(0, 0);
  }

  @Test
  public void emptyUnknownLengthPayloadYieldsFrame() throws Exception {
    writeUnknownLength(framer, new byte[0]);
    verifyZeroInteractions(sink);
    framer.flush();
    verify(sink).deliverFrame(toWriteBuffer(new byte[] {0, 0, 0, 0, 0}), false, true);
    // One alloc for the header
    assertEquals(1, allocator.allocCount);
    checkStats(0, 0);
  }

  // A second flush with nothing buffered must not deliver anything.
  @Test
  public void flushIsIdempotent() {
    writeKnownLength(framer, new byte[]{3, 14});
    framer.flush();
    framer.flush();
    verify(sink).deliverFrame(toWriteBuffer(new byte[] {0, 0, 0, 0, 2, 3, 14}), false, true);
    verifyNoMoreInteractions(sink);
    assertEquals(1, allocator.allocCount);
    checkStats(2, 2);
  }

  // A 1000-byte message fits one large buffer: header encodes length 0x3E8.
  @Test
  public void largerFrameSize() throws Exception {
    allocator = new BytesWritableBufferAllocator(0, 10000);
    framer = new MessageFramer(sink, allocator, statsTraceCtx);
    writeKnownLength(framer, new byte[1000]);
    framer.flush();
    verify(sink).deliverFrame(frameCaptor.capture(), eq(false), eq(true));
    ByteWritableBuffer buffer = frameCaptor.getValue();
    assertEquals(1005, buffer.size());

    byte[] data = new byte[1005];
    data[3] = 3;
    data[4] = (byte) 232;

    assertEquals(toWriteBuffer(data), buffer);
    verifyNoMoreInteractions(sink);
    assertEquals(1, allocator.allocCount);
    checkStats(1000, 1000);
  }

  @Test
  public void largerFrameSizeUnknownLength() throws Exception {
    // Force payload to be split into two chunks
    allocator = new BytesWritableBufferAllocator(500, 500);
    framer = new MessageFramer(sink, allocator, statsTraceCtx);
    writeUnknownLength(framer, new byte[1000]);
    framer.flush();
    // Header and first chunk written with flush = false
    verify(sink, times(2)).deliverFrame(frameCaptor.capture(), eq(false), eq(false));
    // On flush third buffer written with flush = true
    verify(sink).deliverFrame(frameCaptor.capture(), eq(false), eq(true));

    // header has fixed length of 5 and specifies correct length
    assertEquals(5, frameCaptor.getAllValues().get(0).readableBytes());
    byte[] data = new byte[5];
    data[3] = 3;
    data[4] = (byte) 232;
    assertEquals(toWriteBuffer(data), frameCaptor.getAllValues().get(0));

    assertEquals(500, frameCaptor.getAllValues().get(1).readableBytes());
    assertEquals(500, frameCaptor.getAllValues().get(2).readableBytes());

    verifyNoMoreInteractions(sink);
    assertEquals(3, allocator.allocCount);
    checkStats(1000, 1000);
  }

  // With gzip set, the compressed-flag byte is 0x1 and the wire size shrinks.
  @Test
  public void compressed() throws Exception {
    allocator = new BytesWritableBufferAllocator(100, Integer.MAX_VALUE);
    // setMessageCompression should default to true
    framer = new MessageFramer(sink, allocator, statsTraceCtx).setCompressor(new Codec.Gzip());
    writeKnownLength(framer, new byte[1000]);
    framer.flush();
    // The GRPC header is written first as a separate frame.
    verify(sink).deliverFrame(frameCaptor.capture(), eq(false), eq(false));
    verify(sink).deliverFrame(frameCaptor.capture(), eq(false), eq(true));

    // Check the header
    ByteWritableBuffer buffer = frameCaptor.getAllValues().get(0);
    assertEquals(0x1, buffer.data[0]);
    ByteBuffer byteBuf = ByteBuffer.wrap(buffer.data, 1, 4);
    byteBuf.order(ByteOrder.BIG_ENDIAN);
    int length = byteBuf.getInt();
    // compressed data should be smaller than uncompressed data.
    assertTrue(length < 1000);
    assertEquals(frameCaptor.getAllValues().get(1).size(), length);

    checkStats(length, 1000);
  }

  // No compressor set: flag byte stays 0x0 even with compression requested.
  @Test
  public void dontCompressIfNoEncoding() throws Exception {
    allocator = new BytesWritableBufferAllocator(100, Integer.MAX_VALUE);
    framer = new MessageFramer(sink, allocator, statsTraceCtx)
        .setMessageCompression(true);
    writeKnownLength(framer, new byte[1000]);
    framer.flush();
    // The GRPC header is written first as a separate frame
    verify(sink).deliverFrame(frameCaptor.capture(), eq(false), eq(true));

    // Check the header
    ByteWritableBuffer buffer = frameCaptor.getAllValues().get(0);
    // We purposefully don't check the last byte of length, since that depends on how exactly it
    // compressed.
    assertEquals(0x0, buffer.data[0]);
    ByteBuffer byteBuf = ByteBuffer.wrap(buffer.data, 1, 4);
    byteBuf.order(ByteOrder.BIG_ENDIAN);
    int length = byteBuf.getInt();
    assertEquals(1000, length);

    assertEquals(buffer.data.length - 5 , length);
    checkStats(1000, 1000);
  }

  // Compressor set but message compression disabled: payload stays raw.
  @Test
  public void dontCompressIfNotRequested() throws Exception {
    allocator = new BytesWritableBufferAllocator(100, Integer.MAX_VALUE);
    framer = new MessageFramer(sink, allocator, statsTraceCtx)
        .setCompressor(new Codec.Gzip())
        .setMessageCompression(false);
    writeKnownLength(framer, new byte[1000]);
    framer.flush();
    // The GRPC header is written first as a separate frame
    verify(sink).deliverFrame(frameCaptor.capture(), eq(false), eq(true));

    // Check the header
    ByteWritableBuffer buffer = frameCaptor.getAllValues().get(0);
    // We purposefully don't check the last byte of length, since that depends on how exactly it
    // compressed.
    assertEquals(0x0, buffer.data[0]);
    ByteBuffer byteBuf = ByteBuffer.wrap(buffer.data, 1, 4);
    byteBuf.order(ByteOrder.BIG_ENDIAN);
    int length = byteBuf.getInt();
    assertEquals(1000, length);

    assertEquals(buffer.data.length - 5 , length);
    checkStats(1000, 1000);
  }

  // A close() triggered from inside the sink callback must not re-deliver.
  // NOTE(review): method name carries a typo ("Rentrant" -> "Reentrant");
  // left unchanged here since tests are discovered by @Test, not by name.
  @Test
  public void closeIsRentrantSafe() throws Exception {
    MessageFramer.Sink reentrant = new MessageFramer.Sink() {
      int count = 0;

      @Override
      public void deliverFrame(WritableBuffer frame, boolean endOfStream, boolean flush) {
        if (count == 0) {
          framer.close();
          count++;
        } else {
          fail("received event from reentrant call to close");
        }
      }
    };
    framer = new MessageFramer(reentrant, allocator, statsTraceCtx);
    writeKnownLength(framer, new byte[]{3, 14});
    framer.close();
  }

  // An empty message never gets the compressed flag, even with gzip enabled.
  @Test
  public void zeroLengthCompressibleMessageIsNotCompressed() {
    framer.setCompressor(new Codec.Gzip());
    framer.setMessageCompression(true);
    writeKnownLength(framer, new byte[]{});
    framer.flush();
    verify(sink).deliverFrame(toWriteBuffer(new byte[] {0, 0, 0, 0, 0}), false, true);
    checkStats(0, 0);
  }

  private static WritableBuffer toWriteBuffer(byte[] data) {
    return toWriteBufferWithMinSize(data, 0);
  }

  // Builds the expected buffer, padded to at least minFrameSize capacity so
  // equals() (which only compares readable bytes) lines up with the framer's.
  private static WritableBuffer toWriteBufferWithMinSize(byte[] data, int minFrameSize) {
    ByteWritableBuffer buffer = new ByteWritableBuffer(Math.max(data.length, minFrameSize));
    buffer.write(data, 0, data.length);
    return buffer;
  }

  // Writes via a BufferedInputStream so the framer cannot learn the length
  // up front (exercises the unknown-length path).
  private static void writeUnknownLength(MessageFramer framer, byte[] bytes) {
    framer.writePayload(new BufferedInputStream(new ByteArrayInputStream(bytes)));
  }

  // ByteArrayInputStream reports available() exactly, so the framer takes
  // the known-length path.
  private static void writeKnownLength(MessageFramer framer, byte[] bytes) {
    framer.writePayload(new ByteArrayInputStream(bytes));
    // TODO(carl-mastrangelo): add framer.flush() here.
  }

  /**
   * Replays the tracer's recorded outbound events and verifies per-message
   * and cumulative wire/uncompressed byte counts.
   *
   * @param sizes in the format {wire0, uncompressed0, wire1, uncompressed1, ...}
   */
  private void checkStats(long... sizes) {
    assertEquals(0, sizes.length % 2);
    int count = sizes.length / 2;
    long expectedWireSize = 0;
    long expectedUncompressedSize = 0;
    for (int i = 0; i < count; i++) {
      assertEquals("outboundMessage(" + i + ")", tracer.nextOutboundEvent());
      assertEquals("outboundMessage()", tracer.nextOutboundEvent());
      assertEquals(
          String.format("outboundMessageSent(%d, %d, %d)", i, sizes[i * 2], sizes[i * 2 + 1]),
          tracer.nextOutboundEvent());
      expectedWireSize += sizes[i * 2];
      expectedUncompressedSize += sizes[i * 2 + 1];
    }
    assertNull(tracer.nextOutboundEvent());
    assertNull(tracer.nextInboundEvent());
    assertEquals(expectedWireSize, tracer.getOutboundWireSize());
    assertEquals(expectedUncompressedSize, tracer.getOutboundUncompressedSize());
  }

  // Fixed-capacity WritableBuffer over a plain byte array; equality compares
  // only the readable (written) prefix, ignoring unused capacity.
  static class ByteWritableBuffer implements WritableBuffer {
    byte[] data;
    private int writeIdx;

    ByteWritableBuffer(int maxFrameSize) {
      data = new byte[maxFrameSize];
    }

    @Override
    public void write(byte[] bytes, int srcIndex, int length) {
      System.arraycopy(bytes, srcIndex, data, writeIdx, length);
      writeIdx += length;
    }

    @Override
    public void write(byte b) {
      data[writeIdx++] = b;
    }

    @Override
    public int writableBytes() {
      return data.length - writeIdx;
    }

    @Override
    public int readableBytes() {
      return writeIdx;
    }

    @Override
    public void release() {
      data = null;
    }

    int size() {
      return writeIdx;
    }

    @Override
    public boolean equals(Object buffer) {
      if (!(buffer instanceof ByteWritableBuffer)) {
        return false;
      }

      ByteWritableBuffer other = (ByteWritableBuffer) buffer;

      return readableBytes() == other.readableBytes()
          && Arrays.equals(Arrays.copyOf(data, readableBytes()),
              Arrays.copyOf(other.data, readableBytes()));
    }

    @Override
    public int hashCode() {
      return Arrays.hashCode(data) + writableBytes() + readableBytes();
    }
  }

  // Allocator that counts allocations and clamps the requested capacity to
  // [minSize, maxSize], letting tests force buffer splits.
  static class BytesWritableBufferAllocator implements WritableBufferAllocator {

    public int minSize;
    public int maxSize;
    public int allocCount = 0;

    BytesWritableBufferAllocator(int minSize, int maxSize) {
      this.minSize = minSize;
      this.maxSize = maxSize;
    }

    @Override
    public WritableBuffer allocate(int capacityHint) {
      allocCount++;
      return new ByteWritableBuffer(Math.min(maxSize, Math.max(capacityHint, minSize)));
    }
  }
}
package magpie.models; import magpie.models.interfaces.MultiModel; import java.util.*; import magpie.data.BaseEntry; import magpie.data.Dataset; import magpie.data.utilities.splitters.BaseDatasetSplitter; import magpie.models.utility.MultiModelUtility; import magpie.user.CommandHandler; import org.apache.commons.lang3.math.NumberUtils; /** * Abstract class for a model that splits the dataset and trains * several submodels. It contains functions necessary to do this partitioning and keep track * of the submodels. * * <p><b><u>How to Use a SplitModel</u></b> * * <p>SplitModels work by first partitioning a Dataset using a {@linkplain BaseDatasetSplitter} * and then training several models * * <usage><p><b>Usage</b>: *No options to set*</usage> * * <p><b><u>Implemented Commands:</u></b> * * <command><p><b>splitter &lt;method> [&lt;options...>]</b> - Define splitter used to partition dataset between models * <br><pr><i>method</i>: Method used to split data. Name of a {@linkplain BaseDatasetSplitter} ("?" for options) * <br><pr><i>options</i>: Any options for the splitter</command> * * <command><p><b>submodel</b> - Print the number of submodels</command> * * <command><p><b>submodel set generic $&lt;model></b> - Define a model template to use for all submodels * <br><pr><i>model</i>: An instance of {@linkplain BaseModel}. * Note: Do not use this command for {@linkplain CompositeRegression} unless each * model automatically uses a different random number seed. 
Otherwise, each * submodel will be identical.</command> * * <command><p><b>submodel set &lt;number> $&lt;model></b> - Set a specific submodel * <br><pr><i>number</i>: Index of the submodel to set (list starts with 0) * <br><pr><i>model</i>: An instance of {@linkplain BaseModel} to use for that model</command> * * <command><p><b>submodel get generic = &lt;output></b> - Retrieve the template for any unassigned submodels</command> * * <command><p><b>submodel get &lt;number> = &lt;output></b> - Retrieve a specific submodel * <br><pr><i>number</i>: Index of submodel to retrieve (list starts with 0) * Returns a clone of the model - you cannot use this to edit the model.</command> * * <p><b><u>Implemented Print Commands:</u></b> * * <print><p><b>splitter</b> - Print out the name of splitter used by this model</print> * * <print><p><b>submodel</b> - Print out number of submodels</print> * * <print><p><b>submodel &lt;number> [&lt;command...>]</b> - Pass a print command to one of the submodels * <br><pr><i>number</i>: Index of model to operate on (starts at 0) * <br><pr><i>command</i>: Print command that gets passed to that submodel</print> * * @author Logan Ward * @version 1.0 */ abstract public class SplitModel extends BaseModel implements MultiModel { /** * List of of models used by this model */ protected ArrayList<BaseModel> Model = new ArrayList<>(2); /** * Class used to partition data into similar groups */ protected BaseDatasetSplitter Partitioner = null; /** * Model used to if a model template for a certain split is not defined */ protected BaseModel GenericModel = null; @Override public SplitModel clone() { SplitModel x; x = (SplitModel) super.clone(); x.Partitioner = Partitioner.clone(); x.Model = new ArrayList<>(NModels()); for (int i=0; i<NModels(); i++) { x.Model.add(Model.get(i) != null ? 
Model.get(i).clone() : null); } return x; } @Override public void setOptions(List Options) throws Exception { /** Nothing to set */ } @Override public String printUsage() { return "Usage: *No options to set*"; } @Override public BaseModel getModel(int index) { return Model.get(index); } @Override public void setNumberOfModels(int n) { Model.ensureCapacity(n); // If we have too few, add generic models (or null) while (Model.size() < n) if (GenericModel == null) Model.add(null); else Model.add(GenericModel.clone()); // If we have too many, remove the last ones while (Model.size() > n ) { Model.remove(Model.size()-1); } } /** * Returns the number of model slots currently available */ @Override public int NModels() { return Model.size(); } /** * Set the model template * @param x Template model (will be cloned) */ @Override public void setGenericModel(BaseModel x) { GenericModel = (BaseModel) x.clone(); } @Override public BaseModel getGenericModel() { return GenericModel; } @Override public void setModel(int index, BaseModel x) { resetModel(); if (NModels() <= index) setNumberOfModels(index+1); this.Model.set(index, x); } /** Set the partitioner. * @param S Dataset splitter */ public void setPartitioner(BaseDatasetSplitter S) { resetModel(); this.Partitioner = S; } /** * Checks if enough models are defined. Throw error otherwise * @param n Number of models required */ protected void checkModelCount(int n) { if (NModels() < n) throw new Error("Insufficent number of models. 
Need: " +n+" - Available: "+NModels()); for (int i=0; i<n; i++) { if (Model.get(i) == null) { if (GenericModel == null) { throw new Error("Model " + i + " not defined."); } else { Model.set(i, GenericModel.clone()); } } } } @Override protected void train_protected(Dataset TrainingData) { Partitioner.train(TrainingData); List<Dataset> SplitData = Partitioner.split(TrainingData); setNumberOfModels(SplitData.size()); checkModelCount(SplitData.size()); for (int i=0; i<SplitData.size(); i++) { if (SplitData.get(i).NEntries() == 0) System.err.println("WARNING: No entries provided to train submodel #" + i); else Model.get(i).train(SplitData.get(i), true); } TrainingData.combine(SplitData); trained=true; } @Override public void run_protected(Dataset Data) { // Determine / act on split int[] label = Partitioner.label(Data); List<Dataset> SplitData = new LinkedList<>(); for (int i=0; i <= NumberUtils.max(label); i++) { SplitData.add(Data.emptyClone()); } Iterator<BaseEntry> iter = Data.getEntries().iterator(); int i=0; while (iter.hasNext()) { BaseEntry E = iter.next(); SplitData.get(label[i]).addEntry(E); i++; iter.remove(); } // Run the models checkModelCount(SplitData.size()); for (i=0; i<SplitData.size(); i++) if (SplitData.get(i).NEntries() > 0) if (Model.get(i).isTrained()) Model.get(i).run(SplitData.get(i)); else throw new Error("ERROR: Submodel #" + i + "has not yet been trained"); // Combine results (preserving order) List<Iterator<BaseEntry>> iters = new ArrayList<>(SplitData.size()); for (i=0; i < SplitData.size(); i++) { iters.add(SplitData.get(i).getEntries().iterator()); } for (i=0; i < label.length; i++) { Data.addEntry(iters.get(label[i]).next()); } } @Override public String printCommand(List<String> Command) throws Exception { if (Command.isEmpty()) return super.printCommand(Command); // Handle extra commands for split models switch (Command.get(0).toLowerCase()) { case "submodel": { return MultiModelUtility.handleSubmodelPrintCommand(this, Command); } case 
"splitter": return "Splitter type: " + Partitioner.getClass().getSimpleName(); default: return super.printCommand(Command); } } @Override protected String printModel_protected() { String output = ""; for (int i=0; i < NModels(); i++) { output += "Submodel #" + i + ":\n"; output += getModel(i).printModel(); } return output; } @Override public List<String> printModelDescriptionDetails(boolean htmlFormat) { List<String> output = super.printModelDescriptionDetails(htmlFormat); // Print out partitioner and its details String[] partr = Partitioner.printDescription(htmlFormat).split("\n"); partr[0] = "Partitioner: " + partr[0]; output.addAll(Arrays.asList(partr)); // Print out submodel details List<String> splitNames = Partitioner.getSplitNames(); for (int i=0; i<NModels(); i++) { String[] submodel = getModel(i).printDescription(htmlFormat).split("\n"); submodel[0] = splitNames.get(i) + ": " + submodel[0]; output.addAll(Arrays.asList(submodel)); } return output; } @Override public Object runCommand(List<Object> Command) throws Exception { if (Command.isEmpty()) return super.runCommand(Command); String Action = Command.get(0).toString().toLowerCase(); switch (Action) { case "submodel": return MultiModelUtility.handleSubmodelCommand(this, Command.subList(1, Command.size())); case "splitter": { String Method; List<Object> MethodOptions; try { Method = Command.get(1).toString(); MethodOptions = Command.subList(2, Command.size()); } catch (Exception e) { throw new Exception("splitter <method> <options...>"); } BaseDatasetSplitter splitter = (BaseDatasetSplitter) CommandHandler.instantiateClass("data.utilities.splitters." + Method, MethodOptions); setPartitioner(splitter); return null; } default: return super.runCommand(Command); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.distributed.internal.membership.gms.messenger; import static org.apache.geode.distributed.internal.membership.gms.GMSUtil.replaceStrings; import static org.apache.geode.internal.DataSerializableFixedID.FIND_COORDINATOR_REQ; import static org.apache.geode.internal.DataSerializableFixedID.FIND_COORDINATOR_RESP; import static org.apache.geode.internal.DataSerializableFixedID.JOIN_REQUEST; import static org.apache.geode.internal.DataSerializableFixedID.JOIN_RESPONSE; import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; import org.apache.geode.DataSerializer; import org.apache.geode.ForcedDisconnectException; import org.apache.geode.GemFireConfigException; import org.apache.geode.GemFireIOException; import org.apache.geode.SystemConnectException; import org.apache.geode.distributed.DistributedMember; import org.apache.geode.distributed.DistributedSystemDisconnectedException; import org.apache.geode.distributed.DurableClientAttributes; import org.apache.geode.distributed.internal.DMStats; import org.apache.geode.distributed.internal.DistributionConfig; import org.apache.geode.distributed.internal.DistributionManager; import 
org.apache.geode.distributed.internal.DistributionMessage; import org.apache.geode.distributed.internal.DistributionStats; import org.apache.geode.distributed.internal.HighPriorityDistributionMessage; import org.apache.geode.distributed.internal.membership.InternalDistributedMember; import org.apache.geode.distributed.internal.membership.MemberAttributes; import org.apache.geode.distributed.internal.membership.NetView; import org.apache.geode.distributed.internal.membership.QuorumChecker; import org.apache.geode.distributed.internal.membership.gms.GMSMember; import org.apache.geode.distributed.internal.membership.gms.Services; import org.apache.geode.distributed.internal.membership.gms.interfaces.MessageHandler; import org.apache.geode.distributed.internal.membership.gms.interfaces.Messenger; import org.apache.geode.distributed.internal.membership.gms.locator.FindCoordinatorRequest; import org.apache.geode.distributed.internal.membership.gms.locator.FindCoordinatorResponse; import org.apache.geode.distributed.internal.membership.gms.messages.JoinRequestMessage; import org.apache.geode.distributed.internal.membership.gms.messages.JoinResponseMessage; import org.apache.geode.internal.ClassPathLoader; import org.apache.geode.internal.HeapDataOutputStream; import org.apache.geode.internal.InternalDataSerializer; import org.apache.geode.internal.OSProcess; import org.apache.geode.internal.Version; import org.apache.geode.internal.VersionedDataInputStream; import org.apache.geode.internal.admin.remote.RemoteTransportConfig; import org.apache.geode.internal.cache.DirectReplyMessage; import org.apache.geode.internal.cache.DistributedCacheOperation; import org.apache.geode.internal.i18n.LocalizedStrings; import org.apache.geode.internal.logging.log4j.AlertAppender; import org.apache.geode.internal.logging.log4j.LocalizedMessage; import org.apache.geode.internal.net.SocketCreator; import org.apache.geode.internal.tcp.MemberShunnedException; import 
org.apache.logging.log4j.Logger;
import org.jgroups.Address;
import org.jgroups.Event;
import org.jgroups.JChannel;
import org.jgroups.Message;
import org.jgroups.Message.Flag;
import org.jgroups.Message.TransientFlag;
import org.jgroups.ReceiverAdapter;
import org.jgroups.View;
import org.jgroups.ViewId;
import org.jgroups.conf.ClassConfigurator;
import org.jgroups.protocols.UDP;
import org.jgroups.protocols.pbcast.NAKACK2;
import org.jgroups.protocols.pbcast.NakAckHeader2;
import org.jgroups.stack.IpAddress;
import org.jgroups.util.Digest;
import org.jgroups.util.UUID;

import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;

/**
 * Membership messenger implementation backed by a JGroups channel. Responsible for
 * building the JGroups protocol-stack configuration from Geode's DistributionConfig,
 * starting/stopping the channel, and sending/receiving membership messages.
 */
@SuppressWarnings("StatementWithEmptyBody")
public class JGroupsMessenger implements Messenger {

  private static final Logger logger = Services.getLogger();

  /**
   * The location (in the product) of the locator Jgroups config file.
   */
  private static final String DEFAULT_JGROUPS_TCP_CONFIG =
      "org/apache/geode/distributed/internal/membership/gms/messenger/jgroups-config.xml";

  /**
   * The location (in the product) of the mcast Jgroups config file.
   */
  private static final String JGROUPS_MCAST_CONFIG_FILE_NAME =
      "org/apache/geode/distributed/internal/membership/gms/messenger/jgroups-mcast.xml";

  /** JG magic numbers for types added to the JG ClassConfigurator */
  private static final short JGROUPS_TYPE_JGADDRESS = 2000;
  private static final short JGROUPS_PROTOCOL_TRANSPORT = 1000;

  // Test hook: when true, start() throws to simulate a startup failure
  public static boolean THROW_EXCEPTION_ON_START_HOOK;

  // The fully-substituted protocol-stack XML, built in init() and consumed by start()
  private String jgStackConfig;

  JChannel myChannel;
  InternalDistributedMember localAddress;
  JGAddress jgAddress;
  private Services services;

  /** handlers that receive certain classes of messages instead of the Manager */
  private final Map<Class, MessageHandler> handlers = new ConcurrentHashMap<>();

  private volatile NetView view;

  private final GMSPingPonger pingPonger = new GMSPingPonger();

  protected final AtomicLong pongsReceived = new AtomicLong(0);

  /** tracks multicast messages that have been scheduled for processing */
  protected final Map<DistributedMember, MessageTracker> scheduledMcastSeqnos = new HashMap<>();

  protected short nackack2HeaderId;

  /**
   * A set that contains addresses that we have logged JGroups IOExceptions for in the current
   * membership view and possibly initiated suspect processing. This reduces the amount of suspect
   * processing initiated by IOExceptions and the amount of exceptions logged
   */
  private final Set<Address> addressesWithIoExceptionsProcessed =
      Collections.synchronizedSet(new HashSet<Address>());

  static {
    // register classes that we've added to jgroups that are put on the wire
    // or need a header ID
    ClassConfigurator.add(JGROUPS_TYPE_JGADDRESS, JGAddress.class);
    ClassConfigurator.addProtocol(JGROUPS_PROTOCOL_TRANSPORT, Transport.class);
  }

  private GMSEncrypt encrypt;

  /**
   * Reads the appropriate JGroups protocol-stack template (mcast or tcp), substitutes
   * Geode configuration values into its placeholders, and stores the result in
   * {@link #jgStackConfig} for {@link #start()}. Also initializes UDP encryption when a
   * security DH algorithm is configured.
   */
  @Override
  @edu.umd.cs.findbugs.annotations.SuppressWarnings(
      value = "ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD")
  public void init(Services s) {
    this.services = s;

    RemoteTransportConfig transport = services.getConfig().getTransport();
    DistributionConfig dc = services.getConfig().getDistributionConfig();

    // With network-partition detection on, disable DNS resolution in JGroups
    boolean b = dc.getEnableNetworkPartitionDetection();
    System.setProperty("jgroups.resolve_dns", String.valueOf(!b));

    InputStream is;

    String r;
    if (transport.isMcastEnabled()) {
      r = JGROUPS_MCAST_CONFIG_FILE_NAME;
    } else {
      r = DEFAULT_JGROUPS_TCP_CONFIG;
    }
    is = ClassPathLoader.getLatest().getResourceAsStream(getClass(), r);
    if (is == null) {
      throw new GemFireConfigException(
          LocalizedStrings.GroupMembershipService_CANNOT_FIND_0.toLocalizedString(r));
    }

    String properties;
    try {
      // PlainConfigurator config = PlainConfigurator.getInstance(is);
      // properties = config.getProtocolStackString();
      StringBuilder sb = new StringBuilder(3000);
      BufferedReader br;
      br = new BufferedReader(new InputStreamReader(is, "US-ASCII"));
      String input;
      while ((input = br.readLine()) != null) {
        sb.append(input);
      }
      br.close();
      properties = sb.toString();
    } catch (Exception ex) {
      throw new GemFireConfigException(
          LocalizedStrings.GroupMembershipService_AN_EXCEPTION_WAS_THROWN_WHILE_READING_JGROUPS_CONFIG
              .toLocalizedString(),
          ex);
    }

    // Strip a leading XML comment block (e.g. the license header) from the template
    if (properties.startsWith("<!--")) {
      int commentEnd = properties.indexOf("-->");
      properties = properties.substring(commentEnd + 3);
    }

    if (transport.isMcastEnabled()) {
      properties = replaceStrings(properties, "MCAST_PORT",
          String.valueOf(transport.getMcastId().getPort()));
      properties =
          replaceStrings(properties, "MCAST_ADDRESS", dc.getMcastAddress().getHostAddress());
      properties = replaceStrings(properties, "MCAST_TTL", String.valueOf(dc.getMcastTtl()));
      properties = replaceStrings(properties, "MCAST_SEND_BUFFER_SIZE",
          String.valueOf(dc.getMcastSendBufferSize()));
      properties = replaceStrings(properties, "MCAST_RECV_BUFFER_SIZE",
          String.valueOf(dc.getMcastRecvBufferSize()));
      properties = replaceStrings(properties, "MCAST_RETRANSMIT_INTERVAL", "" + Integer
          .getInteger(DistributionConfig.GEMFIRE_PREFIX + "mcast-retransmit-interval", 500));
      properties = replaceStrings(properties, "RETRANSMIT_LIMIT",
          String.valueOf(dc.getUdpFragmentSize() - 256));
    }

    if (transport.isMcastEnabled() || transport.isTcpDisabled()
        || (dc.getUdpRecvBufferSize() != DistributionConfig.DEFAULT_UDP_RECV_BUFFER_SIZE)) {
      properties =
          replaceStrings(properties, "UDP_RECV_BUFFER_SIZE", "" + dc.getUdpRecvBufferSize());
    } else {
      // UDP is only a fallback transport here, so use the reduced receive buffer
      properties = replaceStrings(properties, "UDP_RECV_BUFFER_SIZE",
          "" + DistributionConfig.DEFAULT_UDP_RECV_BUFFER_SIZE_REDUCED);
    }
    properties = replaceStrings(properties, "UDP_SEND_BUFFER_SIZE", "" + dc.getUdpSendBufferSize());

    String str = transport.getBindAddress();
    // JGroups UDP protocol requires a bind address
    if (str == null || str.length() == 0) {
      try {
        str = SocketCreator.getLocalHost().getHostAddress();
      } catch (UnknownHostException e) {
        throw new GemFireConfigException(e.getMessage(), e);
      }
    }
    properties = replaceStrings(properties, "BIND_ADDR_SETTING", "bind_addr=\"" + str + "\"");

    // A fixed bind port (system property) pins the port range to a single port
    int port = Integer.getInteger(DistributionConfig.GEMFIRE_PREFIX + "jg-bind-port", 0);
    if (port != 0) {
      properties = replaceStrings(properties, "MEMBERSHIP_PORT_RANGE_START", "" + port);
      properties = replaceStrings(properties, "MEMBERSHIP_PORT_RANGE", "" + 0);
    } else {
      int[] ports = dc.getMembershipPortRange();
      properties = replaceStrings(properties, "MEMBERSHIP_PORT_RANGE_START", "" + ports[0]);
      properties = replaceStrings(properties, "MEMBERSHIP_PORT_RANGE", "" + (ports[1] - ports[0]));
    }

    properties = replaceStrings(properties, "UDP_FRAGMENT_SIZE", "" + dc.getUdpFragmentSize());

    // Multicast flow-control parameters
    properties = replaceStrings(properties, "FC_MAX_CREDITS",
        "" + dc.getMcastFlowControl().getByteAllowance());
    properties = replaceStrings(properties, "FC_THRESHOLD",
        "" + dc.getMcastFlowControl().getRechargeThreshold());
    properties = replaceStrings(properties, "FC_MAX_BLOCK",
        "" + dc.getMcastFlowControl().getRechargeBlockMs());

    this.jgStackConfig = properties;

    if (!dc.getSecurityUDPDHAlgo().isEmpty()) {
      try {
        this.encrypt = new GMSEncrypt(services);
        logger.info("Initializing GMSEncrypt ");
      } catch (Exception e) {
        throw new GemFireConfigException("problem initializing encryption protocol", e);
      }
    }
  }

  /**
   * Starts (or, on auto-reconnect, re-uses) the JGroups channel and establishes this
   * member's address. Throws SystemConnectException when the channel cannot connect.
   */
  @Override
  @edu.umd.cs.findbugs.annotations.SuppressWarnings(
      value = "ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD")
  public void start() {
    // create the configuration XML string for JGroups
    String properties = this.jgStackConfig;

    long start = System.currentTimeMillis();

    // start the jgroups channel and establish the membership ID
    boolean reconnecting = false;
    try {
      Object oldChannel = services.getConfig().getTransport().getOldDSMembershipInfo();
      if (oldChannel != null) {
        logger.debug("Reusing JGroups channel from previous system", properties);

        myChannel = (JChannel) oldChannel;
        // scrub the old channel: install an empty view and reset the view id
        ViewId vid = new ViewId(new JGAddress(), 0);
        List<Address> members = new ArrayList<>();
        members.add(new UUID(0, 0));// TODO open a JGroups JIRA for GEODE-3034
        View jgv = new View(vid, members);
        this.myChannel.down(new Event(Event.VIEW_CHANGE, jgv));
        UUID logicalAddress = (UUID) myChannel.getAddress();
        if (logicalAddress instanceof JGAddress) {
          ((JGAddress) logicalAddress).setVmViewId(-1);
        }
        reconnecting = true;
      } else {
        logger.debug("JGroups configuration: {}", properties);

        checkForIPv6();
        InputStream is = new
ByteArrayInputStream(properties.getBytes("UTF-8"));
        myChannel = new JChannel(is);
      }
    } catch (Exception e) {
      throw new GemFireConfigException("unable to create jgroups channel", e);
    }

    // give the stats to the jchannel statistics recorder
    StatRecorder sr = (StatRecorder) myChannel.getProtocolStack().findProtocol(StatRecorder.class);
    if (sr != null) {
      sr.setServices(services);
    }

    Transport transport = (Transport) myChannel.getProtocolStack().getTransport();
    transport.setMessenger(this);

    nackack2HeaderId = ClassConfigurator.getProtocolId(NAKACK2.class);

    try {
      myChannel.setReceiver(null);
      myChannel.setReceiver(new JGroupsReceiver());
      if (!reconnecting) {
        myChannel.connect("AG"); // apache g***** (whatever we end up calling it)
      }
    } catch (Exception e) {
      myChannel.close();
      throw new SystemConnectException("unable to create jgroups channel", e);
    }

    if (JGroupsMessenger.THROW_EXCEPTION_ON_START_HOOK) {
      JGroupsMessenger.THROW_EXCEPTION_ON_START_HOOK = false;
      throw new SystemConnectException("failing for test");
    }

    establishLocalAddress();

    logger.info("JGroups channel {} (took {}ms)", (reconnecting ? "reinitialized" : "created"),
        System.currentTimeMillis() - start);
  }

  /**
   * JGroups picks an IPv6 address if preferIPv4Stack is false or not set and preferIPv6Addresses is
   * not set or is true. We want it to use an IPv4 address for a dual-IP stack so that both IPv4 and
   * IPv6 messaging work
   */
  private void checkForIPv6() throws Exception {
    boolean preferIpV6Addr = Boolean.getBoolean("java.net.preferIPv6Addresses");
    if (!preferIpV6Addr) {
      logger.debug("forcing JGroups to think IPv4 is being used so it will choose an IPv4 address");
      // Reach into JGroups via reflection since ip_stack_type is not publicly settable
      Field m = org.jgroups.util.Util.class.getDeclaredField("ip_stack_type");
      m.setAccessible(true);
      m.set(null, org.jgroups.util.StackType.IPv4);
    }
  }

  @Override
  public void started() {}

  /**
   * Closes the channel unless the member is expected to auto-reconnect, in which case
   * the channel is deliberately left open for reuse.
   */
  @Override
  public void stop() {
    if (this.myChannel != null) {
      if ((services.isShutdownDueToForcedDisconnect() && services.isAutoReconnectEnabled())
          || services.getManager().isReconnectingDS()) {
        // leave the channel open for reconnect attempts
      } else {
        this.myChannel.close();
      }
    }
  }

  @Override
  public void stopped() {}

  @Override
  public void memberSuspected(InternalDistributedMember initiator,
      InternalDistributedMember suspect, String reason) {}

  /**
   * Installs a new membership view: pushes the equivalent JGroups view down the stack,
   * resets per-view IOException bookkeeping, propagates the view to the encryption
   * layer, and drops multicast tracking for departed members.
   */
  @Override
  public void installView(NetView v) {
    this.view = v;

    if (this.jgAddress.getVmViewId() < 0) {
      this.jgAddress.setVmViewId(this.localAddress.getVmViewId());
    }
    List<JGAddress> mbrs = new ArrayList<>(v.size());
    mbrs.addAll(v.getMembers().stream().map(JGAddress::new).collect(Collectors.toList()));
    ViewId vid = new ViewId(new JGAddress(v.getCoordinator()), v.getViewId());
    View jgv = new View(vid, new ArrayList<>(mbrs));
    logger.trace("installing JGroups view: {}", jgv);
    this.myChannel.down(new Event(Event.VIEW_CHANGE, jgv));

    // A new view resets the "already reported" set so fresh IO problems get processed
    addressesWithIoExceptionsProcessed.clear();
    if (encrypt != null) {
      encrypt.installView(v);
    }
    synchronized (scheduledMcastSeqnos) {
      for (DistributedMember mbr : v.getCrashedMembers()) {
        scheduledMcastSeqnos.remove(mbr);
      }
      for (DistributedMember mbr : v.getShutdownMembers()) {
        scheduledMcastSeqnos.remove(mbr);
      }
    }
  }


  /**
   * If JGroups is unable to send a message it may mean that the network is down. If so we need to
   * initiate suspect processing on the recipient.
   * <p>
   * see Transport._send()
   */
  @SuppressWarnings("UnusedParameters")
  public void handleJGroupsIOException(IOException e, Address dest) {
    if (services.getManager().shutdownInProgress()) { // GEODE-634 - don't log IOExceptions during
                                                      // shutdown
      return;
    }
    // Only process each address once per view.
    // NOTE(review): contains()-then-add() is not atomic, so two threads could both pass
    // the check; worst case is a duplicate suspect request — confirm this is acceptable.
    if (addressesWithIoExceptionsProcessed.contains(dest)) {
      return;
    }
    addressesWithIoExceptionsProcessed.add(dest);
    NetView v = this.view;
    JGAddress jgMbr = (JGAddress) dest;
    if (jgMbr != null && v != null) {
      // Match the JGroups address to a member by UUID bits and view id
      List<InternalDistributedMember> members = v.getMembers();
      InternalDistributedMember recipient = null;
      for (InternalDistributedMember mbr : members) {
        GMSMember gmsMbr = ((GMSMember) mbr.getNetMember());
        if (jgMbr.getUUIDLsbs() == gmsMbr.getUuidLSBs()
            && jgMbr.getUUIDMsbs() == gmsMbr.getUuidMSBs()
            && jgMbr.getVmViewId() == gmsMbr.getVmViewId()) {
          recipient = mbr;
          break;
        }
      }
      if (recipient != null) {
        services.getHealthMonitor().suspect(recipient,
            "Unable to send messages to this member via JGroups");
      }
    }
  }

  /**
   * Determines this member's physical address from the channel, installs it in the
   * JGroups stack, and builds the InternalDistributedMember identity (localAddress).
   */
  private void establishLocalAddress() {
    UUID logicalAddress = (UUID) myChannel.getAddress();
    logicalAddress = logicalAddress.copy();

    IpAddress ipaddr = (IpAddress) myChannel.down(new Event(Event.GET_PHYSICAL_ADDRESS));

    if (ipaddr != null) {
      this.jgAddress = new JGAddress(logicalAddress, ipaddr);
    } else {
      // Fall back to asking the UDP protocol directly (package-private method)
      UDP udp = (UDP) myChannel.getProtocolStack().getTransport();

      try {
        Method getAddress = UDP.class.getDeclaredMethod("getPhysicalAddress");
        getAddress.setAccessible(true);
        ipaddr = (IpAddress) getAddress.invoke(udp, new Object[0]);
        this.jgAddress = new JGAddress(logicalAddress, ipaddr);
      } catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) {
        logger
            .info("Unable to find getPhysicallAddress method in UDP - parsing its address instead");
      }

      // if (this.jgAddress == null) {
      // String addr = udp.getLocalPhysicalAddress();
      // int cidx = addr.lastIndexOf(':'); // IPv6 literals might have colons
      // String host = addr.substring(0, cidx);
      // int jgport = Integer.parseInt(addr.substring(cidx+1, addr.length()));
      // try {
      // this.jgAddress = new JGAddress(logicalAddress, new IpAddress(InetAddress.getByName(host),
      // jgport));
      // } catch (UnknownHostException e) {
      // myChannel.disconnect();
      // throw new SystemConnectException("unable to initialize jgroups address", e);
      // }
      // }
    }

    // install the address in the JGroups channel protocols
    myChannel.down(new Event(Event.SET_LOCAL_ADDRESS, this.jgAddress));

    DistributionConfig config = services.getConfig().getDistributionConfig();
    boolean isLocator = (services.getConfig().getTransport()
        .getVmKind() == DistributionManager.LOCATOR_DM_TYPE)
        || !services.getConfig().getDistributionConfig().getStartLocator().isEmpty();

    // establish the DistributedSystem's address
    DurableClientAttributes dca = null;
    if (config.getDurableClientId() != null) {
      dca = new DurableClientAttributes(config.getDurableClientId(),
          config.getDurableClientTimeout());
    }
    MemberAttributes attr = new MemberAttributes(-1/* dcPort - not known at this time */,
        OSProcess.getId(), services.getConfig().getTransport().getVmKind(),
        -1/* view id - not known at this time */, config.getName(),
        MemberAttributes.parseGroups(config.getRoles(), config.getGroups()), dca);
    localAddress = new InternalDistributedMember(jgAddress.getInetAddress(), jgAddress.getPort(),
        config.getEnableNetworkPartitionDetection(), isLocator, attr);

    // add the JGroups logical address to the GMSMember
    UUID uuid = this.jgAddress;
    GMSMember gmsMember = (GMSMember) localAddress.getNetMember();
    gmsMember.setUUID(uuid);
    gmsMember.setMemberWeight((byte) (services.getConfig().getMemberWeight() & 0xff));
    gmsMember.setNetworkPartitionDetectionEnabled(
        services.getConfig().getDistributionConfig().getEnableNetworkPartitionDetection());
  }

  @Override
  public void beSick() {}

  @Override
  public void playDead() {}

  @Override
  public void beHealthy() {}

  @Override
  public void addHandler(Class c, MessageHandler h) {
    handlers.put(c, h);
  }

  /**
   * Sends a multicast ping and waits up to {@code timeout} ms for any pong, polling
   * every 100ms. Returns true when at least one pong arrived.
   */
  @Override
  public boolean testMulticast(long timeout) throws InterruptedException
{
    long pongsSnapshot = pongsReceived.longValue();
    JGAddress dest = null;
    try {
      // noinspection ConstantConditions
      pingPonger.sendPingMessage(myChannel, jgAddress, dest);
    } catch (Exception e) {
      logger.warn("unable to send multicast message: {}",
          (jgAddress == null ? "multicast recipients" : jgAddress), e.getMessage());
      return false;
    }
    long giveupTime = System.currentTimeMillis() + timeout;
    while (pongsReceived.longValue() == pongsSnapshot && System.currentTimeMillis() < giveupTime) {
      Thread.sleep(100);
    }
    return pongsReceived.longValue() > pongsSnapshot;
  }

  /**
   * Records the current NAKACK2 multicast sequence number in {@code state} so a peer
   * can later wait (via waitForMessageState) until it has caught up to that point.
   */
  @Override
  public void getMessageState(InternalDistributedMember target, Map state,
      boolean includeMulticast) {
    if (includeMulticast) {
      NAKACK2 nakack = (NAKACK2) myChannel.getProtocolStack().findProtocol("NAKACK2");
      if (nakack != null) {
        long seqno = nakack.getCurrentSeqno();
        state.put("JGroups.mcastState", Long.valueOf(seqno));
      }
    }
  }

  /**
   * Blocks until multicast messages from {@code sender} up through the sequence number
   * recorded in {@code state} have been scheduled locally. Warns after one
   * ack-wait-threshold and throws GemFireIOException after roughly two.
   */
  @Override
  public void waitForMessageState(InternalDistributedMember sender, Map state)
      throws InterruptedException {
    Long seqno = (Long) state.get("JGroups.mcastState");
    if (seqno == null) {
      return;
    }
    long timeout = services.getConfig().getDistributionConfig().getAckWaitThreshold() * 1000L;
    long startTime = System.currentTimeMillis();
    long warnTime = startTime + timeout;
    long quitTime = warnTime + timeout - 1000L;
    boolean warned = false;

    for (;;) {
      String received = "none";
      long highSeqno = 0;
      synchronized (scheduledMcastSeqnos) {
        MessageTracker tracker = scheduledMcastSeqnos.get(sender);
        if (tracker == null) { // no longer in the membership view
          break;
        }
        highSeqno = tracker.get();
      }
      if (logger.isDebugEnabled()) {
        logger.debug(
            "waiting for multicast messages from {}. Current seqno={} and expected seqno={}",
            sender, highSeqno, seqno);
      }
      if (highSeqno >= seqno.longValue()) {
        break;
      }
      long now = System.currentTimeMillis();
      if (!warned && now >= warnTime) {
        warned = true;
        received = String.valueOf(highSeqno);
        logger.warn(
            "{} seconds have elapsed while waiting for multicast messages from {}. Received {} but expecting at least {}.",
            Long.toString((warnTime - startTime) / 1000L), sender, received, seqno);
      }
      if (now >= quitTime) {
        throw new GemFireIOException("Multicast operations from " + sender
            + " did not distribute within " + (now - startTime) + " milliseconds");
      }
      Thread.sleep(50);
    }
  }

  @Override
  public Set<InternalDistributedMember> sendUnreliably(DistributionMessage msg) {
    return send(msg, false);
  }

  @Override
  public Set<InternalDistributedMember> send(DistributionMessage msg) {
    return send(msg, true);
  }

  private Set<InternalDistributedMember> send(DistributionMessage msg, boolean reliably) {

    // perform the same jgroups messaging as in 8.2's GMSMembershipManager.send() method

    // BUT: when marshalling messages we need to include the version of the product and
    // localAddress at the beginning of the message. These should be used in the receiver
    // code to create a versioned input stream, read the sender address, then read the message
    // and set its sender address
    DMStats theStats = services.getStatistics();
    NetView oldView = this.view;

    if (!myChannel.isConnected()) {
      logger.info("JGroupsMessenger channel is closed - messaging is not possible");
      throw new DistributedSystemDisconnectedException("Distributed System is shutting down");
    }

    filterOutgoingMessage(msg);

    // JGroupsMessenger does not support direct-replies, so register
    // the message's processor if necessary
    if ((msg instanceof DirectReplyMessage) && msg.isDirectAck() && msg.getProcessorId() <= 0) {
      ((DirectReplyMessage) msg).registerProcessor();
    }

    InternalDistributedMember[] destinations = msg.getRecipients();
    boolean allDestinations = msg.forAll();

    // Multicast is only used when enabled, allowed by the manager, and the message
    // is either flagged for multicast or addressed to everyone
    boolean useMcast = false;
    if (services.getConfig().getTransport().isMcastEnabled()) {
      if (msg.getMulticast() || allDestinations) {
        useMcast = services.getManager().isMulticastAllowed();
      }
    }

    if (logger.isDebugEnabled() && reliably) {
      String recips = useMcast ? "multicast" : Arrays.toString(msg.getRecipients());
      logger.debug("sending via JGroups: [{}] recipients: {}", msg, recips);
    }

    JGAddress local = this.jgAddress;

    if (useMcast) {

      long startSer = theStats.startMsgSerialization();
      Message jmsg = createJGMessage(msg, local, Version.CURRENT_ORDINAL);
      theStats.endMsgSerialization(startSer);

      Exception problem;
      try {
        jmsg.setTransientFlag(TransientFlag.DONT_LOOPBACK);
        if (!reliably) {
          jmsg.setFlag(Message.Flag.NO_RELIABILITY);
        }
        theStats.incSentBytes(jmsg.getLength());
        logger.trace("Sending JGroups message: {}", jmsg);
        myChannel.send(jmsg);
      } catch (Exception e) {
        logger.debug("caught unexpected exception", e);
        Throwable cause = e.getCause();
        if (cause instanceof ForcedDisconnectException) {
          problem = (Exception) cause;
        } else {
          problem = e;
        }
        if (services.getShutdownCause() != null) {
          Throwable shutdownCause = services.getShutdownCause();
          // If ForcedDisconnectException occurred then report it as actual
          // problem.
          if (shutdownCause instanceof ForcedDisconnectException) {
            problem = (Exception) shutdownCause;
          } else {
            Throwable ne = problem;
            while (ne.getCause() != null) {
              ne = ne.getCause();
            }
            ne.initCause(services.getShutdownCause());
          }
        }
        final String channelClosed =
            LocalizedStrings.GroupMembershipService_CHANNEL_CLOSED.toLocalizedString();
        // services.getManager().membershipFailure(channelClosed, problem);
        throw new DistributedSystemDisconnectedException(channelClosed, problem);
      }
    } // useMcast
    else { // !useMcast
      int len = destinations.length;
      List<GMSMember> calculatedMembers; // explicit list of members
      int calculatedLen; // == calculatedMembers.len
      if (len == 1 && destinations[0] == DistributionMessage.ALL_RECIPIENTS) { // send to all
        // Grab a copy of the current membership
        NetView v = services.getJoinLeave().getView();

        // Construct the list
        calculatedLen = v.size();
        calculatedMembers = new LinkedList<GMSMember>();
        for (int i = 0; i < calculatedLen; i++) {
          InternalDistributedMember m = (InternalDistributedMember) v.get(i);
          calculatedMembers.add((GMSMember) m.getNetMember());
        }
      } // send to all
      else { // send to explicit list
        calculatedLen = len;
        calculatedMembers = new LinkedList<GMSMember>();
        for (int i = 0; i < calculatedLen; i++) {
          calculatedMembers.add((GMSMember) destinations[i].getNetMember());
        }
      } // send to explicit list

      // Serialize the message once per distinct product version among the recipients
      Int2ObjectOpenHashMap<Message> messages = new Int2ObjectOpenHashMap<>();
      long startSer = theStats.startMsgSerialization();
      boolean firstMessage = true;
      for (GMSMember mbr : calculatedMembers) {
        short version = mbr.getVersionOrdinal();
        if (!messages.containsKey(version)) {
          Message jmsg = createJGMessage(msg, local, version);
          messages.put(version, jmsg);
          if (firstMessage) {
            theStats.incSentBytes(jmsg.getLength());
            firstMessage = false;
          }
        }
      }
      theStats.endMsgSerialization(startSer);

      // Randomize the send order so no single member is always last
      Collections.shuffle(calculatedMembers);
      int i = 0;
      for (GMSMember mbr : calculatedMembers) {
        JGAddress to = new JGAddress(mbr);
        short version = mbr.getVersionOrdinal();
        Message jmsg = messages.get(version);
        Exception problem = null;
        try {
          Message tmp = (i < (calculatedLen - 1)) ?
jmsg.copy(true) : jmsg; if (!reliably) { jmsg.setFlag(Message.Flag.NO_RELIABILITY); } tmp.setDest(to); tmp.setSrc(this.jgAddress); logger.trace("Unicasting to {}", to); myChannel.send(tmp); } catch (Exception e) { problem = e; } if (problem != null) { Throwable cause = services.getShutdownCause(); if (cause != null) { // If ForcedDisconnectException occurred then report it as actual // problem. if (cause instanceof ForcedDisconnectException) { problem = (Exception) cause; } else { Throwable ne = problem; while (ne.getCause() != null) { ne = ne.getCause(); } ne.initCause(cause); } } final String channelClosed = LocalizedStrings.GroupMembershipService_CHANNEL_CLOSED.toLocalizedString(); // services.getManager().membershipFailure(channelClosed, problem); throw new DistributedSystemDisconnectedException(channelClosed, problem); } } // send individually } // !useMcast // The contract is that every destination enumerated in the // message should have received the message. If one left // (i.e., left the view), we signal it here. if (msg.forAll()) { return Collections.emptySet(); } Set<InternalDistributedMember> result = new HashSet<>(); NetView newView = this.view; if (newView != null && newView != oldView) { for (InternalDistributedMember d : destinations) { if (!newView.contains(d)) { logger.debug("messenger: member has left the view: {} view is now {}", d, newView); result.add(d); } } } return result; } /** * This is the constructor to use to create a JGroups message holding a GemFire * DistributionMessage. 
It sets the appropriate flags in the Message and properly serializes the * DistributionMessage for the recipient's product version * * @param gfmsg the DistributionMessage * @param src the sender address * @param version the version of the recipient * @return the new message */ Message createJGMessage(DistributionMessage gfmsg, JGAddress src, short version) { if (gfmsg instanceof DirectReplyMessage) { ((DirectReplyMessage) gfmsg).registerProcessor(); } Message msg = new Message(); msg.setDest(null); msg.setSrc(src); setMessageFlags(gfmsg, msg); try { long start = services.getStatistics().startMsgSerialization(); HeapDataOutputStream out_stream = new HeapDataOutputStream(Version.fromOrdinalOrCurrent(version)); Version.CURRENT.writeOrdinal(out_stream, true); if (encrypt != null) { out_stream.writeBoolean(true); writeEncryptedMessage(gfmsg, version, out_stream); } else { out_stream.writeBoolean(false); serializeMessage(gfmsg, out_stream); } msg.setBuffer(out_stream.toByteArray()); services.getStatistics().endMsgSerialization(start); } catch (IOException | GemFireIOException ex) { logger.warn("Error serializing message", ex); if (ex instanceof GemFireIOException) { throw (GemFireIOException) ex; } else { GemFireIOException ioe = new GemFireIOException("Error serializing message"); ioe.initCause(ex); throw ioe; } } catch (Exception ex) { logger.warn("Error serializing message", ex); GemFireIOException ioe = new GemFireIOException("Error serializing message"); ioe.initCause(ex.getCause()); throw ioe; } return msg; } void writeEncryptedMessage(DistributionMessage gfmsg, short version, HeapDataOutputStream out) throws Exception { long start = services.getStatistics().startUDPMsgEncryption(); try { InternalDataSerializer.writeDSFIDHeader(gfmsg.getDSFID(), out); byte[] pk = null; int requestId = 0; InternalDistributedMember pkMbr = null; switch (gfmsg.getDSFID()) { case FIND_COORDINATOR_REQ: case JOIN_REQUEST: // need to append mine PK pk = 
encrypt.getPublicKey(localAddress); pkMbr = gfmsg.getRecipients()[0]; requestId = getRequestId(gfmsg, true); break; case FIND_COORDINATOR_RESP: case JOIN_RESPONSE: pkMbr = gfmsg.getRecipients()[0]; requestId = getRequestId(gfmsg, false); default: break; } logger.debug("writeEncryptedMessage gfmsg.getDSFID() = {} for {} with requestid {}", gfmsg.getDSFID(), pkMbr, requestId); out.writeInt(requestId); if (pk != null) { InternalDataSerializer.writeByteArray(pk, out); } HeapDataOutputStream out_stream = new HeapDataOutputStream(Version.fromOrdinalOrCurrent(version)); byte[] messageBytes = serializeMessage(gfmsg, out_stream); if (pkMbr != null) { // using members private key messageBytes = encrypt.encryptData(messageBytes, pkMbr); } else { // using cluster secret key messageBytes = encrypt.encryptData(messageBytes); } InternalDataSerializer.writeByteArray(messageBytes, out); } finally { services.getStatistics().endUDPMsgEncryption(start); } } int getRequestId(DistributionMessage gfmsg, boolean add) { int requestId = 0; if (gfmsg instanceof FindCoordinatorRequest) { requestId = ((FindCoordinatorRequest) gfmsg).getRequestId(); } else if (gfmsg instanceof JoinRequestMessage) { requestId = ((JoinRequestMessage) gfmsg).getRequestId(); } else if (gfmsg instanceof FindCoordinatorResponse) { requestId = ((FindCoordinatorResponse) gfmsg).getRequestId(); } else if (gfmsg instanceof JoinResponseMessage) { requestId = ((JoinResponseMessage) gfmsg).getRequestId(); } if (add) { addRequestId(requestId, gfmsg.getRecipients()[0]); } return requestId; } byte[] serializeMessage(DistributionMessage gfmsg, HeapDataOutputStream out_stream) throws IOException { GMSMember m = (GMSMember) this.localAddress.getNetMember(); m.writeEssentialData(out_stream); DataSerializer.writeObject(gfmsg, out_stream); return out_stream.toByteArray(); } void setMessageFlags(DistributionMessage gfmsg, Message msg) { // Bundling is mostly only useful if we're doing no-ack work, // which is fairly rare 
msg.setFlag(Flag.DONT_BUNDLE); if (gfmsg.getProcessorType() == DistributionManager.HIGH_PRIORITY_EXECUTOR || gfmsg instanceof HighPriorityDistributionMessage || AlertAppender.isThreadAlerting()) { msg.setFlag(Flag.OOB); msg.setFlag(Flag.NO_FC); msg.setFlag(Flag.SKIP_BARRIER); } if (gfmsg instanceof DistributedCacheOperation.CacheOperationMessage) { // we don't want to see our own cache operation messages msg.setTransientFlag(Message.TransientFlag.DONT_LOOPBACK); } } /** * deserialize a jgroups payload. If it's a DistributionMessage find the ID of the sender and * establish it as the message's sender */ Object readJGMessage(Message jgmsg) { Object result = null; int messageLength = jgmsg.getLength(); if (logger.isTraceEnabled()) { logger.trace("deserializing a message of length " + messageLength); } if (messageLength == 0) { // jgroups messages with no payload are used for protocol interchange, such // as STABLE_GOSSIP logger.trace("message length is zero - ignoring"); return null; } Exception problem = null; byte[] buf = jgmsg.getRawBuffer(); try { long start = services.getStatistics().startMsgDeserialization(); DataInputStream dis = new DataInputStream(new ByteArrayInputStream(buf, jgmsg.getOffset(), jgmsg.getLength())); short ordinal = Version.readOrdinal(dis); // logger.info("JGroupsMessenger read ordinal {} version is {}. 
My version is {}", // ordinal, Version.fromOrdinalOrCurrent(ordinal), Version.CURRENT); if (ordinal < Version.CURRENT_ORDINAL) { dis = new VersionedDataInputStream(dis, Version.fromOrdinalNoThrow(ordinal, true)); } // read boolean isEncrypted = dis.readBoolean(); if (isEncrypted && encrypt == null) { throw new GemFireConfigException("Got remote message as encrypted"); } if (isEncrypted) { result = readEncryptedMessage(dis, ordinal, encrypt); } else { result = deserializeMessage(dis, ordinal); } services.getStatistics().endMsgDeserialization(start); } catch (ClassNotFoundException | IOException | RuntimeException e) { problem = e; } catch (Exception e) { problem = e; } if (problem != null) { logger.error(LocalizedMessage.create( LocalizedStrings.GroupMembershipService_EXCEPTION_DESERIALIZING_MESSAGE_PAYLOAD_0, jgmsg), problem); return null; } return result; } void setSender(DistributionMessage dm, GMSMember m, short ordinal) { InternalDistributedMember sender = null; // JoinRequestMessages are sent with an ID that may have been // reused from a previous life by way of auto-reconnect, // so we don't want to find a canonical reference for the // request's sender ID if (dm.getDSFID() == JOIN_REQUEST) { sender = ((JoinRequestMessage) dm).getMemberID(); } else { sender = getMemberFromView(m, ordinal); } dm.setSender(sender); } @SuppressWarnings("resource") DistributionMessage readEncryptedMessage(DataInputStream dis, short ordinal, GMSEncrypt encryptLocal) throws Exception { int dfsid = InternalDataSerializer.readDSFIDHeader(dis); int requestId = dis.readInt(); long start = services.getStatistics().startUDPMsgDecryption(); try { logger.debug("readEncryptedMessage Reading Request id " + dfsid + " and requestid is " + requestId + " myid " + this.localAddress); InternalDistributedMember pkMbr = null; boolean readPK = false; switch (dfsid) { case FIND_COORDINATOR_REQ: case JOIN_REQUEST: readPK = true; break; case FIND_COORDINATOR_RESP: case JOIN_RESPONSE: // this will have 
requestId to know the PK pkMbr = getRequestedMember(requestId); break; } byte[] data; byte[] pk = null; if (readPK) { // need to read PK pk = InternalDataSerializer.readByteArray(dis); // encrypt.setPublicKey(publickey, mbr); data = InternalDataSerializer.readByteArray(dis); // using prefixed pk from sender data = encryptLocal.decryptData(data, pk); } else { data = InternalDataSerializer.readByteArray(dis); // from cluster key if (pkMbr != null) { // using member public key data = encryptLocal.decryptData(data, pkMbr); } else { // from cluster key data = encryptLocal.decryptData(data); } } { DataInputStream in = new DataInputStream(new ByteArrayInputStream(data)); if (ordinal < Version.CURRENT_ORDINAL) { in = new VersionedDataInputStream(in, Version.fromOrdinalNoThrow(ordinal, true)); } DistributionMessage result = deserializeMessage(in, ordinal); if (pk != null) { logger.info("Setting public key for " + result.getSender() + " len " + pk.length); setPublicKey(pk, result.getSender()); } return result; } } catch (Exception e) { throw new Exception("Message id is " + dfsid, e); } finally { services.getStatistics().endUDPMsgDecryption(start); } } DistributionMessage deserializeMessage(DataInputStream in, short ordinal) throws ClassNotFoundException, IOException { GMSMember m = new GMSMember(); m.readEssentialData(in); DistributionMessage result = (DistributionMessage) DataSerializer.readObject(in); setSender(result, m, ordinal); return result; } /** look for certain messages that may need to be altered before being sent */ void filterOutgoingMessage(DistributionMessage m) { switch (m.getDSFID()) { case JOIN_RESPONSE: JoinResponseMessage jrsp = (JoinResponseMessage) m; if (jrsp.getRejectionMessage() == null && services.getConfig().getTransport().isMcastEnabled()) { // get the multicast message digest and pass it with the join response Digest digest = (Digest) this.myChannel.getProtocolStack().getTopProtocol() .down(Event.GET_DIGEST_EVT); HeapDataOutputStream hdos = new 
HeapDataOutputStream(500, Version.CURRENT); try { digest.writeTo(hdos); } catch (Exception e) { logger.fatal("Unable to serialize JGroups messaging digest", e); } jrsp.setMessengerData(hdos.toByteArray()); } break; default: break; } } void filterIncomingMessage(DistributionMessage m) { switch (m.getDSFID()) { case JOIN_RESPONSE: JoinResponseMessage jrsp = (JoinResponseMessage) m; if (jrsp.getRejectionMessage() == null && services.getConfig().getTransport().isMcastEnabled()) { byte[] serializedDigest = jrsp.getMessengerData(); ByteArrayInputStream bis = new ByteArrayInputStream(serializedDigest); DataInputStream dis = new DataInputStream(bis); try { Digest digest = new Digest(); digest.readFrom(dis); logger.trace("installing JGroups message digest {}", digest); this.myChannel.getProtocolStack().getTopProtocol() .down(new Event(Event.MERGE_DIGEST, digest)); jrsp.setMessengerData(null); } catch (Exception e) { logger.fatal("Unable to read JGroups messaging digest", e); } } break; default: break; } } @Override public InternalDistributedMember getMemberID() { return localAddress; } /** * returns the JGroups configuration string, for testing */ public String getJGroupsStackConfig() { return this.jgStackConfig; } /** * returns the pinger, for testing */ public GMSPingPonger getPingPonger() { return this.pingPonger; } /** * for unit testing we need to replace UDP with a fake UDP protocol */ public void setJGroupsStackConfigForTesting(String config) { this.jgStackConfig = config; } /** * returns the member ID for the given GMSMember object */ @SuppressWarnings("UnusedParameters") private InternalDistributedMember getMemberFromView(GMSMember jgId, short version) { return this.services.getJoinLeave().getMemberID(jgId); } @Override public void emergencyClose() { this.view = null; if (this.myChannel != null) { if ((services.isShutdownDueToForcedDisconnect() && services.isAutoReconnectEnabled()) || services.getManager().isReconnectingDS()) { } else { this.myChannel.disconnect(); 
} } } public QuorumChecker getQuorumChecker() { NetView view = this.view; if (view == null) { view = services.getJoinLeave().getView(); if (view == null) { view = services.getJoinLeave().getPreviousView(); if (view == null) { return null; } } } GMSQuorumChecker qc = new GMSQuorumChecker(view, services.getConfig().getLossThreshold(), this.myChannel); qc.initialize(); return qc; } /** * JGroupsReceiver receives incoming JGroups messages and passes them to a handler. It may be * accessed through JChannel.getReceiver(). */ class JGroupsReceiver extends ReceiverAdapter { @Override public void receive(Message jgmsg) { long startTime = DistributionStats.getStatTime(); try { if (services.getManager().shutdownInProgress()) { return; } if (logger.isTraceEnabled()) { logger.trace("JGroupsMessenger received {} headers: {}", jgmsg, jgmsg.getHeaders()); } // Respond to ping messages sent from other systems that are in a auto reconnect state byte[] contents = jgmsg.getBuffer(); if (contents == null) { return; } if (pingPonger.isPingMessage(contents)) { try { pingPonger.sendPongMessage(myChannel, jgAddress, jgmsg.getSrc()); } catch (Exception e) { logger.info("Failed sending Pong response to " + jgmsg.getSrc()); } return; } else if (pingPonger.isPongMessage(contents)) { pongsReceived.incrementAndGet(); return; } Object o = readJGMessage(jgmsg); if (o == null) { return; } DistributionMessage msg = (DistributionMessage) o; assert msg.getSender() != null; // admin-only VMs don't have caches, so we ignore cache operations // multicast to them, avoiding deserialization cost and classpath // problems if ((services.getConfig().getTransport() .getVmKind() == DistributionManager.ADMIN_ONLY_DM_TYPE) && (msg instanceof DistributedCacheOperation.CacheOperationMessage)) { return; } msg.resetTimestamp(); msg.setBytesRead(jgmsg.getLength()); try { if (logger.isTraceEnabled()) { logger.trace("JGroupsMessenger dispatching {} from {}", msg, msg.getSender()); } filterIncomingMessage(msg); 
getMessageHandler(msg).processMessage(msg); // record the scheduling of broadcast messages NakAckHeader2 header = (NakAckHeader2) jgmsg.getHeader(nackack2HeaderId); if (header != null && !jgmsg.isFlagSet(Flag.OOB)) { recordScheduledSeqno(msg.getSender(), header.getSeqno()); } } catch (MemberShunnedException e) { // message from non-member - ignore } } finally { long delta = DistributionStats.getStatTime() - startTime; JGroupsMessenger.this.services.getStatistics().incUDPDispatchRequestTime(delta); } } private void recordScheduledSeqno(DistributedMember member, long seqno) { synchronized (scheduledMcastSeqnos) { MessageTracker counter = scheduledMcastSeqnos.get(member); if (counter == null) { counter = new MessageTracker(seqno); scheduledMcastSeqnos.put(member, counter); } counter.record(seqno); } } /** * returns the handler that should process the given message. The default handler is the * membership manager */ private MessageHandler getMessageHandler(DistributionMessage msg) { Class<?> msgClazz = msg.getClass(); MessageHandler h = handlers.get(msgClazz); if (h == null) { for (Class<?> clazz : handlers.keySet()) { if (clazz.isAssignableFrom(msgClazz)) { h = handlers.get(clazz); handlers.put(msg.getClass(), h); break; } } } if (h == null) { h = services.getManager(); } return h; } } @Override public Set<InternalDistributedMember> send(DistributionMessage msg, NetView alternateView) { if (this.encrypt != null) { this.encrypt.installView(alternateView); } return send(msg, true); } @Override public byte[] getPublicKey(InternalDistributedMember mbr) { if (encrypt != null) { return encrypt.getPublicKey(mbr); } return null; } @Override public void setPublicKey(byte[] publickey, InternalDistributedMember mbr) { if (encrypt != null) { logger.debug("Setting PK for member " + mbr); encrypt.setPublicKey(publickey, mbr); } } @Override public void setClusterSecretKey(byte[] clusterSecretKey) { if (encrypt != null) { logger.debug("Setting cluster key"); 
encrypt.addClusterKey(clusterSecretKey); } } @Override public byte[] getClusterSecretKey() { if (encrypt != null) { return encrypt.getClusterSecretKey(); } return null; } private AtomicInteger requestId = new AtomicInteger((new Random().nextInt())); private HashMap<Integer, InternalDistributedMember> requestIdVsRecipients = new HashMap<>(); InternalDistributedMember getRequestedMember(int requestId) { return requestIdVsRecipients.remove(requestId); } void addRequestId(int requestId, InternalDistributedMember mbr) { requestIdVsRecipients.put(requestId, mbr); } @Override public int getRequestId() { return requestId.incrementAndGet(); } @Override public void initClusterKey() { if (encrypt != null) { try { logger.info("Initializing cluster key"); encrypt.initClusterSecretKey(); } catch (Exception e) { throw new RuntimeException("unable to create cluster key ", e); } } } static class MessageTracker { long highestSeqno; MessageTracker(long seqno) { highestSeqno = seqno; } long get() { return highestSeqno; } void record(long seqno) { if (seqno > highestSeqno) { highestSeqno = seqno; } } } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.translog; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooOldException; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.InputStreamDataInput; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.ByteBufferStreamInput; import org.elasticsearch.common.io.stream.InputStreamStreamInput; import java.io.Closeable; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.Channels; import java.nio.channels.FileChannel; import java.nio.file.Files; import java.nio.file.Path; import java.util.concurrent.atomic.AtomicBoolean; /** * A base class for all classes that allows reading ops from translog files */ public abstract class TranslogReader implements Closeable, Comparable<TranslogReader> { public static final int UNKNOWN_OP_COUNT = -1; private static final byte LUCENE_CODEC_HEADER_BYTE = 0x3f; private static final byte UNVERSIONED_TRANSLOG_HEADER_BYTE = 
0x00; protected final long generation; protected final ChannelReference channelReference; protected final FileChannel channel; protected final AtomicBoolean closed = new AtomicBoolean(false); protected final long firstOperationOffset; public TranslogReader(long generation, ChannelReference channelReference, long firstOperationOffset) { this.generation = generation; this.channelReference = channelReference; this.channel = channelReference.getChannel(); this.firstOperationOffset = firstOperationOffset; } public long getGeneration() { return this.generation; } public abstract long sizeInBytes(); abstract public int totalOperations(); public final long getFirstOperationOffset() { return firstOperationOffset; } public Translog.Operation read(Translog.Location location) throws IOException { assert location.generation == generation : "read location's translog generation [" + location.generation + "] is not [" + generation + "]"; ByteBuffer buffer = ByteBuffer.allocate(location.size); try (BufferedChecksumStreamInput checksumStreamInput = checksummedStream(buffer, location.translogLocation, location.size, null)) { return read(checksumStreamInput); } } /** read the size of the op (i.e., number of bytes, including the op size) written at the given position */ private final int readSize(ByteBuffer reusableBuffer, long position) { // read op size from disk assert reusableBuffer.capacity() >= 4 : "reusable buffer must have capacity >=4 when reading opSize. got [" + reusableBuffer.capacity() + "]"; try { reusableBuffer.clear(); reusableBuffer.limit(4); readBytes(reusableBuffer, position); reusableBuffer.flip(); // Add an extra 4 to account for the operation size integer itself final int size = reusableBuffer.getInt() + 4; final long maxSize = sizeInBytes() - position; if (size < 0 || size > maxSize) { throw new TranslogCorruptedException("operation size is corrupted must be [0.." 
+ maxSize + "] but was: " + size); } return size; } catch (IOException e) { throw new ElasticsearchException("unexpected exception reading from translog snapshot of " + this.channelReference.getPath(), e); } } public Translog.Snapshot newSnapshot() { final ByteBuffer reusableBuffer = ByteBuffer.allocate(1024); final int totalOperations = totalOperations(); channelReference.incRef(); return newReaderSnapshot(totalOperations, reusableBuffer); } /** * reads an operation at the given position and returns it. The buffer length is equal to the number * of bytes reads. */ private final BufferedChecksumStreamInput checksummedStream(ByteBuffer reusableBuffer, long position, int opSize, BufferedChecksumStreamInput reuse) throws IOException { final ByteBuffer buffer; if (reusableBuffer.capacity() >= opSize) { buffer = reusableBuffer; } else { buffer = ByteBuffer.allocate(opSize); } buffer.clear(); buffer.limit(opSize); readBytes(buffer, position); buffer.flip(); return new BufferedChecksumStreamInput(new ByteBufferStreamInput(buffer), reuse); } protected Translog.Operation read(BufferedChecksumStreamInput inStream) throws IOException { return Translog.readOperation(inStream); } /** * reads bytes at position into the given buffer, filling it. */ abstract protected void readBytes(ByteBuffer buffer, long position) throws IOException; @Override public void close() throws IOException { if (closed.compareAndSet(false, true)) { doClose(); } } protected void doClose() throws IOException { channelReference.decRef(); } protected void ensureOpen() { if (closed.get()) { throw new AlreadyClosedException("translog [" + getGeneration() + "] is already closed"); } } @Override public String toString() { return "translog [" + generation + "][" + channelReference.getPath() + "]"; } @Override public int compareTo(TranslogReader o) { return Long.compare(getGeneration(), o.getGeneration()); } /** * Given a file, return a VersionedTranslogStream based on an * optionally-existing header in the file. 
If the file does not exist, or * has zero length, returns the latest version. If the header does not * exist, assumes Version 0 of the translog file format. * <p/> * * @throws IOException */ public static ImmutableTranslogReader open(ChannelReference channelReference, Checkpoint checkpoint, String translogUUID) throws IOException { final FileChannel channel = channelReference.getChannel(); final Path path = channelReference.getPath(); assert channelReference.getGeneration() == checkpoint.generation : "expected generation: " + channelReference.getGeneration() + " but got: " + checkpoint.generation; try { if (checkpoint.offset == 0 && checkpoint.numOps == TranslogReader.UNKNOWN_OP_COUNT) { // only old files can be empty return new LegacyTranslogReader(channelReference.getGeneration(), channelReference, 0); } InputStreamStreamInput headerStream = new InputStreamStreamInput(Channels.newInputStream(channel)); // don't close // Lucene's CodecUtil writes a magic number of 0x3FD76C17 with the // header, in binary this looks like: // // binary: 0011 1111 1101 0111 0110 1100 0001 0111 // hex : 3 f d 7 6 c 1 7 // // With version 0 of the translog, the first byte is the // Operation.Type, which will always be between 0-4, so we know if // we grab the first byte, it can be: // 0x3f => Lucene's magic number, so we can assume it's version 1 or later // 0x00 => version 0 of the translog // // otherwise the first byte of the translog is corrupted and we // should bail byte b1 = headerStream.readByte(); if (b1 == LUCENE_CODEC_HEADER_BYTE) { // Read 3 more bytes, meaning a whole integer has been read byte b2 = headerStream.readByte(); byte b3 = headerStream.readByte(); byte b4 = headerStream.readByte(); // Convert the 4 bytes that were read into an integer int header = ((b1 & 0xFF) << 24) + ((b2 & 0xFF) << 16) + ((b3 & 0xFF) << 8) + ((b4 & 0xFF) << 0); // We confirm CodecUtil's CODEC_MAGIC number (0x3FD76C17) // ourselves here, because it allows us to read the first // byte 
separately if (header != CodecUtil.CODEC_MAGIC) { throw new TranslogCorruptedException("translog looks like version 1 or later, but has corrupted header"); } // Confirm the rest of the header using CodecUtil, extracting // the translog version int version = CodecUtil.checkHeaderNoMagic(new InputStreamDataInput(headerStream), TranslogWriter.TRANSLOG_CODEC, 1, Integer.MAX_VALUE); switch (version) { case TranslogWriter.VERSION_CHECKSUMS: assert checkpoint.numOps == TranslogReader.UNKNOWN_OP_COUNT : "expected unknown op count but got: " + checkpoint.numOps; assert checkpoint.offset == Files.size(path) : "offset(" + checkpoint.offset + ") != file_size(" + Files.size(path) + ") for: " + path; // legacy - we still have to support it somehow return new LegacyTranslogReaderBase(channelReference.getGeneration(), channelReference, CodecUtil.headerLength(TranslogWriter.TRANSLOG_CODEC), checkpoint.offset); case TranslogWriter.VERSION_CHECKPOINTS: assert path.getFileName().toString().endsWith(Translog.TRANSLOG_FILE_SUFFIX) : "new file ends with old suffix: " + path; assert checkpoint.numOps > TranslogReader.UNKNOWN_OP_COUNT: "expected at least 0 operatin but got: " + checkpoint.numOps; assert checkpoint.offset <= channel.size() : "checkpoint is inconsistent with channel length: " + channel.size() + " " + checkpoint; int len = headerStream.readInt(); if (len > channel.size()) { throw new TranslogCorruptedException("uuid length can't be larger than the translog"); } BytesRef ref = new BytesRef(len); ref.length = len; headerStream.read(ref.bytes, ref.offset, ref.length); BytesRef uuidBytes = new BytesRef(translogUUID); if (uuidBytes.bytesEquals(ref) == false) { throw new TranslogCorruptedException("expected shard UUID [" + uuidBytes + "] but got: [" + ref + "] this translog file belongs to a different translog"); } return new ImmutableTranslogReader(channelReference.getGeneration(), channelReference, ref.length + CodecUtil.headerLength(TranslogWriter.TRANSLOG_CODEC) + 
RamUsageEstimator.NUM_BYTES_INT, checkpoint.offset, checkpoint.numOps); default: throw new TranslogCorruptedException("No known translog stream version: " + version + " path:" + path); } } else if (b1 == UNVERSIONED_TRANSLOG_HEADER_BYTE) { assert checkpoint.numOps == TranslogReader.UNKNOWN_OP_COUNT : "expected unknown op count but got: " + checkpoint.numOps; assert checkpoint.offset == Files.size(path) : "offset(" + checkpoint.offset + ") != file_size(" + Files.size(path) + ") for: " + path; return new LegacyTranslogReader(channelReference.getGeneration(), channelReference, checkpoint.offset); } else { throw new TranslogCorruptedException("Invalid first byte in translog file, got: " + Long.toHexString(b1) + ", expected 0x00 or 0x3f"); } } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException e) { throw new TranslogCorruptedException("Translog header corrupted", e); } } public Path path() { return channelReference.getPath(); } protected Translog.Snapshot newReaderSnapshot(int totalOperations, ByteBuffer reusableBuffer) { return new ReaderSnapshot(totalOperations, reusableBuffer); } class ReaderSnapshot implements Translog.Snapshot { private final AtomicBoolean closed; private final int totalOperations; private final ByteBuffer reusableBuffer; long position; int readOperations; private BufferedChecksumStreamInput reuse; public ReaderSnapshot(int totalOperations, ByteBuffer reusableBuffer) { this.totalOperations = totalOperations; this.reusableBuffer = reusableBuffer; closed = new AtomicBoolean(false); position = firstOperationOffset; readOperations = 0; reuse = null; } @Override public final int estimatedTotalOperations() { return totalOperations; } @Override public Translog.Operation next() throws IOException { if (readOperations < totalOperations) { assert readOperations < totalOperations : "readOpeartions must be less than totalOperations"; return readOperation(); } else { return null; } } protected final Translog.Operation 
readOperation() throws IOException { final int opSize = readSize(reusableBuffer, position); reuse = checksummedStream(reusableBuffer, position, opSize, reuse); Translog.Operation op = read(reuse); position += opSize; readOperations++; return op; } @Override public void close() { if (closed.compareAndSet(false, true)) { channelReference.decRef(); } } } }
/*
 * Copyright 2015-2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.security.jackson2;

import java.io.IOException;
import java.util.ArrayList;

import com.fasterxml.jackson.annotation.JsonClassDescription;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonInclude.Value;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.json.JSONException;
import org.junit.Test;
import org.skyscreamer.jsonassert.JSONAssert;

import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.userdetails.User;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Round-trip (de)serialization tests for {@link UsernamePasswordAuthenticationToken}
 * via the Jackson mixin registered by this package. Uses {@code this.mapper},
 * {@code createDefaultUser()} and default-user fixtures supplied by
 * {@code AbstractMixinTests} (declared elsewhere in this package).
 *
 * @author Jitendra Singh
 * @author Greg Turnquist
 * @author Onur Kagan Ozcan
 * @since 4.2
 */
public class UsernamePasswordAuthenticationTokenMixinTests extends AbstractMixinTests {

	// Baseline: an authenticated token whose principal is the full User JSON from
	// UserDeserializerTests. All other fixtures below are DERIVED from this string
	// via chained replace() calls, so its exact spacing/ordering is load-bearing.
	private static final String AUTHENTICATED_JSON = "{"
		+ "\"@class\": \"org.springframework.security.authentication.UsernamePasswordAuthenticationToken\","
		+ "\"principal\": " + UserDeserializerTests.USER_JSON + ", "
		+ "\"credentials\": \"1234\", "
		+ "\"authenticated\": true, "
		+ "\"details\": null, "
		+ "\"authorities\": " + SimpleGrantedAuthorityMixinTests.AUTHORITIES_ARRAYLIST_JSON
		+ "}";

	// Same token but with a plain String principal ("admin") instead of a User object.
	public static final String AUTHENTICATED_STRINGPRINCIPAL_JSON = AUTHENTICATED_JSON
		.replace(UserDeserializerTests.USER_JSON, "\"admin\"");

	// Polymorphic JSON for the NonUserPrincipal nested class below ($-separated inner-class name).
	private static final String NON_USER_PRINCIPAL_JSON = "{"
		+ "\"@class\": \"org.springframework.security.jackson2.UsernamePasswordAuthenticationTokenMixinTests$NonUserPrincipal\", "
		+ "\"username\": \"admin\"" + "}";

	// Baseline with a String (non-null) details field.
	private static final String AUTHENTICATED_STRINGDETAILS_JSON = AUTHENTICATED_JSON.replace("\"details\": null, ", "\"details\": \"details\", ");

	// Baseline with NonUserPrincipal as principal, password nulled out
	// (replaceAll treats USER_PASSWORD as a regex), and no authorities.
	private static final String AUTHENTICATED_NON_USER_PRINCIPAL_JSON = AUTHENTICATED_JSON
		.replace(UserDeserializerTests.USER_JSON, NON_USER_PRINCIPAL_JSON)
		.replaceAll(UserDeserializerTests.USER_PASSWORD, "null")
		.replace(SimpleGrantedAuthorityMixinTests.AUTHORITIES_ARRAYLIST_JSON, SimpleGrantedAuthorityMixinTests.NO_AUTHORITIES_ARRAYLIST_JSON);

	// Unauthenticated variant: authenticated flag flipped and authorities emptied.
	private static final String UNAUTHENTICATED_STRINGPRINCIPAL_JSON = AUTHENTICATED_STRINGPRINCIPAL_JSON
		.replace("\"authenticated\": true, ", "\"authenticated\": false, ")
		.replace(SimpleGrantedAuthorityMixinTests.AUTHORITIES_ARRAYLIST_JSON, SimpleGrantedAuthorityMixinTests.EMPTY_AUTHORITIES_ARRAYLIST_JSON);

	// Serializing the two-arg constructor (unauthenticated token) must produce the
	// unauthenticated JSON exactly (strict JSONAssert comparison).
	@Test
	public void serializeUnauthenticatedUsernamePasswordAuthenticationTokenMixinTest()
			throws JsonProcessingException, JSONException {
		UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken("admin", "1234");
		String serializedJson = this.mapper.writeValueAsString(token);
		JSONAssert.assertEquals(UNAUTHENTICATED_STRINGPRINCIPAL_JSON, serializedJson, true);
	}

	// Three-arg constructor (principal, credentials, authorities) yields an
	// authenticated token; here the principal is the username String.
	@Test
	public void serializeAuthenticatedUsernamePasswordAuthenticationTokenMixinTest()
			throws JsonProcessingException, JSONException {
		User user = createDefaultUser();
		UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(user.getUsername(), user.getPassword(), user.getAuthorities());
		String serializedJson = this.mapper.writeValueAsString(token);
		JSONAssert.assertEquals(AUTHENTICATED_STRINGPRINCIPAL_JSON, serializedJson, true);
	}

	// Deserializing the unauthenticated JSON restores authenticated=false and no authorities.
	@Test
	public void deserializeUnauthenticatedUsernamePasswordAuthenticationTokenMixinTest() throws IOException {
		UsernamePasswordAuthenticationToken token = this.mapper.readValue(UNAUTHENTICATED_STRINGPRINCIPAL_JSON, UsernamePasswordAuthenticationToken.class);
		assertThat(token).isNotNull();
		assertThat(token.isAuthenticated()).isEqualTo(false);
		assertThat(token.getAuthorities()).isNotNull().hasSize(0);
	}

	// Deserializing the authenticated JSON restores the flag and the authority list.
	@Test
	public void deserializeAuthenticatedUsernamePasswordAuthenticationTokenMixinTest() throws IOException {
		UsernamePasswordAuthenticationToken expectedToken = createToken();
		UsernamePasswordAuthenticationToken token = this.mapper.readValue(AUTHENTICATED_STRINGPRINCIPAL_JSON, UsernamePasswordAuthenticationToken.class);
		assertThat(token).isNotNull();
		assertThat(token.isAuthenticated()).isTrue();
		assertThat(token.getAuthorities()).isEqualTo(expectedToken.getAuthorities());
	}

	// Serialization when the principal is a full User object (uses the baseline fixture).
	@Test
	public void serializeAuthenticatedUsernamePasswordAuthenticationTokenMixinWithUserTest()
			throws JsonProcessingException, JSONException {
		UsernamePasswordAuthenticationToken token = createToken();
		String actualJson = this.mapper.writeValueAsString(token);
		JSONAssert.assertEquals(AUTHENTICATED_JSON, actualJson, true);
	}

	// Deserialization restores the principal as a User instance with its ROLE_USER authority.
	@Test
	public void deserializeAuthenticatedUsernamePasswordAuthenticationTokenWithUserTest() throws IOException {
		UsernamePasswordAuthenticationToken token = this.mapper.readValue(AUTHENTICATED_JSON, UsernamePasswordAuthenticationToken.class);
		assertThat(token).isNotNull();
		assertThat(token.getPrincipal()).isNotNull().isInstanceOf(User.class);
		assertThat(((User) token.getPrincipal()).getAuthorities()).isNotNull().hasSize(1)
				.contains(new SimpleGrantedAuthority("ROLE_USER"));
		assertThat(token.isAuthenticated()).isEqualTo(true);
		assertThat(token.getAuthorities()).hasSize(1).contains(new SimpleGrantedAuthority("ROLE_USER"));
	}

	// After eraseCredentials() the serialized password fields must be null
	// (expected JSON is the baseline with USER_PASSWORD regex-replaced by null).
	@Test
	public void serializeAuthenticatedUsernamePasswordAuthenticationTokenMixinAfterEraseCredentialInvoked()
			throws JsonProcessingException, JSONException {
		UsernamePasswordAuthenticationToken token = createToken();
		token.eraseCredentials();
		String actualJson = this.mapper.writeValueAsString(token);
		JSONAssert.assertEquals(AUTHENTICATED_JSON.replaceAll(UserDeserializerTests.USER_PASSWORD, "null"), actualJson, true);
	}

	// Arbitrary (non-User) principals serialize with their own @class marker.
	@Test
	public void serializeAuthenticatedUsernamePasswordAuthenticationTokenMixinWithNonUserPrincipalTest()
			throws JsonProcessingException, JSONException {
		NonUserPrincipal principal = new NonUserPrincipal();
		principal.setUsername("admin");
		UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(principal, null, new ArrayList<>());
		String actualJson = this.mapper.writeValueAsString(token);
		JSONAssert.assertEquals(AUTHENTICATED_NON_USER_PRINCIPAL_JSON, actualJson, true);
	}

	// ...and deserialize back to the declared principal type.
	@Test
	public void deserializeAuthenticatedUsernamePasswordAuthenticationTokenWithNonUserPrincipalTest() throws IOException {
		UsernamePasswordAuthenticationToken token = this.mapper.readValue(AUTHENTICATED_NON_USER_PRINCIPAL_JSON, UsernamePasswordAuthenticationToken.class);
		assertThat(token).isNotNull();
		assertThat(token.getPrincipal()).isNotNull().isInstanceOf(NonUserPrincipal.class);
	}

	// A String details value survives the round trip as exactly a String.
	@Test
	public void deserializeAuthenticatedUsernamePasswordAuthenticationTokenWithDetailsTest() throws IOException {
		UsernamePasswordAuthenticationToken token = this.mapper.readValue(AUTHENTICATED_STRINGDETAILS_JSON, UsernamePasswordAuthenticationToken.class);
		assertThat(token).isNotNull();
		assertThat(token.getPrincipal()).isNotNull().isInstanceOf(User.class);
		assertThat(((User) token.getPrincipal()).getAuthorities()).isNotNull().hasSize(1)
				.contains(new SimpleGrantedAuthority("ROLE_USER"));
		assertThat(token.isAuthenticated()).isEqualTo(true);
		assertThat(token.getAuthorities()).hasSize(1).contains(new SimpleGrantedAuthority("ROLE_USER"));
		assertThat(token.getDetails()).isExactlyInstanceOf(String.class).isEqualTo("details");
	}

	// Full round trip with null credentials/details must reproduce an equal token.
	@Test
	public void serializingThenDeserializingWithNoCredentialsOrDetailsShouldWork() throws IOException {
		UsernamePasswordAuthenticationToken original = new UsernamePasswordAuthenticationToken("Frodo", null);
		String serialized = this.mapper.writeValueAsString(original);
		UsernamePasswordAuthenticationToken deserialized = this.mapper.readValue(serialized, UsernamePasswordAuthenticationToken.class);
		assertThat(deserialized).isEqualTo(original);
	}

	// Round trip must still work when the mapper has custom inclusion settings
	// (exercise for gh-style regression; reconfigures the shared mapper in place).
	@Test
	public void serializingThenDeserializingWithConfiguredObjectMapperShouldWork() throws IOException {
		this.mapper.setDefaultPropertyInclusion(Value.construct(Include.ALWAYS, Include.NON_NULL))
				.setSerializationInclusion(Include.NON_ABSENT);
		UsernamePasswordAuthenticationToken original = new UsernamePasswordAuthenticationToken("Frodo", null);
		String serialized = this.mapper.writeValueAsString(original);
		UsernamePasswordAuthenticationToken deserialized = this.mapper.readValue(serialized, UsernamePasswordAuthenticationToken.class);
		assertThat(deserialized).isEqualTo(original);
	}

	// Builds the canonical authenticated token matching AUTHENTICATED_JSON:
	// default User as principal, its password as credentials, its authorities.
	private UsernamePasswordAuthenticationToken createToken() {
		User user = createDefaultUser();
		UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(user, user.getPassword(), user.getAuthorities());
		return token;
	}

	// Minimal JavaBean principal used to test serialization of non-User principals.
	@JsonClassDescription
	public static class NonUserPrincipal {

		private String username;

		public String getUsername() {
			return this.username;
		}

		public void setUsername(String username) {
			this.username = username;
		}

	}

}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.jps.model.serialization.java;

import com.intellij.openapi.util.JDOMUtil;
import com.intellij.openapi.util.text.StringUtil;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jps.model.JpsElementFactory;
import org.jetbrains.jps.model.JpsProject;
import org.jetbrains.jps.model.JpsSimpleElement;
import org.jetbrains.jps.model.JpsUrlList;
import org.jetbrains.jps.model.java.*;
import org.jetbrains.jps.model.library.JpsMavenRepositoryLibraryDescriptor;
import org.jetbrains.jps.model.library.JpsOrderRootType;
import org.jetbrains.jps.model.library.JpsRepositoryLibraryType;
import org.jetbrains.jps.model.module.JpsDependencyElement;
import org.jetbrains.jps.model.module.JpsModule;
import org.jetbrains.jps.model.module.JpsModuleReference;
import org.jetbrains.jps.model.module.JpsModuleSourceRootType;
import org.jetbrains.jps.model.serialization.JDomSerializationUtil;
import org.jetbrains.jps.model.serialization.JpsModelSerializerExtension;
import org.jetbrains.jps.model.serialization.JpsProjectExtensionSerializer;
import org.jetbrains.jps.model.serialization.artifact.JpsPackagingElementSerializer;
import org.jetbrains.jps.model.serialization.java.compiler.*;
import org.jetbrains.jps.model.serialization.library.JpsLibraryPropertiesSerializer;
import org.jetbrains.jps.model.serialization.library.JpsLibraryRootTypeSerializer;
import org.jetbrains.jps.model.serialization.module.JpsModuleRootModelSerializer;
import org.jetbrains.jps.model.serialization.module.JpsModuleSourceRootPropertiesSerializer;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

/**
 * Serializer extension that reads and writes the Java-specific parts of the JPS
 * project model (compiler output paths, language level, dependency scope/export
 * flags, source/resource root properties, packaging elements, repository
 * libraries) to/from their JDOM XML representation.
 *
 * @author nik
 */
public class JpsJavaModelSerializerExtension extends JpsModelSerializerExtension {
  // XML attribute/tag names used in module .iml and project configuration files.
  public static final String EXPORTED_ATTRIBUTE = "exported";
  public static final String SCOPE_ATTRIBUTE = "scope";
  public static final String OUTPUT_TAG = "output";
  public static final String URL_ATTRIBUTE = "url";
  public static final String LANGUAGE_LEVEL_ATTRIBUTE = "languageLevel";
  public static final String EXPLODED_TAG = "exploded";
  public static final String EXCLUDE_EXPLODED_TAG = "exclude-exploded";
  public static final String TEST_OUTPUT_TAG = "output-test";
  public static final String INHERIT_COMPILER_OUTPUT_ATTRIBUTE = "inherit-compiler-output";
  public static final String EXCLUDE_OUTPUT_TAG = "exclude-output";
  private static final String ANNOTATION_PATHS_TAG = "annotation-paths";
  private static final String JAVADOC_PATHS_TAG = "javadoc-paths";
  // Note: the module-level attribute name differs from the project-level one above.
  private static final String MODULE_LANGUAGE_LEVEL_ATTRIBUTE = "LANGUAGE_LEVEL";
  public static final String ROOT_TAG = "root";
  private static final String RELATIVE_OUTPUT_PATH_ATTRIBUTE = "relativeOutputPath";
  private static final String IS_GENERATED_ATTRIBUTE = "generated";

  // Shared serializer instance for plain (production) Java source roots.
  public static final JavaSourceRootPropertiesSerializer JAVA_SOURCE_ROOT_PROPERTIES_SERIALIZER =
    new JavaSourceRootPropertiesSerializer(JavaSourceRootType.SOURCE, JpsModuleRootModelSerializer.JAVA_SOURCE_ROOT_TYPE_ID);

  /** Loads exploded-directory and Java module settings from the root-model element. */
  @Override
  public void loadRootModel(@NotNull JpsModule module, @NotNull Element rootModel) {
    loadExplodedDirectoryExtension(module, rootModel);
    loadJavaModuleExtension(module, rootModel);
  }

  /** Mirror of {@link #loadRootModel}: writes the same settings back to XML. */
  @Override
  public void saveRootModel(@NotNull JpsModule module, @NotNull Element rootModel) {
    saveExplodedDirectoryExtension(module, rootModel);
    saveJavaModuleExtension(module, rootModel);
  }

  /**
   * Reads the "TestModuleProperties" component, which links a test module to the
   * production module it tests (attribute "production-module").
   */
  @Override
  public void loadModuleOptions(@NotNull JpsModule module, @NotNull Element rootElement) {
    Element testModuleProperties = JDomSerializationUtil.findComponent(rootElement, "TestModuleProperties");
    if (testModuleProperties != null) {
      String productionModuleName = testModuleProperties.getAttributeValue("production-module");
      if (productionModuleName != null) {
        getService().setTestModuleProperties(module, JpsElementFactory.getInstance().createModuleReference(productionModuleName));
      }
    }
  }

  /** Serializers for project-level components (ProjectRootManager, compiler settings, ...). */
  @NotNull
  @Override
  public List<? extends JpsProjectExtensionSerializer> getProjectExtensionSerializers() {
    return Arrays.asList(new JavaProjectExtensionSerializer(),
                         new JpsJavaCompilerConfigurationSerializer(),
                         new JpsJavaCompilerNotNullableSerializer(),
                         new JpsCompilerValidationExcludeSerializer(),
                         new JpsJavaCompilerWorkspaceConfigurationSerializer(),
                         new JpsJavaCompilerOptionsSerializer("JavacSettings", "Javac"),
                         new JpsEclipseCompilerOptionsSerializer("EclipseCompilerSettings", "Eclipse"),
                         new RmicCompilerOptionsSerializer("RmicSettings", "Rmic"));
  }

  /** Serializers for the four Java root kinds: source, test source, resource, test resource. */
  @NotNull
  @Override
  public List<? extends JpsModuleSourceRootPropertiesSerializer<?>> getModuleSourceRootPropertiesSerializers() {
    return Arrays.asList(JAVA_SOURCE_ROOT_PROPERTIES_SERIALIZER,
                         new JavaSourceRootPropertiesSerializer(JavaSourceRootType.TEST_SOURCE, JpsModuleRootModelSerializer.JAVA_TEST_ROOT_TYPE_ID),
                         new JavaResourceRootPropertiesSerializer(JavaResourceRootType.RESOURCE, "java-resource"),
                         new JavaResourceRootPropertiesSerializer(JavaResourceRootType.TEST_RESOURCE, "java-test-resource"));
  }

  /**
   * Reads the "exported" flag (presence of the attribute, value ignored) and the
   * dependency scope from an orderEntry element; unknown scope names fall back to COMPILE.
   */
  @Override
  public void loadModuleDependencyProperties(JpsDependencyElement dependency, Element entry) {
    boolean exported = entry.getAttributeValue(EXPORTED_ATTRIBUTE) != null;
    String scopeName = entry.getAttributeValue(SCOPE_ATTRIBUTE);
    JpsJavaDependencyScope scope;
    try {
      scope = scopeName != null ? JpsJavaDependencyScope.valueOf(scopeName) : JpsJavaDependencyScope.COMPILE;
    }
    catch (IllegalArgumentException e) {
      // Unrecognized scope name in the XML: degrade gracefully to COMPILE.
      scope = JpsJavaDependencyScope.COMPILE;
    }
    final JpsJavaDependencyExtension extension = getService().getOrCreateDependencyExtension(dependency);
    extension.setExported(exported);
    extension.setScope(scope);
  }

  /**
   * Mirror of {@link #loadModuleDependencyProperties}: "exported" is written as an
   * empty-valued attribute, and scope is omitted when it is the default (COMPILE).
   */
  @Override
  public void saveModuleDependencyProperties(JpsDependencyElement dependency, Element orderEntry) {
    JpsJavaDependencyExtension extension = getService().getDependencyExtension(dependency);
    if (extension != null) {
      if (extension.isExported()) {
        orderEntry.setAttribute(EXPORTED_ATTRIBUTE, "");
      }
      JpsJavaDependencyScope scope = extension.getScope();
      if (scope != JpsJavaDependencyScope.COMPILE) {
        orderEntry.setAttribute(SCOPE_ATTRIBUTE, scope.name());
      }
    }
  }

  /** Library root kinds; the boolean flag presumably marks write-supported kinds — TODO confirm. */
  @Override
  public List<JpsLibraryRootTypeSerializer> getLibraryRootTypeSerializers() {
    return Arrays.asList(new JpsLibraryRootTypeSerializer("JAVADOC", JpsOrderRootType.DOCUMENTATION, true),
                         new JpsLibraryRootTypeSerializer("ANNOTATIONS", JpsAnnotationRootType.INSTANCE, false),
                         new JpsLibraryRootTypeSerializer("NATIVE", JpsNativeLibraryRootType.INSTANCE, false));
  }

  /** SDK root kinds (note: tag names differ from the library ones above). */
  @NotNull
  @Override
  public List<JpsLibraryRootTypeSerializer> getSdkRootTypeSerializers() {
    return Arrays.asList(new JpsLibraryRootTypeSerializer("javadocPath", JpsOrderRootType.DOCUMENTATION, true),
                         new JpsLibraryRootTypeSerializer("annotationsPath", JpsAnnotationRootType.INSTANCE, true));
  }

  /** Artifact packaging elements for production and test module outputs. */
  @NotNull
  @Override
  public List<? extends JpsPackagingElementSerializer<?>> getPackagingElementSerializers() {
    return Arrays.asList(new JpsModuleOutputPackagingElementSerializer(),
                         new JpsTestModuleOutputPackagingElementSerializer());
  }

  /** Properties serializer for Maven repository-backed libraries. */
  @NotNull
  public List<? extends JpsLibraryPropertiesSerializer<?>> getLibraryPropertiesSerializers() {
    return Collections.singletonList(new JpsRepositoryLibraryPropertiesSerializer());
  }

  // Legacy J2EE "exploded directory" support: only create the extension when the
  // <exploded> tag is actually present.
  private static void loadExplodedDirectoryExtension(JpsModule module, Element rootModelComponent) {
    final Element exploded = rootModelComponent.getChild(EXPLODED_TAG);
    if (exploded != null) {
      final ExplodedDirectoryModuleExtension extension = getService().getOrCreateExplodedDirectoryExtension(module);
      extension.setExcludeExploded(rootModelComponent.getChild(EXCLUDE_EXPLODED_TAG) != null);
      extension.setExplodedUrl(exploded.getAttributeValue(URL_ATTRIBUTE));
    }
  }

  // Mirror of loadExplodedDirectoryExtension. addContent(0, ...) prepends, so the
  // final child order is <exploded> before <exclude-exploded>.
  private static void saveExplodedDirectoryExtension(JpsModule module, Element rootModelElement) {
    ExplodedDirectoryModuleExtension extension = getService().getExplodedDirectoryExtension(module);
    if (extension != null) {
      if (extension.isExcludeExploded()) {
        rootModelElement.addContent(0, new Element(EXCLUDE_EXPLODED_TAG));
      }
      rootModelElement.addContent(0, new Element(EXPLODED_TAG).setAttribute(URL_ATTRIBUTE, extension.getExplodedUrl()));
    }
  }

  // Reads output/test-output URLs, inherit/exclude flags, language level, and the
  // annotation/javadoc root lists into the module's Java extension.
  private static void loadJavaModuleExtension(JpsModule module, Element rootModelComponent) {
    final JpsJavaModuleExtension extension = getService().getOrCreateModuleExtension(module);
    final Element outputTag = rootModelComponent.getChild(OUTPUT_TAG);
    String outputUrl = outputTag != null ? outputTag.getAttributeValue(URL_ATTRIBUTE) : null;
    extension.setOutputUrl(outputUrl);
    final Element testOutputTag = rootModelComponent.getChild(TEST_OUTPUT_TAG);
    String testOutputUrl = testOutputTag != null ? testOutputTag.getAttributeValue(URL_ATTRIBUTE) : null;
    // An absent/empty test output falls back to the production output URL.
    extension.setTestOutputUrl(StringUtil.isEmpty(testOutputUrl) ? outputUrl : testOutputUrl);

    extension.setInheritOutput(Boolean.parseBoolean(rootModelComponent.getAttributeValue(INHERIT_COMPILER_OUTPUT_ATTRIBUTE)));
    extension.setExcludeOutput(rootModelComponent.getChild(EXCLUDE_OUTPUT_TAG) != null);

    final String languageLevel = rootModelComponent.getAttributeValue(MODULE_LANGUAGE_LEVEL_ATTRIBUTE);
    if (languageLevel != null) {
      // NOTE(review): valueOf throws IllegalArgumentException on unknown level names;
      // unlike dependency scope above there is no fallback here — presumably intentional.
      extension.setLanguageLevel(LanguageLevel.valueOf(languageLevel));
    }

    loadAdditionalRoots(rootModelComponent, ANNOTATION_PATHS_TAG, extension.getAnnotationRoots());
    loadAdditionalRoots(rootModelComponent, JAVADOC_PATHS_TAG, extension.getJavadocRoots());
  }

  // Mirror of loadJavaModuleExtension. Children are prepended (addContent(0, ...)),
  // so they are written in reverse of the intended final order: output, output-test,
  // exclude-output.
  private static void saveJavaModuleExtension(JpsModule module, Element rootModelComponent) {
    JpsJavaModuleExtension extension = getService().getModuleExtension(module);
    if (extension == null) return;
    if (extension.isExcludeOutput()) {
      rootModelComponent.addContent(0, new Element(EXCLUDE_OUTPUT_TAG));
    }

    String testOutputUrl = extension.getTestOutputUrl();
    if (testOutputUrl != null) {
      rootModelComponent.addContent(0, new Element(TEST_OUTPUT_TAG).setAttribute(URL_ATTRIBUTE, testOutputUrl));
    }

    String outputUrl = extension.getOutputUrl();
    if (outputUrl != null) {
      rootModelComponent.addContent(0, new Element(OUTPUT_TAG).setAttribute(URL_ATTRIBUTE, outputUrl));
    }

    LanguageLevel languageLevel = extension.getLanguageLevel();
    if (languageLevel != null) {
      rootModelComponent.setAttribute(MODULE_LANGUAGE_LEVEL_ATTRIBUTE, languageLevel.name());
    }

    if (extension.isInheritOutput()) {
      rootModelComponent.setAttribute(INHERIT_COMPILER_OUTPUT_ATTRIBUTE, "true");
    }

    saveAdditionalRoots(rootModelComponent, JAVADOC_PATHS_TAG, extension.getJavadocRoots());
    saveAdditionalRoots(rootModelComponent, ANNOTATION_PATHS_TAG, extension.getAnnotationRoots());
  }

  // Collects <root url="..."/> children of the named tag into the given URL list.
  // JDOMUtil.getChildren tolerates a null parent (absent tag) — presumably returns
  // an empty collection; confirm against JDOMUtil.
  private static void loadAdditionalRoots(Element rootModelComponent, final String rootsTagName, final JpsUrlList result) {
    final Element roots = rootModelComponent.getChild(rootsTagName);
    for (Element root : JDOMUtil.getChildren(roots, ROOT_TAG)) {
      result.addUrl(root.getAttributeValue(URL_ATTRIBUTE));
    }
  }

  // Mirror of loadAdditionalRoots; the wrapper tag is omitted entirely when the list is empty.
  private static void saveAdditionalRoots(Element rootModelComponent, final String rootsTagName, final JpsUrlList list) {
    List<String> urls = list.getUrls();
    if (!urls.isEmpty()) {
      Element roots = new Element(rootsTagName);
      for (String url : urls) {
        roots.addContent(new Element(ROOT_TAG).setAttribute(URL_ATTRIBUTE, url));
      }
      rootModelComponent.addContent(roots);
    }
  }

  // Shorthand for the singleton service all load/save methods go through.
  private static JpsJavaExtensionService getService() {
    return JpsJavaExtensionService.getInstance();
  }

  /** (De)serializes a production-module-output packaging element ("module-output" tag, "name" attribute). */
  private static class JpsModuleOutputPackagingElementSerializer extends JpsPackagingElementSerializer<JpsProductionModuleOutputPackagingElement> {
    private JpsModuleOutputPackagingElementSerializer() {
      super("module-output", JpsProductionModuleOutputPackagingElement.class);
    }

    @Override
    public JpsProductionModuleOutputPackagingElement load(Element element) {
      JpsModuleReference reference = JpsElementFactory.getInstance().createModuleReference(element.getAttributeValue("name"));
      return getService().createProductionModuleOutput(reference);
    }

    @Override
    public void save(JpsProductionModuleOutputPackagingElement element, Element tag) {
      tag.setAttribute("name", element.getModuleReference().getModuleName());
    }
  }

  /** Same as above for test output ("module-test-output" tag). */
  private static class JpsTestModuleOutputPackagingElementSerializer extends JpsPackagingElementSerializer<JpsTestModuleOutputPackagingElement> {
    private JpsTestModuleOutputPackagingElementSerializer() {
      super("module-test-output", JpsTestModuleOutputPackagingElement.class);
    }

    @Override
    public JpsTestModuleOutputPackagingElement load(Element element) {
      JpsModuleReference reference = JpsElementFactory.getInstance().createModuleReference(element.getAttributeValue("name"));
      return getService().createTestModuleOutput(reference);
    }

    @Override
    public void save(JpsTestModuleOutputPackagingElement element, Element tag) {
      tag.setAttribute("name", element.getModuleReference().getModuleName());
    }
  }

  /** Handles the project-level "ProjectRootManager" component: output URL and language level. */
  private static class JavaProjectExtensionSerializer extends JpsProjectExtensionSerializer {
    public JavaProjectExtensionSerializer() {
      super(null, "ProjectRootManager");
    }

    @Override
    public void loadExtension(@NotNull JpsProject project, @NotNull Element componentTag) {
      JpsJavaProjectExtension extension = getService().getOrCreateProjectExtension(project);
      final Element output = componentTag.getChild(OUTPUT_TAG);
      if (output != null) {
        String url = output.getAttributeValue(URL_ATTRIBUTE);
        if (url != null) {
          extension.setOutputUrl(url);
        }
      }
      String languageLevel = componentTag.getAttributeValue(LANGUAGE_LEVEL_ATTRIBUTE);
      if (languageLevel != null) {
        extension.setLanguageLevel(LanguageLevel.valueOf(languageLevel));
      }
    }

    @Override
    public void saveExtension(@NotNull JpsProject project, @NotNull Element componentTag) {
      JpsJavaProjectExtension extension = getService().getProjectExtension(project);
      if (extension == null) return;
      String outputUrl = extension.getOutputUrl();
      if (outputUrl != null) {
        componentTag.addContent(new Element(OUTPUT_TAG).setAttribute(URL_ATTRIBUTE, outputUrl));
      }
      LanguageLevel level = extension.getLanguageLevel();
      // Legacy flags derived from the language level; relies on enum declaration order.
      componentTag.setAttribute(LANGUAGE_LEVEL_ATTRIBUTE, level.name());
      componentTag.setAttribute("assert-keyword", Boolean.toString(level.compareTo(LanguageLevel.JDK_1_4) >= 0));
      componentTag.setAttribute("jdk-15", Boolean.toString(level.compareTo(LanguageLevel.JDK_1_5) >= 0));
    }
  }

  /** Source-root properties: package prefix and "generated" marker. */
  private static class JavaSourceRootPropertiesSerializer extends JpsModuleSourceRootPropertiesSerializer<JavaSourceRootProperties> {
    private JavaSourceRootPropertiesSerializer(JpsModuleSourceRootType<JavaSourceRootProperties> type, String typeId) {
      super(type, typeId);
    }

    @Override
    public JavaSourceRootProperties loadProperties(@NotNull Element sourceRootTag) {
      String packagePrefix = StringUtil.notNullize(sourceRootTag.getAttributeValue(JpsModuleRootModelSerializer.PACKAGE_PREFIX_ATTRIBUTE));
      boolean isGenerated = Boolean.parseBoolean(sourceRootTag.getAttributeValue(IS_GENERATED_ATTRIBUTE));
      return getService().createSourceRootProperties(packagePrefix, isGenerated);
    }

    @Override
    public void saveProperties(@NotNull JavaSourceRootProperties properties, @NotNull Element sourceRootTag) {
      // isTestSource is derived from the serializer's root type, not from the properties.
      String isTestSource = Boolean.toString(getType().equals(JavaSourceRootType.TEST_SOURCE));
      sourceRootTag.setAttribute(JpsModuleRootModelSerializer.IS_TEST_SOURCE_ATTRIBUTE, isTestSource);
      String packagePrefix = properties.getPackagePrefix();
      if (!packagePrefix.isEmpty()) {
        sourceRootTag.setAttribute(JpsModuleRootModelSerializer.PACKAGE_PREFIX_ATTRIBUTE, packagePrefix);
      }
      if (properties.isForGeneratedSources()) {
        sourceRootTag.setAttribute(IS_GENERATED_ATTRIBUTE, Boolean.TRUE.toString());
      }
    }
  }

  /** Resource-root properties: relative output path and "generated" marker. */
  private static class JavaResourceRootPropertiesSerializer extends JpsModuleSourceRootPropertiesSerializer<JavaResourceRootProperties> {
    private JavaResourceRootPropertiesSerializer(JpsModuleSourceRootType<JavaResourceRootProperties> type, String typeId) {
      super(type, typeId);
    }

    @Override
    public JavaResourceRootProperties loadProperties(@NotNull Element sourceRootTag) {
      String relativeOutputPath = StringUtil.notNullize(sourceRootTag.getAttributeValue(RELATIVE_OUTPUT_PATH_ATTRIBUTE));
      boolean isGenerated = Boolean.parseBoolean(sourceRootTag.getAttributeValue(IS_GENERATED_ATTRIBUTE));
      return getService().createResourceRootProperties(relativeOutputPath, isGenerated);
    }

    @Override
    public void saveProperties(@NotNull JavaResourceRootProperties properties, @NotNull Element sourceRootTag) {
      String relativeOutputPath = properties.getRelativeOutputPath();
      if (!relativeOutputPath.isEmpty()) {
        sourceRootTag.setAttribute(RELATIVE_OUTPUT_PATH_ATTRIBUTE, relativeOutputPath);
      }
      if (properties.isForGeneratedSources()) {
        sourceRootTag.setAttribute(IS_GENERATED_ATTRIBUTE, Boolean.TRUE.toString());
      }
    }
  }

  /** Repository-library properties: just the Maven coordinates ("maven-id" attribute). */
  private static class JpsRepositoryLibraryPropertiesSerializer extends JpsLibraryPropertiesSerializer<JpsSimpleElement<JpsMavenRepositoryLibraryDescriptor>> {
    private static final String MAVEN_ID_ATTRIBUTE = "maven-id";

    public JpsRepositoryLibraryPropertiesSerializer() {
      super(JpsRepositoryLibraryType.INSTANCE, JpsRepositoryLibraryType.INSTANCE.getTypeId());
    }

    @Override
    public JpsSimpleElement<JpsMavenRepositoryLibraryDescriptor> loadProperties(@Nullable Element elem) {
      // Both a missing element and a missing attribute yield a null maven-id.
      return JpsElementFactory.getInstance().createSimpleElement(new JpsMavenRepositoryLibraryDescriptor(
        elem != null? elem.getAttributeValue(MAVEN_ID_ATTRIBUTE, (String)null) : null
      ));
    }

    @Override
    public void saveProperties(JpsSimpleElement<JpsMavenRepositoryLibraryDescriptor> properties, Element element) {
      final String mavenId = properties.getData().getMavenId();
      if (mavenId != null) {
        element.setAttribute(MAVEN_ID_ATTRIBUTE, mavenId);
      }
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.plan; import org.apache.calcite.config.CalciteSystemProperty; import org.apache.calcite.plan.hep.HepPlanner; import org.apache.calcite.plan.hep.HepProgram; import org.apache.calcite.plan.hep.HepProgramBuilder; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.core.RelFactories; import org.apache.calcite.rel.rules.CoreRules; import org.apache.calcite.sql2rel.RelFieldTrimmer; import org.apache.calcite.tools.RelBuilder; import org.apache.calcite.util.Pair; import org.apache.calcite.util.graph.DefaultDirectedGraph; import org.apache.calcite.util.graph.DefaultEdge; import org.apache.calcite.util.graph.DirectedGraph; import org.apache.calcite.util.graph.Graphs; import org.apache.calcite.util.graph.TopologicalOrderIterator; import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Supplier; /** * Utility methods for using * materialized views and lattices for queries. 
*/ public abstract class RelOptMaterializations { /** * Returns a list of RelNode transformed from all possible combination of * materialized view uses. Big queries will likely have more than one * transformed RelNode, e.g., (t1 group by c1) join (t2 group by c2). * @param rel the original RelNode * @param materializations the materialized view list * @return the list of transformed RelNode together with their corresponding * materialized views used in the transformation. */ public static List<Pair<RelNode, List<RelOptMaterialization>>> useMaterializedViews( final RelNode rel, List<RelOptMaterialization> materializations) { final List<RelOptMaterialization> applicableMaterializations = getApplicableMaterializations(rel, materializations); final List<Pair<RelNode, List<RelOptMaterialization>>> applied = new ArrayList<>(); applied.add(Pair.of(rel, ImmutableList.of())); for (RelOptMaterialization m : applicableMaterializations) { int count = applied.size(); for (int i = 0; i < count; i++) { Pair<RelNode, List<RelOptMaterialization>> current = applied.get(i); List<RelNode> sub = substitute(current.left, m); if (!sub.isEmpty()) { ImmutableList.Builder<RelOptMaterialization> builder = ImmutableList.builder(); builder.addAll(current.right); builder.add(m); List<RelOptMaterialization> uses = builder.build(); for (RelNode rel2 : sub) { applied.add(Pair.of(rel2, uses)); } } } } return applied.subList(1, applied.size()); } /** * Returns a list of RelNode transformed from all possible lattice uses. * @param rel the original RelNode * @param lattices the lattice list * @return the list of transformed RelNode together with their corresponding * lattice used in the transformation. */ public static List<Pair<RelNode, RelOptLattice>> useLattices( final RelNode rel, List<RelOptLattice> lattices) { final Set<RelOptTable> queryTables = RelOptUtil.findTables(rel); // Use a lattice if the query uses at least the central (fact) table of the // lattice. 
final List<Pair<RelNode, RelOptLattice>> latticeUses = new ArrayList<>(); final Set<List<String>> queryTableNames = Sets.newHashSet( Iterables.transform(queryTables, RelOptTable::getQualifiedName)); // Remember leaf-join form of root so we convert at most once. final Supplier<RelNode> leafJoinRoot = Suppliers.memoize(() -> RelOptMaterialization.toLeafJoinForm(rel))::get; for (RelOptLattice lattice : lattices) { if (queryTableNames.contains(lattice.rootTable().getQualifiedName())) { RelNode rel2 = lattice.rewrite(leafJoinRoot.get()); if (rel2 != null) { if (CalciteSystemProperty.DEBUG.value()) { System.out.println("use lattice:\n" + RelOptUtil.toString(rel2)); } latticeUses.add(Pair.of(rel2, lattice)); } } } return latticeUses; } /** * Returns a list of materializations that can potentially be used by the query. */ public static List<RelOptMaterialization> getApplicableMaterializations( RelNode rel, List<RelOptMaterialization> materializations) { DirectedGraph<List<String>, DefaultEdge> usesGraph = DefaultDirectedGraph.create(); final Map<List<String>, RelOptMaterialization> qnameMap = new HashMap<>(); for (RelOptMaterialization materialization : materializations) { // If materialization is a tile in a lattice, we will deal with it shortly. if (materialization.qualifiedTableName != null && materialization.starTable == null) { final List<String> qname = materialization.qualifiedTableName; qnameMap.put(qname, materialization); for (RelOptTable usedTable : RelOptUtil.findTables(materialization.queryRel)) { usesGraph.addVertex(qname); usesGraph.addVertex(usedTable.getQualifiedName()); usesGraph.addEdge(usedTable.getQualifiedName(), qname); } } } // Use a materialization if uses at least one of the tables are used by // the query. (Simple rule that includes some materializations we won't // actually use.) 
// For example, given materializations: // T = Emps Join Depts // T2 = T Group by C1 // the graph will contain // (T, Emps), (T, Depts), (T2, T) // and therefore we can deduce T2 uses Emps. final Graphs.FrozenGraph<List<String>, DefaultEdge> frozenGraph = Graphs.makeImmutable(usesGraph); final Set<RelOptTable> queryTablesUsed = RelOptUtil.findTables(rel); final List<RelOptMaterialization> applicableMaterializations = new ArrayList<>(); for (List<String> qname : TopologicalOrderIterator.of(usesGraph)) { RelOptMaterialization materialization = qnameMap.get(qname); if (materialization != null && usesTable(materialization.qualifiedTableName, queryTablesUsed, frozenGraph)) { applicableMaterializations.add(materialization); } } return applicableMaterializations; } private static List<RelNode> substitute( RelNode root, RelOptMaterialization materialization) { // First, if the materialization is in terms of a star table, rewrite // the query in terms of the star table. if (materialization.starTable != null) { RelNode newRoot = RelOptMaterialization.tryUseStar(root, materialization.starRelOptTable); if (newRoot != null) { root = newRoot; } } // Push filters to the bottom, and combine projects on top. RelNode target = materialization.queryRel; // try to trim unused field in relational expressions. 
root = trimUnusedfields(root); target = trimUnusedfields(target); HepProgram program = new HepProgramBuilder() .addRuleInstance(CoreRules.FILTER_PROJECT_TRANSPOSE) .addRuleInstance(CoreRules.FILTER_MERGE) .addRuleInstance(CoreRules.FILTER_INTO_JOIN) .addRuleInstance(CoreRules.JOIN_CONDITION_PUSH) .addRuleInstance(CoreRules.FILTER_AGGREGATE_TRANSPOSE) .addRuleInstance(CoreRules.PROJECT_MERGE) .addRuleInstance(CoreRules.PROJECT_REMOVE) .addRuleInstance(CoreRules.PROJECT_JOIN_TRANSPOSE) .addRuleInstance(CoreRules.PROJECT_SET_OP_TRANSPOSE) .addRuleInstance(CoreRules.FILTER_TO_CALC) .addRuleInstance(CoreRules.PROJECT_TO_CALC) .addRuleInstance(CoreRules.FILTER_CALC_MERGE) .addRuleInstance(CoreRules.PROJECT_CALC_MERGE) .addRuleInstance(CoreRules.CALC_MERGE) .build(); // We must use the same HEP planner for the two optimizations below. // Thus different nodes with the same digest will share the same vertex in // the plan graph. This is important for the matching process. final HepPlanner hepPlanner = new HepPlanner(program); hepPlanner.setRoot(target); target = hepPlanner.findBestExp(); hepPlanner.setRoot(root); root = hepPlanner.findBestExp(); return new SubstitutionVisitor(target, root).go(materialization.tableRel); } /** * Trim unused fields in relational expressions. */ private static RelNode trimUnusedfields(RelNode relNode) { final List<RelOptTable> relOptTables = RelOptUtil.findAllTables(relNode); RelOptSchema relOptSchema = null; if (relOptTables.size() != 0) { relOptSchema = relOptTables.get(0).getRelOptSchema(); } final RelBuilder relBuilder = RelFactories.LOGICAL_BUILDER.create( relNode.getCluster(), relOptSchema); final RelFieldTrimmer relFieldTrimmer = new RelFieldTrimmer(null, relBuilder); final RelNode rel = relFieldTrimmer.trim(relNode); return rel; } /** * Returns whether {@code table} uses one or more of the tables in * {@code usedTables}. 
*/ private static boolean usesTable( List<String> qualifiedName, Set<RelOptTable> usedTables, Graphs.FrozenGraph<List<String>, DefaultEdge> usesGraph) { for (RelOptTable queryTable : usedTables) { if (usesGraph.getShortestDistance(queryTable.getQualifiedName(), qualifiedName) != -1) { return true; } } return false; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.lang3.time; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.lang.reflect.Constructor; import java.lang.reflect.Modifier; import java.text.ParseException; import java.util.Calendar; import java.util.Date; import java.util.Locale; import java.util.TimeZone; import org.apache.commons.lang3.test.SystemDefaultsSwitch; import org.apache.commons.lang3.test.SystemDefaults; import org.junit.Rule; import org.junit.Test; /** * TestCase for DateFormatUtils. 
 */
@SuppressWarnings("deprecation") // tests lots of deprecated items
public class DateFormatUtilsTest {

    // Restores the JVM default locale/time zone after each test.
    @Rule
    public SystemDefaultsSwitch defaults = new SystemDefaultsSwitch();

    //-----------------------------------------------------------------------
    // Verifies DateFormatUtils is a conventional (non-final, public-ctor)
    // utility class that tools can still instantiate via JavaBean plumbing.
    @Test
    public void testConstructor() {
        assertNotNull(new DateFormatUtils());
        final Constructor<?>[] cons = DateFormatUtils.class.getDeclaredConstructors();
        assertEquals(1, cons.length);
        assertTrue(Modifier.isPublic(cons[0].getModifiers()));
        assertTrue(Modifier.isPublic(DateFormatUtils.class.getModifiers()));
        assertFalse(Modifier.isFinal(DateFormatUtils.class.getModifiers()));
    }

    //-----------------------------------------------------------------------
    // Formats a fixed instant via the Date and long overloads of format()
    // and checks them against a hand-built "yyyyMdH" string.
    @Test
    public void testFormat() {
        final Calendar c = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        c.set(2005, Calendar.JANUARY, 1, 12, 0, 0);
        // Switch to the default zone AFTER setting fields, so the expected
        // string below is read back in the same zone format() will use.
        c.setTimeZone(TimeZone.getDefault());
        final StringBuilder buffer = new StringBuilder();
        final int year = c.get(Calendar.YEAR);
        final int month = c.get(Calendar.MONTH) + 1; // Calendar months are 0-based
        final int day = c.get(Calendar.DAY_OF_MONTH);
        final int hour = c.get(Calendar.HOUR_OF_DAY);
        buffer.append(year);
        buffer.append(month);
        buffer.append(day);
        buffer.append(hour);
        assertEquals(buffer.toString(), DateFormatUtils.format(c.getTime(), "yyyyMdH"));
        assertEquals(buffer.toString(), DateFormatUtils.format(c.getTime().getTime(), "yyyyMdH"));
        assertEquals(buffer.toString(), DateFormatUtils.format(c.getTime(), "yyyyMdH", Locale.US));
        assertEquals(buffer.toString(), DateFormatUtils.format(c.getTime().getTime(), "yyyyMdH", Locale.US));
    }

    //-----------------------------------------------------------------------
    // Same as testFormat but exercises the Calendar overloads of format().
    @Test
    public void testFormatCalendar() {
        final Calendar c = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        c.set(2005, Calendar.JANUARY, 1, 12, 0, 0);
        c.setTimeZone(TimeZone.getDefault());
        final StringBuilder buffer = new StringBuilder();
        final int year = c.get(Calendar.YEAR);
        final int month = c.get(Calendar.MONTH) + 1; // Calendar months are 0-based
        final int day = c.get(Calendar.DAY_OF_MONTH);
        final int hour = c.get(Calendar.HOUR_OF_DAY);
        buffer.append(year);
        buffer.append(month);
        buffer.append(day);
        buffer.append(hour);
        assertEquals(buffer.toString(), DateFormatUtils.format(c, "yyyyMdH"));
        assertEquals(buffer.toString(), DateFormatUtils.format(c.getTime(), "yyyyMdH"));
        assertEquals(buffer.toString(), DateFormatUtils.format(c, "yyyyMdH", Locale.US));
        assertEquals(buffer.toString(), DateFormatUtils.format(c.getTime(), "yyyyMdH", Locale.US));
    }

    // formatUTC() must render the instant in UTC regardless of default zone.
    @Test
    public void testFormatUTC() {
        final Calendar c = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        c.set(2005, Calendar.JANUARY, 1, 12, 0, 0);
        assertEquals("2005-01-01T12:00:00", DateFormatUtils.formatUTC(c.getTime(), DateFormatUtils.ISO_DATETIME_FORMAT.getPattern()));
        assertEquals("2005-01-01T12:00:00", DateFormatUtils.formatUTC(c.getTime().getTime(), DateFormatUtils.ISO_DATETIME_FORMAT.getPattern()));
        assertEquals("2005-01-01T12:00:00", DateFormatUtils.formatUTC(c.getTime(), DateFormatUtils.ISO_DATETIME_FORMAT.getPattern(), Locale.US));
        assertEquals("2005-01-01T12:00:00", DateFormatUtils.formatUTC(c.getTime().getTime(), DateFormatUtils.ISO_DATETIME_FORMAT.getPattern(), Locale.US));
    }

    // Asserts the Date, long and Calendar overloads all produce expectedValue.
    private void assertFormats(final String expectedValue, final String pattern,
            final TimeZone timeZone, final Calendar cal) {
        assertEquals(expectedValue, DateFormatUtils.format(cal.getTime(), pattern, timeZone));
        assertEquals(expectedValue, DateFormatUtils.format(cal.getTime().getTime(), pattern, timeZone));
        assertEquals(expectedValue, DateFormatUtils.format(cal, pattern, timeZone));
    }

    // Fixture: 2002-02-23 09:11:12 in the given zone.
    private Calendar createFebruaryTestDate(final TimeZone timeZone) {
        final Calendar cal = Calendar.getInstance(timeZone);
        cal.set(2002, Calendar.FEBRUARY, 23, 9, 11, 12);
        return cal;
    }

    // Fixture: 2003-06-08 10:11:12 in the given zone.
    private Calendar createJuneTestDate(final TimeZone timeZone) {
        final Calendar cal = Calendar.getInstance(timeZone);
        cal.set(2003, Calendar.JUNE, 8, 10, 11, 12);
        return cal;
    }

    // Checks the February fixture formats as expected in GMT-3.
    private void testGmtMinus3(final String expectedValue, final String pattern) {
        final TimeZone timeZone = TimeZone.getTimeZone("GMT-3");
        assertFormats(expectedValue, pattern, timeZone, createFebruaryTestDate(timeZone));
    }

    // Checks the February fixture formats as expected in UTC.
    private void testUTC(final String expectedValue, final String pattern) {
        final TimeZone timeZone = TimeZone.getTimeZone("UTC");
        assertFormats(expectedValue, pattern, timeZone, createFebruaryTestDate(timeZone));
    }

    // ISO-8601 combined date/time patterns, with and without zone suffix.
    @Test
    public void testDateTimeISO() throws Exception {
        testGmtMinus3("2002-02-23T09:11:12", DateFormatUtils.ISO_DATETIME_FORMAT.getPattern());
        testGmtMinus3("2002-02-23T09:11:12-03:00", DateFormatUtils.ISO_DATETIME_TIME_ZONE_FORMAT.getPattern());
        testUTC("2002-02-23T09:11:12Z", DateFormatUtils.ISO_DATETIME_TIME_ZONE_FORMAT.getPattern());
    }

    // ISO-8601 date-only patterns; UTC renders the zone as "Z".
    @Test
    public void testDateISO() {
        testGmtMinus3("2002-02-23", DateFormatUtils.ISO_DATE_FORMAT.getPattern());
        testGmtMinus3("2002-02-23-03:00", DateFormatUtils.ISO_DATE_TIME_ZONE_FORMAT.getPattern());
        testUTC("2002-02-23Z", DateFormatUtils.ISO_DATE_TIME_ZONE_FORMAT.getPattern());
    }

    // ISO-8601 time patterns with the leading 'T'.
    @Test
    public void testTimeISO() {
        testGmtMinus3("T09:11:12", DateFormatUtils.ISO_TIME_FORMAT.getPattern());
        testGmtMinus3("T09:11:12-03:00", DateFormatUtils.ISO_TIME_TIME_ZONE_FORMAT.getPattern());
        testUTC("T09:11:12Z", DateFormatUtils.ISO_TIME_TIME_ZONE_FORMAT.getPattern());
    }

    // ISO-8601 time patterns without the leading 'T'.
    @Test
    public void testTimeNoTISO() {
        testGmtMinus3("09:11:12", DateFormatUtils.ISO_TIME_NO_T_FORMAT.getPattern());
        testGmtMinus3("09:11:12-03:00", DateFormatUtils.ISO_TIME_NO_T_TIME_ZONE_FORMAT.getPattern());
        testUTC("09:11:12Z", DateFormatUtils.ISO_TIME_NO_T_TIME_ZONE_FORMAT.getPattern());
    }

    // SMTP (RFC 822) date format; locale pinned to "en" because the pattern
    // emits localized day/month names.
    @SystemDefaults(locale="en")
    @Test
    public void testSMTP() {
        TimeZone timeZone = TimeZone.getTimeZone("GMT-3");
        Calendar june = createJuneTestDate(timeZone);
        assertFormats("Sun, 08 Jun 2003 10:11:12 -0300", DateFormatUtils.SMTP_DATETIME_FORMAT.getPattern(),
                timeZone, june);
        timeZone = TimeZone.getTimeZone("UTC");
        june = createJuneTestDate(timeZone);
        assertFormats("Sun, 08 Jun 2003 10:11:12 +0000", DateFormatUtils.SMTP_DATETIME_FORMAT.getPattern(),
                timeZone, june);
    }

    /*
    // Disabled regression for LANG-312; kept for reference because the JDK
    // itself may fail on this zone/locale combination (see warning below).
    public void testLang312() {
        String pattern = "dd/MM/yyyy";
        String expected = "19/04/1948";
        TimeZone timeZone = TimeZone.getTimeZone("CET");
        Locale locale = Locale.GERMANY;

        // show Calendar is good
        Calendar cal = Calendar.getInstance(timeZone, locale);
        cal.set(1948, 3, 19);
        assertEquals(expected, DateFormatUtils.format( cal.getTime(), pattern, timeZone, locale ) );

        Date date = new Date(48, 3, 19);

        // test JDK
        java.text.SimpleDateFormat sdf = new java.text.SimpleDateFormat(pattern, locale);
        sdf.setTimeZone(timeZone);
        // There's nothing we can do if the JDK fails, so just going to print a warning in this case
        // assertEquals(expected, sdf.format( date ) );
        if( ! expected.equals( sdf.format( date ) ) ) {
            System.out.println("WARNING: JDK test failed - testLang312()");
        }

        // test Commons
        assertEquals(expected, DateFormatUtils.format( date, pattern, timeZone, locale ) );
    }
    */

    // LANG-1000: parsing an ISO date-time with zone must not throw.
    @Test
    public void testLANG1000() throws Exception {
        final String date = "2013-11-18T12:48:05Z";
        DateFormatUtils.ISO_DATETIME_TIME_ZONE_FORMAT.parse(date);
    }

    // LANG-530: a Date formatted then parsed round-trips (modulo milliseconds,
    // which the pattern does not carry).
    @SystemDefaults(timezone="UTC")
    @Test
    public void testLang530() throws ParseException {
        final Date d = new Date();
        final String isoDateStr = DateFormatUtils.ISO_DATETIME_TIME_ZONE_FORMAT.format(d);
        final Date d2 = DateUtils.parseDate(isoDateStr, new String[] { DateFormatUtils.ISO_DATETIME_TIME_ZONE_FORMAT.getPattern() });
        // the format loses milliseconds so have to reintroduce them
        assertEquals("Date not equal to itself ISO formatted and parsed", d.getTime(), d2.getTime() + d.getTime() % 1000);
    }

    /**
     * According to LANG-916 (https://issues.apache.org/jira/browse/LANG-916),
     * the format method did contain a bug: it did not use the TimeZone data.
     *
     * This method test that the bug is fixed.
     */
    @Test
    public void testLang916() throws Exception {

        final Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("Europe/Paris"));
        cal.clear();
        cal.set(2009, 9, 16, 8, 42, 16); // 2009-10-16 08:42:16 Paris time

        // Long: the same instant rendered in three different zones.
        {
            final String value = DateFormatUtils.format(cal.getTimeInMillis(), DateFormatUtils.ISO_DATETIME_TIME_ZONE_FORMAT.getPattern(), TimeZone.getTimeZone("Europe/Paris"));
            assertEquals("long", "2009-10-16T08:42:16+02:00", value);
        }
        {
            final String value = DateFormatUtils.format(cal.getTimeInMillis(), DateFormatUtils.ISO_DATETIME_TIME_ZONE_FORMAT.getPattern(), TimeZone.getTimeZone("Asia/Kolkata"));
            assertEquals("long", "2009-10-16T12:12:16+05:30", value);
        }
        {
            final String value = DateFormatUtils.format(cal.getTimeInMillis(), DateFormatUtils.ISO_DATETIME_TIME_ZONE_FORMAT.getPattern(), TimeZone.getTimeZone("Europe/London"));
            assertEquals("long", "2009-10-16T07:42:16+01:00", value);
        }

        // Calendar: same checks through the Calendar overload.
        {
            final String value = DateFormatUtils.format(cal, DateFormatUtils.ISO_DATETIME_TIME_ZONE_FORMAT.getPattern(), TimeZone.getTimeZone("Europe/Paris"));
            assertEquals("calendar", "2009-10-16T08:42:16+02:00", value);
        }
        {
            final String value = DateFormatUtils.format(cal, DateFormatUtils.ISO_DATETIME_TIME_ZONE_FORMAT.getPattern(), TimeZone.getTimeZone("Asia/Kolkata"));
            assertEquals("calendar", "2009-10-16T12:12:16+05:30", value);
        }
        {
            final String value = DateFormatUtils.format(cal, DateFormatUtils.ISO_DATETIME_TIME_ZONE_FORMAT.getPattern(), TimeZone.getTimeZone("Europe/London"));
            assertEquals("calendar", "2009-10-16T07:42:16+01:00", value);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache license, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the license for the specific language governing permissions and * limitations under the license. */ package org.apache.logging.log4j.core.layout; import java.io.IOException; import java.util.List; import java.util.Map; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Marker; import org.apache.logging.log4j.MarkerManager; import org.apache.logging.log4j.ThreadContext; import org.apache.logging.log4j.core.Appender; import org.apache.logging.log4j.core.BasicConfigurationFactory; import org.apache.logging.log4j.core.Logger; import org.apache.logging.log4j.core.LoggerContext; import org.apache.logging.log4j.core.config.ConfigurationFactory; import org.apache.logging.log4j.core.impl.Log4jLogEvent; import org.apache.logging.log4j.core.jackson.Log4jXmlObjectMapper; import org.apache.logging.log4j.core.util.Constants; import org.apache.logging.log4j.core.util.Throwables; import org.apache.logging.log4j.message.SimpleMessage; import org.apache.logging.log4j.spi.AbstractLogger; import org.apache.logging.log4j.test.appender.ListAppender; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import com.fasterxml.jackson.core.JsonParseException; import 
com.fasterxml.jackson.databind.JsonMappingException;

import static org.junit.Assert.*;

/**
 * Tests {@link XmlLayout}.
 */
public class XmlLayoutTest {

    // NOTE(review): 'body' and 'markerTag' appear unused within this class —
    // confirm against history before removing.
    private static final String body = "<Message>empty mdc</Message>";
    static ConfigurationFactory cf = new BasicConfigurationFactory();
    private static final String markerTag = "<Marker name=\"EVENT\"/>";

    // Undo the configuration-factory override and clear any MDC/NDC state.
    @AfterClass
    public static void cleanupClass() {
        ConfigurationFactory.removeConfigurationFactory(cf);
        ThreadContext.clearAll();
    }

    // Install the basic configuration factory and reconfigure the context
    // before any test runs.
    @BeforeClass
    public static void setupClass() {
        ThreadContext.clearAll();
        ConfigurationFactory.setConfigurationFactory(cf);
        final LoggerContext ctx = (LoggerContext) LogManager.getContext();
        ctx.reconfigure();
    }

    LoggerContext ctx = (LoggerContext) LogManager.getContext();

    Logger rootLogger = this.ctx.getLogger("");

    // Asserts the serialized output contains the attribute name="value".
    private void checkAttribute(final String name, final String value, final boolean compact, final String str) {
        Assert.assertTrue(str, str.contains(name + "=\"" + value + "\""));
    }

    // Asserts the serialized output contains an attribute with this name.
    private void checkAttributeName(final String name, final boolean compact, final String str) {
        Assert.assertTrue(str, str.contains(name + "=\""));
    }

    // Asserts some line of the appender output contains the expected snippet.
    private void checkContains(final String expected, final List<String> list) {
        for (final String string : list) {
            final String trimedLine = string.trim();
            if (trimedLine.contains(expected)) {
                return;
            }
        }
        Assert.fail("Cannot find " + expected + " in " + list);
    }

    // Asserts a context-map entry was serialized as an <item/> element.
    private void checkElement(final String key, final String value, final boolean compact, final String str) {
        // <item key="MDC.A" value="A_Value"/>
        assertTrue(str, str.contains(String.format("<item key=\"%s\" value=\"%s\"/>", key, value)));
    }

    // Asserts an element with the given name opens and later closes in str.
    private void checkElementName(final String name, final boolean compact, final String str,
            final boolean withAttributes, final boolean withChildren) {
        // simple checks, don't try to be too smart here, we're just looking for the names and basic shape.
        // start
        final String startStr = withAttributes ? "<" + name + " " : "<" + name + ">";
        final int startPos = str.indexOf(startStr);
        Assert.assertTrue(str, startPos >= 0);
        // end
        final String endStr = withChildren ? "</" + name + ">" : "/>";
        final int endPos = str.indexOf(endStr, startPos + startStr.length());
        Assert.assertTrue(str, endPos >= 0);
    }

    /**
     * Serializes a fixture event with the given layout options, checks the
     * XML shape, then round-trips it through the XML object mapper and checks
     * it deserializes back to an equal event.
     *
     * @param includeSource whether the Source element should be emitted
     * @param compact whether the layout should emit single-line output
     * @param includeContext whether the ContextMap element should be emitted
     * @throws IOException on mapper failure
     * @throws JsonParseException on mapper failure
     * @throws JsonMappingException on mapper failure
     */
    private void testAllFeatures(final boolean includeSource, final boolean compact, final boolean includeContext)
            throws IOException, JsonParseException, JsonMappingException {
        final Log4jLogEvent expected = LogEventFixtures.createLogEvent();
        final XmlLayout layout = XmlLayout.createLayout(includeSource, includeContext, false, compact, Constants.UTF_8);
        final String str = layout.toSerializable(expected);
        // System.out.println(str);
        // Compact output is a single line; pretty output contains newlines.
        assertEquals(str, !compact, str.contains("\n"));
        assertEquals(str, includeSource, str.contains("Source"));
        assertEquals(str, includeContext, str.contains("ContextMap"));
        final Log4jLogEvent actual = new Log4jXmlObjectMapper().readValue(str, Log4jLogEvent.class);
        LogEventFixtures.assertEqualLogEvents(expected, actual, includeSource, includeContext);
        if (includeContext) {
            this.checkElement("MDC.A", "A_Value", compact, str);
            this.checkElement("MDC.B", "B_Value", compact, str);
        }

        //
        // assertNull(actual.getThrown());
        // check some attrs
        assertTrue(str, str.contains("loggerFqcn=\"f.q.c.n\""));
        assertTrue(str, str.contains("loggerName=\"a.B\""));
        // make sure short names are used
        assertTrue(str, str.contains("<Event "));
        assertTrue(str, str.contains("class="));
        assertTrue(str, str.contains("method="));
        assertTrue(str, str.contains("file="));
        assertTrue(str, str.contains("line="));
        //
        // make sure the names we want are used
        this.checkAttributeName("timeMillis", compact, str);
        this.checkAttributeName("thread", compact, str); // and not threadName
        this.checkAttributeName("level", compact, str);
        this.checkAttributeName("loggerName", compact, str);
        this.checkElementName("Marker", compact, str, true, true);
        this.checkAttributeName("name", compact, str);
        this.checkElementName("Parents", compact, str, false, true);
        this.checkElementName("Message", compact, str, false, true);
        this.checkElementName("Thrown", compact, str, true, true);
        this.checkElementName("Cause", compact, str, true, true);
        this.checkAttributeName("class", compact, str);
        this.checkAttributeName("method", compact, str);
        this.checkAttributeName("file", compact, str);
        this.checkAttributeName("line", compact, str);
        this.checkAttributeName("exact", compact, str);
        this.checkAttributeName("location", compact, str);
        this.checkAttributeName("version", compact, str);
        this.checkAttributeName("commonElementCount", compact, str);
        this.checkAttributeName("message", compact, str);
        this.checkAttributeName("localizedMessage", compact, str);
        this.checkElementName("ExtendedStackTrace", compact, str, false, true);
        if (Throwables.isGetSuppressedAvailable()) {
            this.checkElementName("Suppressed", compact, str, false, true);
        }
        this.checkAttributeName("loggerFqcn", compact, str);
        this.checkAttributeName("endOfBatch", compact, str);
        if (includeContext) {
            this.checkElementName("ContextMap", compact, str, false, true);
        }
        this.checkElementName("ContextStack", compact, str, false, true);
        if (includeSource) {
            this.checkElementName("Source", compact, str, true, false);
        }
        // check some attrs
        this.checkAttribute("loggerFqcn", "f.q.c.n", compact, str);
        this.checkAttribute("loggerName", "a.B", compact, str);
    }

    // The default layout advertises XML with UTF-8.
    @Test
    public void testContentType() {
        final XmlLayout layout = XmlLayout.createDefaultLayout();
        assertEquals("text/xml; charset=UTF-8", layout.getContentType());
    }

    @Test
    public void testDefaultCharset() {
        final XmlLayout layout = XmlLayout.createDefaultLayout();
        assertEquals(Constants.UTF_8, layout.getCharset());
    }

    /**
     * Test case for MDC conversion pattern.
     */
    @Test
    public void testLayout() throws Exception {
        final Map<String, Appender> appenders = this.rootLogger.getAppenders();
        // Detach existing appenders so only the list appender captures output;
        // they are re-attached at the end of the test.
        for (final Appender appender : appenders.values()) {
            this.rootLogger.removeAppender(appender);
        }
        // set up appender
        final XmlLayout layout = XmlLayout.createLayout(true, true, true, false, null);
        final ListAppender appender = new ListAppender("List", null, layout, true, false);
        appender.start();

        // set appender on root and set level to debug
        this.rootLogger.addAppender(appender);
        this.rootLogger.setLevel(Level.DEBUG);

        // output starting message
        this.rootLogger.debug("starting mdc pattern test");

        this.rootLogger.debug("empty mdc");

        ThreadContext.put("key1", "value1");
        ThreadContext.put("key2", "value2");

        this.rootLogger.debug("filled mdc");

        ThreadContext.remove("key1");
        ThreadContext.remove("key2");

        this.rootLogger.error("finished mdc pattern test", new NullPointerException("test"));

        final Marker marker = MarkerManager.getMarker("EVENT");
        this.rootLogger.error(marker, "marker test");

        appender.stop();

        final List<String> list = appender.getMessages();

        final String string = list.get(0);
        assertTrue("Incorrect header: " + string, string.equals("<?xml version=\"1.0\" encoding=\"UTF-8\"?>"));
        assertTrue("Incorrect footer", list.get(list.size() - 1).equals("</Events>"));
        this.checkContains("loggerFqcn=\"" + AbstractLogger.class.getName() + "\"", list);
        this.checkContains("level=\"DEBUG\"", list);
        this.checkContains(">starting mdc pattern test</Message>", list);
        // this.checkContains("<Message>starting mdc pattern test</Message>", list);

        // <Marker xmlns="" _class="org.apache.logging.log4j.MarkerManager..Log4jMarker" name="EVENT"/>
        this.checkContains("<Marker", list);
        this.checkContains("name=\"EVENT\"/>", list);

        for (final Appender app : appenders.values()) {
            this.rootLogger.addAppender(app);
        }
    }

    // The loggerName attribute must reflect the event's logger name.
    @Test
    public void testLayoutLoggerName() {
        final XmlLayout layout = XmlLayout.createLayout(false, true, true, false, null);
        final Log4jLogEvent event = Log4jLogEvent.createEvent("a.B", null, "f.q.c.n", Level.DEBUG,
                new SimpleMessage("M"), null, null, null, null, "threadName", null, 1);
        final String str = layout.toSerializable(event);
        assertTrue(str, str.contains("loggerName=\"a.B\""));
    }

    @Test
    public void testLocationOffCompactOffMdcOff() throws Exception {
        this.testAllFeatures(false, false, false);
    }

    @Test
    public void testLocationOnCompactOnMdcOn() throws Exception {
        this.testAllFeatures(true, true, true);
    }
}
package com.sitescout.dsp.api.model.dto.stats; import com.fasterxml.jackson.annotation.JsonFilter; import com.fasterxml.jackson.annotation.JsonView; import com.sitescout.dsp.api.model.Views; import com.sitescout.dsp.api.util.csv.CsvProperties; import com.sitescout.dsp.api.util.csv.CsvType; import com.sitescout.dsp.api.util.csv.CsvViews; @CsvProperties({"impressionsBid", "impressionsWon", "effectiveCPM", "actualCPM", "auctionsSpend", "dataEffectiveCPM", "dataSpend", "totalEffectiveCPM", "totalSpend", "siteScoutRevenue", "clicks", "clickthruRate", "costPerClick", "offerClicks", "offerClickthruRate", "conversions", "conversionRate", "viewthruConversions", "profitPerClick", "costPerAcquisition", "revenuePerMille", "revenue"}) @JsonFilter("statsFilter") public class StatsDTO { // General @CsvType(CsvType.Value.NUMBER) private int impressionsBid; @CsvType(CsvType.Value.NUMBER) private int impressionsWon; @CsvViews({CsvViews.Value.ADVERTISER, CsvViews.Value.ADMIN}) @CsvType(CsvType.Value.CURRENCY) @JsonView(Views.StatsViews.Advertiser.class) private double effectiveCPM; @CsvViews({CsvViews.Value.ADVERTISER, CsvViews.Value.ADMIN}) @CsvType(CsvType.Value.CURRENCY) @JsonView(Views.StatsViews.Advertiser.class) private double auctionsSpend; // Clicks @CsvType(CsvType.Value.NUMBER) private int clicks; @CsvType(CsvType.Value.PERCENT) private double clickthruRate; @CsvViews({CsvViews.Value.ADVERTISER, CsvViews.Value.ADMIN}) @CsvType(CsvType.Value.CURRENCY) @JsonView(Views.StatsViews.Advertiser.class) private double costPerClick; // Offers @CsvType(CsvType.Value.NUMBER) private int offerClicks; @CsvType(CsvType.Value.PERCENT) private double offerClickthruRate; @CsvType(CsvType.Value.NUMBER) private int conversions; @CsvType(CsvType.Value.PERCENT) private double conversionRate; @CsvType(CsvType.Value.NUMBER) private int viewthruConversions; @CsvType(CsvType.Value.CURRENCY) private double profitPerClick; @CsvViews({CsvViews.Value.ADVERTISER, CsvViews.Value.ADMIN}) 
@CsvType(CsvType.Value.CURRENCY) @JsonView(Views.StatsViews.Advertiser.class) private double costPerAcquisition; @CsvViews({CsvViews.Value.ADVERTISER, CsvViews.Value.ADMIN}) @CsvType(CsvType.Value.CURRENCY) @JsonView(Views.StatsViews.Advertiser.class) private double revenuePerMille; @CsvViews({CsvViews.Value.ADVERTISER, CsvViews.Value.ADMIN}) @CsvType(CsvType.Value.CURRENCY) @JsonView(Views.StatsViews.Advertiser.class) private double revenue; // Totals @CsvViews({CsvViews.Value.ADVERTISER, CsvViews.Value.ADMIN}) @CsvType(CsvType.Value.CURRENCY) @JsonView(Views.StatsViews.Advertiser.class) private double totalEffectiveCPM; @CsvViews({CsvViews.Value.ADVERTISER, CsvViews.Value.ADMIN}) @CsvType(CsvType.Value.CURRENCY) @JsonView(Views.StatsViews.Advertiser.class) private double totalSpend; // Admin @CsvViews(CsvViews.Value.ADMIN) @CsvType(CsvType.Value.CURRENCY) @JsonView(Views.StatsViews.Admin.class) private double siteScoutRevenue; @CsvViews(CsvViews.Value.ADMIN) @CsvType(CsvType.Value.CURRENCY) @JsonView(Views.StatsViews.Admin.class) private double actualCPM; // Data @CsvType(CsvType.Value.CURRENCY) private double dataEffectiveCPM; @CsvViews({CsvViews.Value.ADVERTISER, CsvViews.Value.ADMIN}) @CsvType(CsvType.Value.CURRENCY) @JsonView(Views.StatsViews.Advertiser.class) private double dataSpend; private Integer hour; public int getImpressionsBid() { return impressionsBid; } public void setImpressionsBid(int impressionsBid) { this.impressionsBid = impressionsBid; } public int getImpressionsWon() { return impressionsWon; } public void setImpressionsWon(int impressionsWon) { this.impressionsWon = impressionsWon; } public double getEffectiveCPM() { return effectiveCPM; } public void setEffectiveCPM(double effectiveCPM) { this.effectiveCPM = effectiveCPM; } public double getAuctionsSpend() { return auctionsSpend; } public void setAuctionsSpend(double auctionsSpend) { this.auctionsSpend = auctionsSpend; } public int getClicks() { return clicks; } public void setClicks(int 
clicks) { this.clicks = clicks; } public double getClickthruRate() { return clickthruRate; } public void setClickthruRate(double clickthruRate) { this.clickthruRate = clickthruRate; } public double getCostPerClick() { return costPerClick; } public void setCostPerClick(double costPerClick) { this.costPerClick = costPerClick; } public int getOfferClicks() { return offerClicks; } public void setOfferClicks(int offerClicks) { this.offerClicks = offerClicks; } public double getOfferClickthruRate() { return offerClickthruRate; } public void setOfferClickthruRate(double offerClickthruRate) { this.offerClickthruRate = offerClickthruRate; } public int getConversions() { return conversions; } public void setConversions(int conversions) { this.conversions = conversions; } public double getConversionRate() { return conversionRate; } public void setConversionRate(double conversionRate) { this.conversionRate = conversionRate; } public int getViewthruConversions() { return viewthruConversions; } public void setViewthruConversions(int viewthruConversions) { this.viewthruConversions = viewthruConversions; } public double getProfitPerClick() { return profitPerClick; } public void setProfitPerClick(double profitPerClick) { this.profitPerClick = profitPerClick; } public double getCostPerAcquisition() { return costPerAcquisition; } public void setCostPerAcquisition(double costPerAcquisition) { this.costPerAcquisition = costPerAcquisition; } public double getRevenuePerMille() { return revenuePerMille; } public void setRevenuePerMille(double revenuePerMille) { this.revenuePerMille = revenuePerMille; } public double getRevenue() { return revenue; } public void setRevenue(double revenue) { this.revenue = revenue; } public double getTotalEffectiveCPM() { return totalEffectiveCPM; } public void setTotalEffectiveCPM(double totalEffectiveCPM) { this.totalEffectiveCPM = totalEffectiveCPM; } public double getTotalSpend() { return totalSpend; } public void setTotalSpend(double totalSpend) { 
this.totalSpend = totalSpend; } public double getSiteScoutRevenue() { return siteScoutRevenue; } public void setSiteScoutRevenue(double siteScoutRevenue) { this.siteScoutRevenue = siteScoutRevenue; } public double getActualCPM() { return actualCPM; } public void setActualCPM(double actualCPM) { this.actualCPM = actualCPM; } public double getDataEffectiveCPM() { return dataEffectiveCPM; } public void setDataEffectiveCPM(double dataEffectiveCPM) { this.dataEffectiveCPM = dataEffectiveCPM; } public double getDataSpend() { return dataSpend; } public void setDataSpend(double dataSpend) { this.dataSpend = dataSpend; } public Integer getHour() { return hour; } public void setHour(Integer hour) { this.hour = hour; } }
/** * Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.financial.analytics.curve.exposure; import static org.testng.AssertJUnit.assertEquals; import java.util.List; import org.testng.annotations.Test; import com.opengamma.core.position.Trade; import com.opengamma.financial.security.bond.CorporateBondSecurity; import com.opengamma.financial.security.bond.GovernmentBondSecurity; import com.opengamma.financial.security.bond.MunicipalBondSecurity; import com.opengamma.financial.security.capfloor.CapFloorCMSSpreadSecurity; import com.opengamma.financial.security.capfloor.CapFloorSecurity; import com.opengamma.financial.security.cash.CashSecurity; import com.opengamma.financial.security.cashflow.CashFlowSecurity; import com.opengamma.financial.security.cds.CreditDefaultSwapIndexSecurity; import com.opengamma.financial.security.cds.LegacyFixedRecoveryCDSSecurity; import com.opengamma.financial.security.cds.LegacyRecoveryLockCDSSecurity; import com.opengamma.financial.security.cds.LegacyVanillaCDSSecurity; import com.opengamma.financial.security.cds.StandardFixedRecoveryCDSSecurity; import com.opengamma.financial.security.cds.StandardRecoveryLockCDSSecurity; import com.opengamma.financial.security.cds.StandardVanillaCDSSecurity; import com.opengamma.financial.security.deposit.ContinuousZeroDepositSecurity; import com.opengamma.financial.security.deposit.PeriodicZeroDepositSecurity; import com.opengamma.financial.security.deposit.SimpleZeroDepositSecurity; import com.opengamma.financial.security.equity.EquitySecurity; import com.opengamma.financial.security.equity.EquityVarianceSwapSecurity; import com.opengamma.financial.security.forward.AgricultureForwardSecurity; import com.opengamma.financial.security.forward.EnergyForwardSecurity; import com.opengamma.financial.security.forward.MetalForwardSecurity; import com.opengamma.financial.security.fra.FRASecurity; import 
com.opengamma.financial.security.future.AgricultureFutureSecurity; import com.opengamma.financial.security.future.BondFutureSecurity; import com.opengamma.financial.security.future.DeliverableSwapFutureSecurity; import com.opengamma.financial.security.future.EnergyFutureSecurity; import com.opengamma.financial.security.future.EquityFutureSecurity; import com.opengamma.financial.security.future.EquityIndexDividendFutureSecurity; import com.opengamma.financial.security.future.FXFutureSecurity; import com.opengamma.financial.security.future.FederalFundsFutureSecurity; import com.opengamma.financial.security.future.IndexFutureSecurity; import com.opengamma.financial.security.future.InterestRateFutureSecurity; import com.opengamma.financial.security.future.MetalFutureSecurity; import com.opengamma.financial.security.future.StockFutureSecurity; import com.opengamma.financial.security.fx.FXForwardSecurity; import com.opengamma.financial.security.fx.FXVolatilitySwapSecurity; import com.opengamma.financial.security.fx.NonDeliverableFXForwardSecurity; import com.opengamma.financial.security.option.BondFutureOptionSecurity; import com.opengamma.financial.security.option.CommodityFutureOptionSecurity; import com.opengamma.financial.security.option.CreditDefaultSwapOptionSecurity; import com.opengamma.financial.security.option.EquityBarrierOptionSecurity; import com.opengamma.financial.security.option.EquityIndexDividendFutureOptionSecurity; import com.opengamma.financial.security.option.EquityIndexFutureOptionSecurity; import com.opengamma.financial.security.option.EquityIndexOptionSecurity; import com.opengamma.financial.security.option.EquityOptionSecurity; import com.opengamma.financial.security.option.FXBarrierOptionSecurity; import com.opengamma.financial.security.option.FXDigitalOptionSecurity; import com.opengamma.financial.security.option.FXOptionSecurity; import com.opengamma.financial.security.option.FxFutureOptionSecurity; import 
com.opengamma.financial.security.option.IRFutureOptionSecurity; import com.opengamma.financial.security.option.NonDeliverableFXDigitalOptionSecurity; import com.opengamma.financial.security.option.NonDeliverableFXOptionSecurity; import com.opengamma.financial.security.option.SwaptionSecurity; import com.opengamma.financial.security.swap.ForwardSwapSecurity; import com.opengamma.financial.security.swap.SwapSecurity; import com.opengamma.financial.security.swap.YearOnYearInflationSwapSecurity; import com.opengamma.financial.security.swap.ZeroCouponInflationSwapSecurity; import com.opengamma.id.ExternalId; import com.opengamma.util.test.TestGroup; /** * Unit test for SecurityTypeExposureFunction. */ @Test(groups = TestGroup.UNIT) public class SecurityTypeExposureFunctionTest { private static final String SCHEME = ExposureFunction.SECURITY_IDENTIFIER; private static final ExposureFunction EXPOSURE_FUNCTION = new SecurityTypeExposureFunction(); @Test public void testAgriculturalFutureSecurity() { final AgricultureFutureSecurity future = ExposureFunctionTestHelper.getAgricultureFutureSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(future); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "FUTURE"), ids.get(0)); } @Test public void testBondFutureSecurity() { final BondFutureSecurity future = ExposureFunctionTestHelper.getBondFutureSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(future); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "FUTURE"), ids.get(0)); } @Test public void testCashSecurity() { final CashSecurity cash = ExposureFunctionTestHelper.getCashSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(cash); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "CASH"), ids.get(0)); } @Test public void 
testCapFloorCMSSpreadSecurity() { final CapFloorCMSSpreadSecurity security = ExposureFunctionTestHelper.getCapFloorCMSSpreadSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "CAP-FLOOR CMS SPREAD"), ids.get(0)); } @Test public void testCapFloorSecurity() { final CapFloorSecurity security = ExposureFunctionTestHelper.getCapFloorSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "CAP-FLOOR"), ids.get(0)); } @Test public void testContinuousZeroDepositSecurity() { final ContinuousZeroDepositSecurity security = ExposureFunctionTestHelper.getContinuousZeroDepositSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "CONTINUOUS_ZERO_DEPOSIT"), ids.get(0)); } @Test public void testCorporateBondSecurity() { final CorporateBondSecurity security = ExposureFunctionTestHelper.getCorporateBondSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "BOND"), ids.get(0)); } @Test public void testEnergyFutureSecurity() { final EnergyFutureSecurity future = ExposureFunctionTestHelper.getEnergyFutureSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(future); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "FUTURE"), ids.get(0)); } @Test public void testEquityFutureSecurity() { final EquityFutureSecurity future = ExposureFunctionTestHelper.getEquityFutureSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(future); List<ExternalId> ids = 
EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "FUTURE"), ids.get(0)); } @Test public void testEquityIndexDividendFutureSecurity() { final EquityIndexDividendFutureSecurity future = ExposureFunctionTestHelper.getEquityIndexDividendFutureSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(future); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "FUTURE"), ids.get(0)); } @Test public void testFRASecurity() { final FRASecurity fra = ExposureFunctionTestHelper.getFRASecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(fra); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "FRA"), ids.get(0)); } @Test public void testFXFutureSecurity() { final FXFutureSecurity future = ExposureFunctionTestHelper.getFXFutureSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(future); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "FUTURE"), ids.get(0)); } @Test public void testIndexFutureSecurity() { final IndexFutureSecurity future = ExposureFunctionTestHelper.getIndexFutureSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(future); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "FUTURE"), ids.get(0)); } @Test public void testInterestRateFutureSecurity() { final InterestRateFutureSecurity future = ExposureFunctionTestHelper.getInterestRateFutureSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(future); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "FUTURE"), ids.get(0)); } @Test public void testFederalFundsFutureSecurity() { final FederalFundsFutureSecurity future = ExposureFunctionTestHelper.getFederalFundsFutureSecurity(); Trade 
trade = ExposureFunctionTestHelper.getTrade(future); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "FUTURE"), ids.get(0)); } @Test public void testMetalFutureSecurity() { final MetalFutureSecurity future = ExposureFunctionTestHelper.getMetalFutureSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(future); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "FUTURE"), ids.get(0)); } @Test public void testStockFutureSecurity() { final StockFutureSecurity future = ExposureFunctionTestHelper.getStockFutureSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(future); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "FUTURE"), ids.get(0)); } @Test public void testAgricultureFutureSecurity() { final AgricultureFutureSecurity future = ExposureFunctionTestHelper.getAgricultureFutureSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(future); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "FUTURE"), ids.get(0)); } @Test public void testBondFutureOptionSecurity() { final BondFutureOptionSecurity security = ExposureFunctionTestHelper.getBondFutureOptionSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "BONDFUTURE_OPTION"), ids.get(0)); } @Test public void testCashFlowSecurity() { final CashFlowSecurity security = ExposureFunctionTestHelper.getCashFlowSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "CASHFLOW"), ids.get(0)); } @Test public void testEnergyFutureOptionSecurity() { final 
CommodityFutureOptionSecurity security = ExposureFunctionTestHelper.getEnergyFutureOptionSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "COMMODITYFUTURE_OPTION"), ids.get(0)); } @Test public void testEquityBarrierOptionSecurity() { final EquityBarrierOptionSecurity security = ExposureFunctionTestHelper.getEquityBarrierOptionSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "EQUITY_BARRIER_OPTION"), ids.get(0)); } @Test public void testEquitySecurity() { final EquitySecurity security = ExposureFunctionTestHelper.getEquitySecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "EQUITY"), ids.get(0)); } @Test public void testAgricultureForwardSecurity() { final AgricultureForwardSecurity security = ExposureFunctionTestHelper.getAgricultureForwardSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "COMMODITY_FORWARD"), ids.get(0)); } @Test public void testCreditDefaultSwapIndexSecurity() { final CreditDefaultSwapIndexSecurity security = ExposureFunctionTestHelper.getCreditDefaultSwapIndexSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "CDS_INDEX"), ids.get(0)); } @Test public void testCreditDefaultSwapOptionSecurity() { final CreditDefaultSwapOptionSecurity security = ExposureFunctionTestHelper.getCreditDefaultSwapOptionSecurity(); Trade trade = 
ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "CREDIT_DEFAULT_SWAP_OPTION"), ids.get(0)); } @Test public void testDeliverableSwapSecurity() { final DeliverableSwapFutureSecurity security = ExposureFunctionTestHelper.getDeliverableSwapFutureSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "FUTURE"), ids.get(0)); } @Test public void testEnergyForwardSecurity() { final EnergyForwardSecurity security = ExposureFunctionTestHelper.getEnergyForwardSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "COMMODITY_FORWARD"), ids.get(0)); } @Test public void testEquityIndexDividendFutureOptionSecurity() { final EquityIndexDividendFutureOptionSecurity security = ExposureFunctionTestHelper.getEquityIndexDividendFutureOptionSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "EQUITY_INDEX_DIVIDEND_FUTURE_OPTION"), ids.get(0)); } @Test public void testEquityIndexFutureOptionSecurity() { final EquityIndexFutureOptionSecurity security = ExposureFunctionTestHelper.getEquityIndexFutureOptionSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "EQUITY_INDEX_FUTURE_OPTION"), ids.get(0)); } @Test public void testEquityIndexOptionSecurity() { final EquityIndexOptionSecurity security = ExposureFunctionTestHelper.getEquityIndexOptionSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); 
List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "EQUITY_INDEX_OPTION"), ids.get(0)); } @Test public void testEquityOptionSecurity() { final EquityOptionSecurity security = ExposureFunctionTestHelper.getEquityOptionSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "EQUITY_OPTION"), ids.get(0)); } @Test public void testEquityVarianceSwapSecurity() { final EquityVarianceSwapSecurity security = ExposureFunctionTestHelper.getEquityVarianceSwapSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "EQUITY VARIANCE SWAP"), ids.get(0)); } @Test public void testFixedFloatSwapSecurity() { final SwapSecurity security = ExposureFunctionTestHelper.getPayFixedFloatSwapSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "SWAP"), ids.get(0)); } @Test public void testFloatFloatSwapSecurity() { final SwapSecurity security = ExposureFunctionTestHelper.getFloatFloatSwapSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "SWAP"), ids.get(0)); } @Test public void testForwardFixedFloatSwapSecurity() { final ForwardSwapSecurity security = ExposureFunctionTestHelper.getPayForwardFixedFloatSwapSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "SWAP"), ids.get(0)); } @Test public void testForwardFloatFloatSwapSecurity() { final 
ForwardSwapSecurity security = ExposureFunctionTestHelper.getForwardFloatFloatSwapSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "SWAP"), ids.get(0)); } @Test public void testForwardXCcySwapSecurity() { final ForwardSwapSecurity security = ExposureFunctionTestHelper.getForwardXCcySwapSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "SWAP"), ids.get(0)); } @Test public void testFXBarrierOptionSecurity() { final FXBarrierOptionSecurity security = ExposureFunctionTestHelper.getFXBarrierOptionSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "FX_BARRIER_OPTION"), ids.get(0)); } @Test public void testFXDigitalOptionSecurity() { final FXDigitalOptionSecurity security = ExposureFunctionTestHelper.getFXDigitalOptionSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "FX_DIGITAL_OPTION"), ids.get(0)); } @Test public void testFXForwardSecurity() { final FXForwardSecurity security = ExposureFunctionTestHelper.getFXForwardSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "FX_FORWARD"), ids.get(0)); } @Test public void testFXFutureOptionSecurity() { final FxFutureOptionSecurity security = ExposureFunctionTestHelper.getFXFutureOptionSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, 
ids.size()); assertEquals(ExternalId.of(SCHEME, "FXFUTURE_OPTION"), ids.get(0)); } @Test public void testFXOptionSecurity() { final FXOptionSecurity security = ExposureFunctionTestHelper.getFXOptionSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "FX_OPTION"), ids.get(0)); } @Test public void testFXVolatilitySecurity() { final FXVolatilitySwapSecurity security = ExposureFunctionTestHelper.getFXVolatilitySwapSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "FX_VOLATILITY_SWAP"), ids.get(0)); } @Test public void testGovernmentBondSecurity() { final GovernmentBondSecurity security = ExposureFunctionTestHelper.getGovernmentBondSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "BOND"), ids.get(0)); } @Test public void testInterestRateFutureOptionSecurity() { final IRFutureOptionSecurity security = ExposureFunctionTestHelper.getInterestRateFutureOptionSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "IRFUTURE_OPTION"), ids.get(0)); } @Test public void testLegacyFixedRecoveryCDSSecurity() { final LegacyFixedRecoveryCDSSecurity security = ExposureFunctionTestHelper.getLegacyFixedRecoveryCDSSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "LEGACY_FIXED_RECOVERY_CDS"), ids.get(0)); } @Test public void testLegacyRecoveryLockCDSSecurity() { final 
LegacyRecoveryLockCDSSecurity security = ExposureFunctionTestHelper.getLegacyRecoveryLockCDSSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "LEGACY_RECOVERY_LOCK_CDS"), ids.get(0)); } @Test public void testLegacyVanillaCDSSecurity() { final LegacyVanillaCDSSecurity security = ExposureFunctionTestHelper.getLegacyVanillaCDSSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "LEGACY_VANILLA_CDS"), ids.get(0)); } @Test public void testMetalForwardSecurity() { final MetalForwardSecurity security = ExposureFunctionTestHelper.getMetalForwardSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "COMMODITY_FORWARD"), ids.get(0)); } @Test public void testMunicipalBondSecurity() { final MunicipalBondSecurity security = ExposureFunctionTestHelper.getMunicipalBondSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "BOND"), ids.get(0)); } @Test public void testNonDeliverableFXDigitalOptionSecurity() { final NonDeliverableFXDigitalOptionSecurity security = ExposureFunctionTestHelper.getNonDeliverableFXDigitalOptionSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "NONDELIVERABLE_FX_DIGITAL_OPTION"), ids.get(0)); } @Test public void testNonDeliverableFXForwardSecurity() { final NonDeliverableFXForwardSecurity security = ExposureFunctionTestHelper.getNonDeliverableFXForwardSecurity(); 
Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "NONDELIVERABLE_FX_FORWARD"), ids.get(0)); } @Test public void testNonDeliverableFXOptionSecurity() { final NonDeliverableFXOptionSecurity security = ExposureFunctionTestHelper.getNonDeliverableFXOptionSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "NONDELIVERABLE_FX_OPTION"), ids.get(0)); } @Test public void testPeriodicZeroDepositSecurity() { final PeriodicZeroDepositSecurity security = ExposureFunctionTestHelper.getPeriodicZeroDepositSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "PERIODIC_ZERO_DEPOSIT"), ids.get(0)); } @Test public void testSimpleZeroDepositSecurity() { final SimpleZeroDepositSecurity security = ExposureFunctionTestHelper.getSimpleZeroDepositSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "SIMPLE_ZERO_DEPOSIT"), ids.get(0)); } @Test public void testStandardFixedRecoveryCDSSecurity() { final StandardFixedRecoveryCDSSecurity security = ExposureFunctionTestHelper.getStandardFixedRecoveryCDSSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "STANDARD_FIXED_RECOVERY_CDS"), ids.get(0)); } @Test public void testStandardRecoveryLockCDSSecurity() { final StandardRecoveryLockCDSSecurity security = ExposureFunctionTestHelper.getStandardRecoveryLockCDSSecurity(); Trade trade = 
ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "STANDARD_RECOVERY_LOCK_CDS"), ids.get(0)); } @Test public void testStandardVanillaCDSSecurity() { final StandardVanillaCDSSecurity security = ExposureFunctionTestHelper.getStandardVanillaCDSSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "STANDARD_VANILLA_CDS"), ids.get(0)); } @Test public void testSwaptionSecurity() { final SwaptionSecurity security = ExposureFunctionTestHelper.getPaySwaptionSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "SWAPTION"), ids.get(0)); } @Test public void testXCcySwapSecurity() { final SwapSecurity security = ExposureFunctionTestHelper.getXCcySwapSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "SWAP"), ids.get(0)); } @Test public void testPayYoYInflationSwapSecurity() { final YearOnYearInflationSwapSecurity security = ExposureFunctionTestHelper.getPayYoYInflationSwapSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "YEAR_ON_YEAR_INFLATION_SWAP"), ids.get(0)); } @Test public void testReceiveYoYInflationSwapSecurity() { final YearOnYearInflationSwapSecurity security = ExposureFunctionTestHelper.getPayYoYInflationSwapSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, 
"YEAR_ON_YEAR_INFLATION_SWAP"), ids.get(0)); } @Test public void testPayZeroCouponInflationSwapSecurity() { final ZeroCouponInflationSwapSecurity security = ExposureFunctionTestHelper.getPayZeroCouponInflationSwapSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "ZERO_COUPON_INFLATION_SWAP"), ids.get(0)); } @Test public void testReceiveZeroCouponInflationSwapSecurity() { final ZeroCouponInflationSwapSecurity security = ExposureFunctionTestHelper.getReceiveZeroCouponInflationSwapSecurity(); Trade trade = ExposureFunctionTestHelper.getTrade(security); List<ExternalId> ids = EXPOSURE_FUNCTION.getIds(trade); assertEquals(1, ids.size()); assertEquals(ExternalId.of(SCHEME, "ZERO_COUPON_INFLATION_SWAP"), ids.get(0)); } }
/* * Copyright 2013-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.jvm.java; import static java.nio.charset.StandardCharsets.UTF_8; import static java.util.jar.Attributes.Name.IMPLEMENTATION_VERSION; import static java.util.jar.Attributes.Name.MANIFEST_VERSION; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.hasItem; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import com.facebook.buck.event.BuckEventBusFactory; import com.facebook.buck.io.ProjectFilesystem; import com.facebook.buck.step.ExecutionContext; import com.facebook.buck.step.TestExecutionContext; import com.facebook.buck.testutil.FakeProjectFilesystem; import com.facebook.buck.testutil.TestConsole; import com.facebook.buck.testutil.Zip; import com.facebook.buck.testutil.integration.TemporaryPaths; import com.facebook.buck.zip.CustomZipOutputStream; import com.facebook.buck.zip.ZipConstants; import com.facebook.buck.zip.ZipOutputStreams; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Sets; import org.apache.commons.compress.archivers.zip.ZipUtil; import org.junit.Rule; import org.junit.Test; import java.io.ByteArrayInputStream; import java.io.FileInputStream; import 
java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Date; import java.util.Map; import java.util.Set; import java.util.jar.Attributes; import java.util.jar.JarFile; import java.util.jar.JarInputStream; import java.util.jar.JarOutputStream; import java.util.jar.Manifest; import java.util.regex.Pattern; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; import java.util.zip.ZipOutputStream; public class JarDirectoryStepTest { @Rule public TemporaryPaths folder = new TemporaryPaths(); @Test public void shouldNotThrowAnExceptionWhenAddingDuplicateEntries() throws IOException { Path zipup = folder.newFolder("zipup"); Path first = createZip(zipup.resolve("a.zip"), "example.txt"); Path second = createZip(zipup.resolve("b.zip"), "example.txt", "com/example/Main.class"); JarDirectoryStep step = new JarDirectoryStep( new ProjectFilesystem(zipup), Paths.get("output.jar"), ImmutableSortedSet.of(first.getFileName(), second.getFileName()), "com.example.Main", /* manifest file */ null); ExecutionContext context = TestExecutionContext.newInstance(); int returnCode = step.execute(context).getExitCode(); assertEquals(0, returnCode); Path zip = zipup.resolve("output.jar"); assertTrue(Files.exists(zip)); // "example.txt" "Main.class" and the MANIFEST.MF. 
assertZipFileCountIs(3, zip); assertZipContains(zip, "example.txt"); } @Test public void shouldNotifyEventBusWhenDuplicateClassesAreFound() throws IOException { Path jarDirectory = folder.newFolder("jarDir"); Path first = createZip( jarDirectory.resolve("a.jar"), "com/example/Main.class", "com/example/common/Helper.class"); Path second = createZip( jarDirectory.resolve("b.jar"), "com/example/common/Helper.class"); final Path outputPath = Paths.get("output.jar"); JarDirectoryStep step = new JarDirectoryStep( new ProjectFilesystem(jarDirectory), outputPath, ImmutableSortedSet.of(first.getFileName(), second.getFileName()), "com.example.Main", /* manifest file */ null); ExecutionContext context = TestExecutionContext.newInstance(); final BuckEventBusFactory.CapturingConsoleEventListener listener = new BuckEventBusFactory.CapturingConsoleEventListener(); context.getBuckEventBus().register(listener); step.execute(context); final String expectedMessage = String.format( "Duplicate found when adding 'com/example/common/Helper.class' to '%s' from '%s'", outputPath.toAbsolutePath(), second.toAbsolutePath()); assertThat(listener.getLogMessages(), hasItem(expectedMessage)); } @Test public void shouldFailIfMainClassMissing() throws IOException { Path zipup = folder.newFolder("zipup"); Path zip = createZip(zipup.resolve("a.zip"), "com/example/Main.class"); JarDirectoryStep step = new JarDirectoryStep( new ProjectFilesystem(zipup), Paths.get("output.jar"), ImmutableSortedSet.of(zip.getFileName()), "com.example.MissingMain", /* manifest file */ null); TestConsole console = new TestConsole(); ExecutionContext context = TestExecutionContext.newBuilder() .setConsole(console) .build(); int returnCode = step.execute(context).getExitCode(); assertEquals(1, returnCode); assertEquals( "ERROR: Main class com.example.MissingMain does not exist.\n", console.getTextWrittenToStdErr()); } @Test public void shouldNotComplainWhenDuplicateDirectoryNamesAreAdded() throws IOException { Path zipup = 
folder.newFolder();
    Path first = createZip(zipup.resolve("first.zip"), "dir/example.txt", "dir/root1file.txt");
    Path second = createZip(
        zipup.resolve("second.zip"), "dir/example.txt", "dir/root2file.txt", "com/example/Main.class");
    JarDirectoryStep step = new JarDirectoryStep(
        new ProjectFilesystem(zipup),
        Paths.get("output.jar"),
        ImmutableSortedSet.of(first.getFileName(), second.getFileName()),
        "com.example.Main",
        /* manifest file */ null);
    ExecutionContext context = TestExecutionContext.newInstance();
    int returnCode = step.execute(context).getExitCode();
    assertEquals(0, returnCode);
    Path zip = zipup.resolve("output.jar");
    // The three below plus the manifest and Main.class.
    assertZipFileCountIs(5, zip);
    assertZipContains(zip, "dir/example.txt", "dir/root1file.txt", "dir/root2file.txt");
  }

  /**
   * A manifest supplied by the user must win over manifests found inside the input jars:
   * the user's Implementation-Version ("1.4") must replace the jar's ("1.0").
   */
  @Test
  public void entriesFromTheGivenManifestShouldOverrideThoseInTheJars() throws IOException {
    String expected = "1.4";
    // Write the manifest, setting the implementation version
    Path tmp = folder.newFolder();
    Manifest manifest = new Manifest();
    manifest.getMainAttributes().putValue(MANIFEST_VERSION.toString(), "1.0");
    manifest.getMainAttributes().putValue(IMPLEMENTATION_VERSION.toString(), expected);
    Path manifestFile = tmp.resolve("manifest");
    try (OutputStream fos = Files.newOutputStream(manifestFile)) {
      manifest.write(fos);
    }

    // Write another manifest, setting the implementation version to something else
    manifest = new Manifest();
    manifest.getMainAttributes().putValue(MANIFEST_VERSION.toString(), "1.0");
    manifest.getMainAttributes().putValue(IMPLEMENTATION_VERSION.toString(), "1.0");

    Path input = tmp.resolve("input.jar");
    try (CustomZipOutputStream out = ZipOutputStreams.newOutputStream(input)) {
      ZipEntry entry = new ZipEntry("META-INF/MANIFEST.MF");
      out.putNextEntry(entry);
      manifest.write(out);
    }

    Path output = tmp.resolve("output.jar");
    JarDirectoryStep step = new JarDirectoryStep(
        new ProjectFilesystem(tmp),
        output,
        ImmutableSortedSet.of(Paths.get("input.jar")),
        /* main class */ null,
        tmp.resolve("manifest"),
        /* merge manifest */ true,
        /* blacklist */ ImmutableSet.<Pattern>of());
    ExecutionContext context = TestExecutionContext.newInstance();
    assertEquals(0, step.execute(context).getExitCode());

    // Read the merged manifest back out of the output jar and check the user's value won.
    try (Zip zip = new Zip(output, false)) {
      byte[] rawManifest = zip.readFully("META-INF/MANIFEST.MF");
      manifest = new Manifest(new ByteArrayInputStream(rawManifest));
      String version = manifest.getMainAttributes().getValue(IMPLEMENTATION_VERSION);
      assertEquals(expected, version);
    }
  }

  /** Jarring a directory tree must emit explicit entries for the directories themselves. */
  @Test
  public void jarsShouldContainDirectoryEntries() throws IOException {
    Path zipup = folder.newFolder("dir-zip");

    Path subdir = zipup.resolve("dir/subdir");
    Files.createDirectories(subdir);
    Files.write(subdir.resolve("a.txt"), "cake".getBytes());

    JarDirectoryStep step = new JarDirectoryStep(
        new ProjectFilesystem(zipup),
        Paths.get("output.jar"),
        ImmutableSortedSet.of(zipup),
        /* main class */ null,
        /* manifest file */ null);
    ExecutionContext context = TestExecutionContext.newInstance();
    int returnCode = step.execute(context).getExitCode();
    assertEquals(0, returnCode);

    Path zip = zipup.resolve("output.jar");
    assertTrue(Files.exists(zip));

    // Iterate over each of the entries, expecting to see the directory names as entries.
    Set<String> expected = Sets.newHashSet("dir/", "dir/subdir/");
    try (ZipInputStream is = new ZipInputStream(Files.newInputStream(zip))) {
      for (ZipEntry entry = is.getNextEntry(); entry != null; entry = is.getNextEntry()) {
        expected.remove(entry.getName());
      }
    }
    assertTrue("Didn't see entries for: " + expected, expected.isEmpty());
  }

  @Test
  public void shouldNotMergeManifestsIfRequested() throws IOException {
    Manifest fromJar = createManifestWithExampleSection(ImmutableMap.of("Not-Seen", "ever"));
    Manifest fromUser = createManifestWithExampleSection(ImmutableMap.of("cake", "cheese"));

    Manifest seenManifest = jarDirectoryAndReadManifest(fromJar, fromUser, false);

    // Without merging, only the user-supplied entries survive.
    assertEquals(fromUser.getEntries(), seenManifest.getEntries());
  }

  @Test
  public void shouldMergeManifestsIfAsked() throws IOException {
    Manifest fromJar = createManifestWithExampleSection(ImmutableMap.of("Not-Seen", "ever"));
    Manifest fromUser = createManifestWithExampleSection(ImmutableMap.of("cake", "cheese"));

    Manifest seenManifest = jarDirectoryAndReadManifest(fromJar, fromUser, true);

    // With merging, the "example" section carries attributes from both sources.
    Manifest expectedManifest = new Manifest(fromJar);
    Attributes attrs = new Attributes();
    attrs.putValue("Not-Seen", "ever");
    attrs.putValue("cake", "cheese");
    expectedManifest.getEntries().put("example", attrs);
    assertEquals(expectedManifest.getEntries(), seenManifest.getEntries());
  }

  @Test
  public void shouldNotIncludeFilesInBlacklist() throws IOException {
    Path zipup = folder.newFolder();
    Path first = createZip(
        zipup.resolve("first.zip"), "dir/file1.txt", "dir/file2.txt", "com/example/Main.class");

    JarDirectoryStep step = new JarDirectoryStep(
        new ProjectFilesystem(zipup),
        Paths.get("output.jar"),
        ImmutableSortedSet.of(first.getFileName()),
        "com.example.Main",
        /* manifest file */ null,
        /* merge manifests */ true,
        /* blacklist */ ImmutableSet.of(Pattern.compile(".*2.*")));
    assertEquals(0, step.execute(TestExecutionContext.newInstance()).getExitCode());

    Path zip = zipup.resolve("output.jar");
    // 3 files in total: file1.txt, & com/example/Main.class & the manifest.
    assertZipFileCountIs(3, zip);
    assertZipContains(zip, "dir/file1.txt");
    assertZipDoesNotContain(zip, "dir/file2.txt");
  }

  @Test
  public void shouldNotIncludeFilesInClassesToRemoveFromJar() throws IOException {
    Path zipup = folder.newFolder();
    Path first = createZip(
        zipup.resolve("first.zip"),
        "com/example/A.class",
        "com/example/B.class",
        "com/example/C.class");

    JarDirectoryStep step = new JarDirectoryStep(
        new ProjectFilesystem(zipup),
        Paths.get("output.jar"),
        ImmutableSortedSet.of(first.getFileName()),
        "com.example.A",
        /* manifest file */ null,
        /* merge manifests */ true,
        /* blacklist */ ImmutableSet.of(
            Pattern.compile("com.example.B"), Pattern.compile("com.example.C")));
    assertEquals(0, step.execute(TestExecutionContext.newInstance()).getExitCode());

    Path zip = zipup.resolve("output.jar");
    // 2 files in total: com/example/A/class & the manifest.
    assertZipFileCountIs(2, zip);
    assertZipContains(zip, "com/example/A.class");
    assertZipDoesNotContain(zip, "com/example/B.class");
    assertZipDoesNotContain(zip, "com/example/C.class");
  }

  /** Every entry's timestamp must be rewritten to the deterministic DOS fake time. */
  @Test
  public void timesAreSanitized() throws IOException {
    Path zipup = folder.newFolder("dir-zip");

    // Create a jar file with a file and a directory.
    Path subdir = zipup.resolve("dir");
    Files.createDirectories(subdir);
    Files.write(subdir.resolve("a.txt"), "cake".getBytes());
    Path outputJar = folder.getRoot().resolve("output.jar");
    JarDirectoryStep step = new JarDirectoryStep(
        new ProjectFilesystem(folder.getRoot()),
        outputJar,
        ImmutableSortedSet.of(zipup),
        /* main class */ null,
        /* manifest file */ null);
    ExecutionContext context = TestExecutionContext.newInstance();
    int returnCode = step.execute(context).getExitCode();
    assertEquals(0, returnCode);

    // Iterate over each of the entries, expecting to see all zeros in the time fields.
    assertTrue(Files.exists(outputJar));
    Date dosEpoch = new Date(ZipUtil.dosToJavaTime(ZipConstants.DOS_FAKE_TIME));
    try (ZipInputStream is = new ZipInputStream(new FileInputStream(outputJar.toFile()))) {
      for (ZipEntry entry = is.getNextEntry(); entry != null; entry = is.getNextEntry()) {
        assertEquals(entry.getName(), dosEpoch, new Date(entry.getTime()));
      }
    }
  }

  /**
   * From the constructor of {@link JarInputStream}:
   * <p>
   * This implementation assumes the META-INF/MANIFEST.MF entry
   * should be either the first or the second entry (when preceded
   * by the dir META-INF/). It skips the META-INF/ and then
   * "consumes" the MANIFEST.MF to initialize the Manifest object.
   * <p>
   * A simple implementation of {@link JarDirectoryStep} would iterate over all entries to be
   * included, adding them to the output jar, while merging manifest files, writing the merged
   * manifest as the last item in the jar. That will generate jars the {@code JarInputStream} won't
   * be able to find the manifest for.
   */
  @Test
  public void manifestShouldBeSecondEntryInJar() throws IOException {
    Path manifestPath = Paths.get(JarFile.MANIFEST_NAME);

    // Create a directory with a manifest in it and more than two files.
    Path dir = folder.newFolder();
    Manifest dirManifest = new Manifest();
    Attributes attrs = new Attributes();
    attrs.putValue("From-Dir", "cheese");
    dirManifest.getEntries().put("Section", attrs);

    Files.createDirectories(dir.resolve(manifestPath).getParent());
    try (OutputStream out = Files.newOutputStream(dir.resolve(manifestPath))) {
      dirManifest.write(out);
    }
    Files.write(dir.resolve("A.txt"), "hello world".getBytes(UTF_8));
    Files.write(dir.resolve("B.txt"), "hello world".getBytes(UTF_8));
    Files.write(dir.resolve("aa.txt"), "hello world".getBytes(UTF_8));
    Files.write(dir.resolve("bb.txt"), "hello world".getBytes(UTF_8));

    // Create a jar with a manifest and more than two other files.
    Path inputJar = folder.newFile("example.jar");
    try (ZipOutputStream zos = new ZipOutputStream(Files.newOutputStream(inputJar))) {
      byte[] data = "hello world".getBytes(UTF_8);
      ZipEntry entry = new ZipEntry("C.txt");
      zos.putNextEntry(entry);
      zos.write(data, 0, data.length);
      zos.closeEntry();
      entry = new ZipEntry("cc.txt");
      zos.putNextEntry(entry);
      zos.write(data, 0, data.length);
      zos.closeEntry();
      entry = new ZipEntry("META-INF/");
      zos.putNextEntry(entry);
      zos.closeEntry();
      // Note: at end of the stream. Technically invalid.
      entry = new ZipEntry(JarFile.MANIFEST_NAME);
      zos.putNextEntry(entry);
      Manifest zipManifest = new Manifest();
      attrs = new Attributes();
      attrs.putValue("From-Zip", "peas");
      zipManifest.getEntries().put("Section", attrs);
      zipManifest.write(zos);
      zos.closeEntry();
    }

    // Merge and check that the manifest includes everything
    Path output = folder.newFile("output.jar");
    JarDirectoryStep step = new JarDirectoryStep(
        new FakeProjectFilesystem(folder.getRoot()),
        output,
        ImmutableSortedSet.of(dir, inputJar),
        null,
        null);
    int exitCode = step.execute(TestExecutionContext.newInstance()).getExitCode();
    assertEquals(0, exitCode);

    // JarInputStream only finds a manifest placed at the head of the jar; if this
    // returns non-null the step put the manifest in the right position.
    Manifest manifest;
    try (InputStream is = Files.newInputStream(output);
        JarInputStream jis = new JarInputStream(is)) {
      manifest = jis.getManifest();
    }
    assertNotNull(manifest);
    Attributes readAttributes = manifest.getAttributes("Section");
    assertEquals(2, readAttributes.size());
    assertEquals("cheese", readAttributes.getValue("From-Dir"));
    assertEquals("peas", readAttributes.getValue("From-Zip"));
  }

  /** Builds a manifest whose "example" section holds the given attribute map. */
  private Manifest createManifestWithExampleSection(Map<String, String> attributes) {
    Manifest manifest = new Manifest();
    Attributes attrs = new Attributes();
    for (Map.Entry<String, String> stringStringEntry : attributes.entrySet()) {
      attrs.put(new Attributes.Name(stringStringEntry.getKey()), stringStringEntry.getValue());
    }
    manifest.getEntries().put("example", attrs);
    return manifest;
  }

  /**
   * Jars a single input jar (carrying {@code fromJar}'s manifest) with a user manifest
   * {@code fromUser}, optionally merging, and returns the manifest read from the output.
   */
  private Manifest jarDirectoryAndReadManifest(
      Manifest fromJar, Manifest fromUser, boolean mergeEntries) throws IOException {
    // Create a jar with a manifest we'd expect to see merged.
    Path originalJar = folder.newFile("unexpected.jar");
    JarOutputStream ignored = new JarOutputStream(Files.newOutputStream(originalJar), fromJar);
    ignored.close();

    // Now create the actual manifest
    Path manifestFile = folder.newFile("actual_manfiest.mf");
    try (OutputStream os = Files.newOutputStream(manifestFile)) {
      fromUser.write(os);
    }

    Path tmp = folder.newFolder();
    Path output = tmp.resolve("example.jar");
    JarDirectoryStep step = new JarDirectoryStep(
        new ProjectFilesystem(tmp),
        output,
        ImmutableSortedSet.of(originalJar),
        /* main class */ null,
        manifestFile,
        mergeEntries,
        /* blacklist */ ImmutableSet.<Pattern>of());
    ExecutionContext context = TestExecutionContext.newInstance();
    step.execute(context);

    // Now verify that the created manifest matches the expected one.
    try (JarInputStream jis = new JarInputStream(Files.newInputStream(output))) {
      return jis.getManifest();
    }
  }

  /** Creates a zip at {@code zipFile} containing one empty entry per name. */
  private Path createZip(Path zipFile, String... fileNames) throws IOException {
    try (Zip zip = new Zip(zipFile, true)) {
      for (String fileName : fileNames) {
        zip.add(fileName, "");
      }
    }
    return zipFile;
  }

  private void assertZipFileCountIs(int expected, Path zip) throws IOException {
    Set<String> fileNames = getFileNames(zip);
    assertEquals(fileNames.toString(), expected, fileNames.size());
  }

  private void assertZipContains(Path zip, String... files) throws IOException {
    final Set<String> contents = getFileNames(zip);
    for (String file : files) {
      assertTrue(String.format("%s -> %s", file, contents), contents.contains(file));
    }
  }

  private void assertZipDoesNotContain(Path zip, String... files) throws IOException {
    final Set<String> contents = getFileNames(zip);
    for (String file : files) {
      assertFalse(String.format("%s -> %s", file, contents), contents.contains(file));
    }
  }

  private Set<String> getFileNames(Path zipFile) throws IOException {
    try (Zip zip = new Zip(zipFile, false)) {
      return zip.getFileNames();
    }
  }
}
/*
 Licensed to Diennea S.r.l. under one or more contributor license agreements.
 See the NOTICE file distributed with this work for additional information
 regarding copyright ownership. Diennea S.r.l. licenses this file to you under
 the Apache License, Version 2.0 (the "License"); you may not use this file
 except in compliance with the License. You may obtain a copy of the License at

 http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 License for the specific language governing permissions and limitations under
 the License.
*/
package herddb.core;

import static herddb.core.TestUtils.execute;
import static herddb.core.TestUtils.executeUpdate;
import static herddb.core.TestUtils.scan;
import herddb.mem.MemoryCommitLogManager;
import herddb.mem.MemoryDataStorageManager;
import herddb.mem.MemoryMetadataStorageManager;
import herddb.model.DataScanner;
import herddb.model.StatementEvaluationContext;
import herddb.model.TransactionContext;
import herddb.model.commands.CreateTableSpaceStatement;
import herddb.sql.CalcitePlanner;
import herddb.sql.DDLSQLPlanner;
import java.sql.Timestamp;
import java.util.Arrays;
import java.util.Collections;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import org.junit.Test;

/**
 * Exercises scalar SQL expression evaluation (constants, column access, arithmetic,
 * functions, comparisons, logic, NULL handling, CASE, LIKE, IN and BETWEEN) against an
 * in-memory DBManager with a single-row table.
 *
 * @author francesco.caliumi
 */
public class SimpleOperatorsTest {

    @Test
    public void simpleArithmeticOperationsTest() throws Exception {
        String nodeId = "localhost";
        // Fully in-memory manager: no files are touched, everything is torn down on close.
        try (DBManager manager = new DBManager("localhost",
                new MemoryMetadataStorageManager(), new MemoryDataStorageManager(),
                new MemoryCommitLogManager(), null, null);) {
            manager.start();
            CreateTableSpaceStatement st1 = new CreateTableSpaceStatement("tblspace1",
                    Collections.singleton(nodeId), nodeId, 1, 0, 0);
            manager.executeStatement(st1,
                    StatementEvaluationContext.DEFAULT_EVALUATION_CONTEXT(),
                    TransactionContext.NO_TRANSACTION);
            manager.waitForTablespace("tblspace1", 10000);

            execute(manager, "CREATE TABLE tblspace1.tsql (k1 string primary key, n1 int, l1 long, t1 timestamp, nu string, b1 bool, d1 double)", Collections.emptyList());

            // Single seed row; every scan below therefore yields exactly 0 or 1 records.
            assertEquals(1, executeUpdate(
                    manager,
                    "INSERT INTO tblspace1.tsql(k1,n1,l1,t1,nu,b1,d1) values(?,?,?,?,?,?,?)",
                    Arrays.asList("mykey", Integer.valueOf(1), Long.valueOf(2),
                            new java.sql.Timestamp(System.currentTimeMillis()), null,
                            Boolean.valueOf(true), Double.valueOf(1.5)))
                    .getUpdateCount());

            // Simple constants
            try (DataScanner scan1 = scan(manager, "SELECT 0.5 FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(0.5, scan1.consume().get(0).get(0));
            }
            try (DataScanner scan1 = scan(manager, "SELECT 1 FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(1L, ((Number) scan1.consume().get(0).get(0)).longValue());
            }
            try (DataScanner scan1 = scan(manager, "SELECT 'asd' FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals("asd", scan1.consume().get(0).get(0).toString());
            }
            try (DataScanner scan1 = scan(manager, "SELECT true FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(true, scan1.consume().get(0).get(0));
            }
            try (DataScanner scan1 = scan(manager, "SELECT false FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(false, scan1.consume().get(0).get(0));
            }
            try (DataScanner scan1 = scan(manager, "SELECT CURRENT_TIMESTAMP FROM tblspace1.tsql", Collections.emptyList());) {
                long instant = ((java.sql.Timestamp) scan1.consume().get(0).get(0)).getTime();
                // CURRENT_TIMESTAMP must be "now" within a small tolerance.
                assertTrue(Math.abs(System.currentTimeMillis() - instant) < 200);
            }

            // Simple column access
            try (DataScanner scan1 = scan(manager, "SELECT k1 FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals("mykey", scan1.consume().get(0).get(0).toString());
            }
            try (DataScanner scan1 = scan(manager, "SELECT n1 FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(1, scan1.consume().get(0).get(0));
            }
            try (DataScanner scan1 = scan(manager, "SELECT l1 FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(2L, scan1.consume().get(0).get(0));
            }
            try (DataScanner scan1 = scan(manager, "SELECT t1 FROM tblspace1.tsql", Collections.emptyList());) {
                final Timestamp currentTs = (java.sql.Timestamp) scan1.consume().get(0).get(0);
                System.out.println("currentTs:" + currentTs);
                long instant = currentTs.getTime();
                long delta = System.currentTimeMillis() - instant;
                System.out.println("delta:" + delta);
                // Generous bound: just checks the stored timestamp round-trips sanely.
                assertTrue("too slow ? " + delta, delta < 60000);
            }
            try (DataScanner scan1 = scan(manager, "SELECT nu FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(null, scan1.consume().get(0).get(0));
            }
            try (DataScanner scan1 = scan(manager, "SELECT b1 FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(true, scan1.consume().get(0).get(0));
            }
            try (DataScanner scan1 = scan(manager, "SELECT d1 FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(1.5, scan1.consume().get(0).get(0));
            }

            // Simple expressions
            try (DataScanner scan1 = scan(manager, "SELECT 4+3+2 FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(9L, scan1.consume().get(0).get(0));
            }
            try (DataScanner scan1 = scan(manager, "SELECT 7-3-2 FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(2L, scan1.consume().get(0).get(0));
            }
            // Note: division of integer literals is evaluated in floating point here
            // (asserted result is 0.25, not 0).
            try (DataScanner scan1 = scan(manager, "SELECT 1/2/2 FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(0.25, scan1.consume().get(0).get(0));
            }
            try (DataScanner scan1 = scan(manager, "SELECT 4*3*2 FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(24L, scan1.consume().get(0).get(0));
            }

            // Functions
            try (DataScanner scan1 = scan(manager, "SELECT lower('CiAo') FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals("ciao", scan1.consume().get(0).get(0).toString());
            }
            try (DataScanner scan1 = scan(manager, "SELECT upper('CiAo') FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals("CIAO", scan1.consume().get(0).get(0).toString());
            }
            try (DataScanner scan1 = scan(manager, "SELECT abs(-123) FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(123L, ((Number) scan1.consume().get(0).get(0)).longValue());
            }
            try (DataScanner scan1 = scan(manager, "SELECT abs(123) FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(123L, ((Number) scan1.consume().get(0).get(0)).longValue());
            }
            try (DataScanner scan1 = scan(manager, "SELECT abs(-123.5) FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(123.5, scan1.consume().get(0).get(0));
            }
            try (DataScanner scan1 = scan(manager, "SELECT abs(123.5) FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(123.5, scan1.consume().get(0).get(0));
            }
            try (DataScanner scan1 = scan(manager, "SELECT round(98765.98765) FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(98766.0, scan1.consume().get(0).get(0));
            }
            try (DataScanner scan1 = scan(manager, "SELECT round(98765.98765, 2) FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(98765.99, scan1.consume().get(0).get(0));
            }
            // Negative scale rounds to the left of the decimal point.
            try (DataScanner scan1 = scan(manager, "SELECT round(98765.98765, -2) FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(98800.0, scan1.consume().get(0).get(0));
            }

            // Simple comparisons
            // Warning: jSQLParser doesn't handle this kind of expressions in select clause
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 1<2", Collections.emptyList());) {
                assertEquals(1, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 2<1", Collections.emptyList());) {
                assertEquals(0, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 1<1", Collections.emptyList());) {
                assertEquals(0, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 2>1", Collections.emptyList());) {
                assertEquals(1, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 1>2", Collections.emptyList());) {
                assertEquals(0, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 1>1", Collections.emptyList());) {
                assertEquals(0, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 1<=2", Collections.emptyList());) {
                assertEquals(1, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 2<=1", Collections.emptyList());) {
                assertEquals(0, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 1<=1", Collections.emptyList());) {
                assertEquals(1, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 2>=1", Collections.emptyList());) {
                assertEquals(1, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 1>=2", Collections.emptyList());) {
                assertEquals(0, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 1>=1", Collections.emptyList());) {
                assertEquals(1, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 2=1", Collections.emptyList());) {
                assertEquals(0, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 1=1", Collections.emptyList());) {
                assertEquals(1, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 1<>2", Collections.emptyList());) {
                assertEquals(1, scan1.consume().size());
            }

            // Logic expressions
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE (1>2) or (1>0)", Collections.emptyList());) {
                assertEquals(1, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE (1>2) or not (1>0)", Collections.emptyList());) {
                assertEquals(0, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE (1>2) and (1>0)", Collections.emptyList());) {
                assertEquals(0, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE not (1>2) and (1>0)", Collections.emptyList());) {
                assertEquals(1, scan1.consume().size());
            }

            // Null expressions
            // Warning: Parser doesn't handle this kind of expressions in select clause
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE null is null", Collections.emptyList());) {
                assertEquals(1, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE null is not null", Collections.emptyList());) {
                assertEquals(0, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 1 is null", Collections.emptyList());) {
                assertEquals(0, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 1 is not null", Collections.emptyList());) {
                assertEquals(1, scan1.consume().size());
            }

            // Case expressions (k1 is 'mykey'; each variant moves the matching WHEN branch)
            try (DataScanner scan1 = scan(manager, "SELECT CASE "
                    + " WHEN k1='mykey' THEN 1 "
                    + " WHEN k1='mykeys' THEN 2 "
                    + " ELSE 3 "
                    + "END as mycase "
                    + "FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(1L, ((Number) scan1.consume().get(0).get(0)).longValue());
            }
            try (DataScanner scan1 = scan(manager, "SELECT CASE "
                    + " WHEN k1='mykeys' THEN 1 "
                    + " WHEN k1='mykey' THEN 2 "
                    + " ELSE 3 "
                    + "END as mycase "
                    + "FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(2L, ((Number) scan1.consume().get(0).get(0)).longValue());
            }
            try (DataScanner scan1 = scan(manager, "SELECT CASE "
                    + " WHEN k1='mykeys' THEN 1 "
                    + " WHEN k1='mykeyb' THEN 2 "
                    + " ELSE 3 "
                    + "END as mycase "
                    + "FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(3L, ((Number) scan1.consume().get(0).get(0)).longValue());
            }

            // Like expressions
            // Warning: Parser doesn't handle this kind of expressions in select clause
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 'AbBbCc' LIKE '_b____'", Collections.emptyList());) {
                assertEquals(1, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 'AbBbCc' LIKE '_B____'", Collections.emptyList());) {
                assertEquals(0, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 'AbBbCc' LIKE '_b%'", Collections.emptyList());) {
                assertEquals(1, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 'AbBbCc' LIKE '_d%'", Collections.emptyList());) {
                assertEquals(0, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 'AbBbCc' LIKE 'AbBbCc'", Collections.emptyList());) {
                assertEquals(1, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 'AbBbCc' LIKE '%AbBbCc%'", Collections.emptyList());) {
                assertEquals(1, scan1.consume().size());
            }

            // In expressions
            // Warning: jsqlParser doesn't handle this kind of expressions in select clause
            // The two planners disagree on string/number coercion inside IN lists.
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE '1' in (1,2,3)", Collections.emptyList());) {
                if (manager.getPlanner() instanceof CalcitePlanner) {
                    assertEquals(0, scan1.consume().size());
                } else {
                    assertEquals(1, scan1.consume().size());
                }
            }
            if ((manager.getPlanner() instanceof DDLSQLPlanner)) {
                try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE '1' in ('1',2,3)", Collections.emptyList());) {
                    assertEquals(1, scan1.consume().size());
                }
                try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 'b' in ('1',2,3)", Collections.emptyList());) {
                    assertEquals(0, scan1.consume().size());
                }
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 'b' in (1)", Collections.emptyList());) {
                assertEquals(0, scan1.consume().size());
            }

            // Between expressions (bounds are inclusive on both ends)
            // Warning: Parser doesn't handle this kind of expressions in select clause
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 3 BETWEEN 1 AND 5", Collections.emptyList());) {
                assertEquals(1, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 1 BETWEEN 1 AND 5", Collections.emptyList());) {
                assertEquals(1, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 5 BETWEEN 1 AND 5", Collections.emptyList());) {
                assertEquals(1, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 0 BETWEEN 1 AND 5", Collections.emptyList());) {
                assertEquals(0, scan1.consume().size());
            }
            try (DataScanner scan1 = scan(manager, "SELECT * FROM tblspace1.tsql WHERE 6 BETWEEN 1 AND 5", Collections.emptyList());) {
                assertEquals(0, scan1.consume().size());
            }

            // Nested arithmetic with parentheses
            try (DataScanner scan1 = scan(manager, "SELECT ((4+(3+2)-1)*2) FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(16L, scan1.consume().get(0).get(0));
            }
            try (DataScanner scan1 = scan(manager, "SELECT ((3/2)*3+(1/2)) FROM tblspace1.tsql", Collections.emptyList());) {
                assertEquals(5.0, scan1.consume().get(0).get(0));
            }
        }
    }
}
package com.samsung.android.sdk.iap.lib.helper; import android.app.Activity; import android.app.AlertDialog; import android.content.ComponentName; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.pm.PackageInfo; import android.content.pm.PackageManager; import android.content.pm.Signature; import android.net.Uri; import android.os.Build; import android.util.Log; import com.samsung.android.sdk.iap.lib.R; import com.samsung.android.sdk.iap.lib.activity.BaseActivity; import com.samsung.android.sdk.iap.lib.vo.ErrorVo; /** * Created by sangbum7.kim on 2017-08-17. */ public class HelperUtil { private static final String TAG = HelperUtil.class.getSimpleName(); /** * show dialog * @param _title * @param _message */ public static void showIapDialogIfNeeded ( final Activity _activity, String _title, String _message, final boolean _finishActivity, final Runnable _onClickRunable, boolean _showDialog ) { if( _showDialog == false ) { if( _finishActivity == true ) { try{ _activity.finish(); } catch( Exception _e ){ _e.printStackTrace(); } } return; } AlertDialog.Builder alert = new AlertDialog.Builder( _activity ); alert.setTitle( _title ); alert.setMessage( _message ); alert.setPositiveButton( android.R.string.ok, new DialogInterface.OnClickListener() { @Override public void onClick( DialogInterface _dialog, int _which ) { if( null != _onClickRunable ) { _onClickRunable.run(); } _dialog.dismiss(); if( true == _finishActivity ) { _activity.finish(); } } } ); if( true == _finishActivity ) { alert.setOnCancelListener( new DialogInterface.OnCancelListener() { @Override public void onCancel( DialogInterface dialog ) { _activity.finish(); } }); } try { alert.show(); } catch( Exception e ) { e.printStackTrace(); } } /** * Check that Apps package is installed * @param _context Context * @return If it is true Billing package is installed. otherwise, not installed. 
*/ static public boolean isInstalledAppsPackage( Context _context ) { PackageManager pm = _context.getPackageManager(); try { //// TODO: 2017-08-16 Make sure the packageInfo is normal and set the version code PackageInfo packageInfo = pm.getPackageInfo(HelperDefine.GALAXY_PACKAGE_NAME, PackageManager.GET_META_DATA); Log.d(TAG, "isInstalledAppsPackage: versionCode " + packageInfo.versionCode); return packageInfo.versionCode >= HelperDefine.APPS_PACKAGE_VERSION; } catch( PackageManager.NameNotFoundException e ) { e.printStackTrace(); return false; } } static public boolean isEnabledAppsPackage(Context context) { //// TODO: 2017-08-16 Make sure the status is normal int status = context.getPackageManager().getApplicationEnabledSetting(HelperDefine.GALAXY_PACKAGE_NAME); Log.d(TAG, "isEnabledAppsPackage: status " + status); return !((status == PackageManager.COMPONENT_ENABLED_STATE_DISABLED) || (status == PackageManager.COMPONENT_ENABLED_STATE_DISABLED_USER)); } /** * check validation of installed Billing package in your device * @param _context * @return If it is true Billing package is valid. otherwise, is not valid. 
*/ static public boolean isValidAppsPackage( Context _context ) { boolean result = true; try { Signature[] sigs = _context.getPackageManager().getPackageInfo( HelperDefine.GALAXY_PACKAGE_NAME, PackageManager.GET_SIGNATURES ).signatures; Log.d(TAG, "isValidAppsPackage: HASHCODE : " + sigs[0].hashCode()); if( sigs[0].hashCode() != HelperDefine.APPS_SIGNATURE_HASHCODE ) { result = false; } } catch( Exception e ) { e.printStackTrace(); result = false; } return result; } /** * SamsungAccount authentication * @param _activity */ static public void startAccountActivity( final Activity _activity ) { ComponentName com = new ComponentName( HelperDefine.GALAXY_PACKAGE_NAME, HelperDefine.IAP_PACKAGE_NAME + ".activity.AccountActivity" ); Context context = _activity.getApplicationContext(); Intent intent = new Intent(); intent.setComponent( com ); if(intent.resolveActivity(context.getPackageManager()) != null) { _activity.startActivityForResult(intent, HelperDefine.REQUEST_CODE_IS_ACCOUNT_CERTIFICATION); } } /** * go to about page of SamsungApps in order to install IAP package. */ static public void installAppsPackage( final BaseActivity _activity ) { // 1. 
When user click the OK button on the dialog, // go to SamsungApps IAP Detail page // ==================================================================== Runnable OkBtnRunnable = new Runnable() { @Override public void run() { Context context = _activity.getApplicationContext(); // Link of SamsungApps for IAP install // ------------------------------------------------------------ Uri appsDeepLink = Uri.parse( "samsungapps://StoreVersionInfo/"); // ------------------------------------------------------------ Intent intent = new Intent(); intent.setData( appsDeepLink ); if( Build.VERSION.SDK_INT >= HelperDefine.HONEYCOMB_MR1 ) { intent.addFlags( Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TOP | HelperDefine.FLAG_INCLUDE_STOPPED_PACKAGES ); } else { intent.addFlags( Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TOP ); } if(intent.resolveActivity(context.getPackageManager()) != null) { context.startActivity(intent); } } }; // ==================================================================== // 2. Set error in order to notify result to third-party application. // ==================================================================== ErrorVo errorVo = new ErrorVo(); _activity.setErrorVo( errorVo ); errorVo.setError( HelperDefine.IAP_PAYMENT_IS_CANCELED, _activity.getString(R.string.mids_sapps_pop_payment_canceled) ); // ==================================================================== // 3. Show information dialog // ==================================================================== HelperUtil.showIapDialogIfNeeded( _activity, _activity.getString( R.string.mids_sapps_header_update_galaxy_apps ), _activity.getString( R.string.mids_sapps_pop_a_new_version_is_available_galaxy_apps_will_be_updated_to_the_latest_version_to_complete_this_purchase ), true, OkBtnRunnable, true ); // ==================================================================== } }
/** * This file contains an implementation of a Binary Search Tree (BST) Any comparable data is allowed * within this tree (numbers, strings, comparable Objects, etc...). Supported operations include * adding, removing, height, and containment checks. Furthermore, multiple tree traversal Iterators * are provided including: 1) Preorder traversal 2) Inorder traversal 3) Postorder traversal 4) * Levelorder traversal * * @author William Fiset, william.alexandre.fiset@gmail.com */ package com.williamfiset.datastructures.binarysearchtree; public class BinarySearchTree<T extends Comparable<T>> { // Tracks the number of nodes in this BST private int nodeCount = 0; // This BST is a rooted tree so we maintain a handle on the root node private Node root = null; // Internal node containing node references // and the actual node data private class Node { T data; Node left, right; public Node(Node left, Node right, T elem) { this.data = elem; this.left = left; this.right = right; } } // Check if this binary tree is empty public boolean isEmpty() { return size() == 0; } // Get the number of nodes in this binary tree public int size() { return nodeCount; } // Add an element to this binary tree. 
Returns true // if we successfully perform an insertion public boolean add(T elem) { // Check if the value already exists in this // binary tree, if it does ignore adding it if (contains(elem)) { return false; // Otherwise add this element to the binary tree } else { root = add(root, elem); nodeCount++; return true; } } // Private method to recursively add a value in the binary tree private Node add(Node node, T elem) { // Base case: found a leaf node if (node == null) { node = new Node(null, null, elem); } else { // Pick a subtree to insert element if (elem.compareTo(node.data) < 0) { node.left = add(node.left, elem); } else { node.right = add(node.right, elem); } } return node; } // Remove a value from this binary tree if it exists, O(n) public boolean remove(T elem) { // Make sure the node we want to remove // actually exists before we remove it if (contains(elem)) { root = remove(root, elem); nodeCount--; return true; } return false; } private Node remove(Node node, T elem) { if (node == null) return null; int cmp = elem.compareTo(node.data); // Dig into left subtree, the value we're looking // for is smaller than the current value if (cmp < 0) { node.left = remove(node.left, elem); // Dig into right subtree, the value we're looking // for is greater than the current value } else if (cmp > 0) { node.right = remove(node.right, elem); // Found the node we wish to remove } else { // This is the case with only a right subtree or // no subtree at all. In this situation just // swap the node we wish to remove with its right child. if (node.left == null) { Node rightChild = node.right; node.data = null; node = null; return rightChild; // This is the case with only a left subtree or // no subtree at all. In this situation just // swap the node we wish to remove with its left child. 
} else if (node.right == null) { Node leftChild = node.left; node.data = null; node = null; return leftChild; // When removing a node from a binary tree with two links the // successor of the node being removed can either be the largest // value in the left subtree or the smallest value in the right // subtree. In this implementation I have decided to find the // smallest value in the right subtree which can be found by // traversing as far left as possible in the right subtree. } else { // Find the leftmost node in the right subtree Node tmp = findMin(node.right); // Swap the data node.data = tmp.data; // Go into the right subtree and remove the leftmost node we // found and swapped data with. This prevents us from having // two nodes in our tree with the same value. node.right = remove(node.right, tmp.data); // If instead we wanted to find the largest node in the left // subtree as opposed to smallest node in the right subtree // here is what we would do: // Node tmp = findMax(node.left); // node.data = tmp.data; // node.left = remove(node.left, tmp.data); } } return node; } // Helper method to find the leftmost node (which has the smallest value) private Node findMin(Node node) { while (node.left != null) node = node.left; return node; } // Helper method to find the rightmost node (which has the largest value) private Node findMax(Node node) { while (node.right != null) node = node.right; return node; } // returns true is the element exists in the tree public boolean contains(T elem) { return contains(root, elem); } // private recursive method to find an element in the tree private boolean contains(Node node, T elem) { // Base case: reached bottom, value not found if (node == null) return false; int cmp = elem.compareTo(node.data); // Dig into the left subtree because the value we're // looking for is smaller than the current value if (cmp < 0) return contains(node.left, elem); // Dig into the right subtree because the value we're // looking for is greater than 
the current value else if (cmp > 0) return contains(node.right, elem); // We found the value we were looking for else return true; } // Computes the height of the tree, O(n) public int height() { return height(root); } // Recursive helper method to compute the height of the tree private int height(Node node) { if (node == null) return 0; return Math.max(height(node.left), height(node.right)) + 1; } // This method returns an iterator for a given TreeTraversalOrder. // The ways in which you can traverse the tree are in four different ways: // preorder, inorder, postorder and levelorder. public java.util.Iterator<T> traverse(TreeTraversalOrder order) { switch (order) { case PRE_ORDER: return preOrderTraversal(); case IN_ORDER: return inOrderTraversal(); case POST_ORDER: return postOrderTraversal(); case LEVEL_ORDER: return levelOrderTraversal(); default: return null; } } // Returns as iterator to traverse the tree in pre order private java.util.Iterator<T> preOrderTraversal() { final int expectedNodeCount = nodeCount; final java.util.Stack<Node> stack = new java.util.Stack<>(); stack.push(root); return new java.util.Iterator<T>() { @Override public boolean hasNext() { if (expectedNodeCount != nodeCount) throw new java.util.ConcurrentModificationException(); return root != null && !stack.isEmpty(); } @Override public T next() { if (expectedNodeCount != nodeCount) throw new java.util.ConcurrentModificationException(); Node node = stack.pop(); if (node.right != null) stack.push(node.right); if (node.left != null) stack.push(node.left); return node.data; } @Override public void remove() { throw new UnsupportedOperationException(); } }; } // Returns as iterator to traverse the tree in order private java.util.Iterator<T> inOrderTraversal() { final int expectedNodeCount = nodeCount; final java.util.Stack<Node> stack = new java.util.Stack<>(); stack.push(root); return new java.util.Iterator<T>() { Node trav = root; @Override public boolean hasNext() { if (expectedNodeCount != 
nodeCount) throw new java.util.ConcurrentModificationException(); return root != null && !stack.isEmpty(); } @Override public T next() { if (expectedNodeCount != nodeCount) throw new java.util.ConcurrentModificationException(); // Dig left while (trav != null && trav.left != null) { stack.push(trav.left); trav = trav.left; } Node node = stack.pop(); // Try moving down right once if (node.right != null) { stack.push(node.right); trav = node.right; } return node.data; } @Override public void remove() { throw new UnsupportedOperationException(); } }; } // Returns as iterator to traverse the tree in post order private java.util.Iterator<T> postOrderTraversal() { final int expectedNodeCount = nodeCount; final java.util.Stack<Node> stack1 = new java.util.Stack<>(); final java.util.Stack<Node> stack2 = new java.util.Stack<>(); stack1.push(root); while (!stack1.isEmpty()) { Node node = stack1.pop(); if (node != null) { stack2.push(node); if (node.left != null) stack1.push(node.left); if (node.right != null) stack1.push(node.right); } } return new java.util.Iterator<T>() { @Override public boolean hasNext() { if (expectedNodeCount != nodeCount) throw new java.util.ConcurrentModificationException(); return root != null && !stack2.isEmpty(); } @Override public T next() { if (expectedNodeCount != nodeCount) throw new java.util.ConcurrentModificationException(); return stack2.pop().data; } @Override public void remove() { throw new UnsupportedOperationException(); } }; } // Returns as iterator to traverse the tree in level order private java.util.Iterator<T> levelOrderTraversal() { final int expectedNodeCount = nodeCount; final java.util.Queue<Node> queue = new java.util.LinkedList<>(); queue.offer(root); return new java.util.Iterator<T>() { @Override public boolean hasNext() { if (expectedNodeCount != nodeCount) throw new java.util.ConcurrentModificationException(); return root != null && !queue.isEmpty(); } @Override public T next() { if (expectedNodeCount != nodeCount) throw 
new java.util.ConcurrentModificationException(); Node node = queue.poll(); if (node.left != null) queue.offer(node.left); if (node.right != null) queue.offer(node.right); return node.data; } @Override public void remove() { throw new UnsupportedOperationException(); } }; } }
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2017 by Hitachi Vantara : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.trans.steps.prioritizestreams; import java.util.List; import org.pentaho.di.core.CheckResult; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.Const; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleStepException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.shared.SharedObjectInterface; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInterface; import org.pentaho.metastore.api.IMetaStore; 
import org.w3c.dom.Node; /* * Created on 30-06-2008 * */ public class PrioritizeStreamsMeta extends BaseStepMeta implements StepMetaInterface { private static Class<?> PKG = PrioritizeStreamsMeta.class; // for i18n purposes, needed by Translator2!! /** by which steps to display? */ private String[] stepName; public PrioritizeStreamsMeta() { super(); // allocate BaseStepMeta } public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException { readData( stepnode, databases ); } public Object clone() { PrioritizeStreamsMeta retval = (PrioritizeStreamsMeta) super.clone(); int nrfields = stepName.length; retval.allocate( nrfields ); System.arraycopy( stepName, 0, retval.stepName, 0, nrfields ); return retval; } public void allocate( int nrfields ) { stepName = new String[nrfields]; } /** * @return Returns the stepName. */ public String[] getStepName() { return stepName; } /** * @param stepName * The stepName to set. */ public void setStepName( String[] stepName ) { this.stepName = stepName; } public void getFields( RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { // Default: nothing changes to rowMeta } private void readData( Node stepnode, List<? 
extends SharedObjectInterface> databases ) throws KettleXMLException { try { Node steps = XMLHandler.getSubNode( stepnode, "steps" ); int nrsteps = XMLHandler.countNodes( steps, "step" ); allocate( nrsteps ); for ( int i = 0; i < nrsteps; i++ ) { Node fnode = XMLHandler.getSubNodeByNr( steps, "step", i ); stepName[i] = XMLHandler.getTagValue( fnode, "name" ); } } catch ( Exception e ) { throw new KettleXMLException( "Unable to load step info from XML", e ); } } public String getXML() { StringBuilder retval = new StringBuilder(); retval.append( " <steps>" + Const.CR ); for ( int i = 0; i < stepName.length; i++ ) { retval.append( " <step>" + Const.CR ); retval.append( " " + XMLHandler.addTagValue( "name", stepName[i] ) ); retval.append( " </step>" + Const.CR ); } retval.append( " </steps>" + Const.CR ); return retval.toString(); } public void setDefault() { int nrsteps = 0; allocate( nrsteps ); for ( int i = 0; i < nrsteps; i++ ) { stepName[i] = "step" + i; } } public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException { try { int nrsteps = rep.countNrStepAttributes( id_step, "step_name" ); allocate( nrsteps ); for ( int i = 0; i < nrsteps; i++ ) { stepName[i] = rep.getStepAttributeString( id_step, i, "step_name" ); } } catch ( Exception e ) { throw new KettleException( "Unexpected error reading step information from the repository", e ); } } public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException { try { for ( int i = 0; i < stepName.length; i++ ) { rep.saveStepAttribute( id_transformation, id_step, i, "step_name", stepName[i] ); } } catch ( Exception e ) { throw new KettleException( "Unable to save step information to the repository for id_step=" + id_step, e ); } } public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, 
RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { CheckResult cr; if ( prev == null || prev.size() == 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_WARNING, BaseMessages.getString( PKG, "PrioritizeStreamsMeta.CheckResult.NotReceivingFields" ), stepMeta ); remarks.add( cr ); } else { if ( stepName.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "PrioritizeStreamsMeta.CheckResult.AllStepsFound" ), stepMeta ); remarks.add( cr ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_WARNING, BaseMessages.getString( PKG, "PrioritizeStreamsMeta.CheckResult.NoStepsEntered" ), stepMeta ); remarks.add( cr ); } } // See if we have input streams leading to this step! if ( input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "PrioritizeStreamsMeta.CheckResult.StepRecevingData2" ), stepMeta ); remarks.add( cr ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "PrioritizeStreamsMeta.CheckResult.NoInputReceivedFromOtherSteps" ), stepMeta ); remarks.add( cr ); } } public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta tr, Trans trans ) { return new PrioritizeStreams( stepMeta, stepDataInterface, cnr, tr, trans ); } public StepDataInterface getStepData() { return new PrioritizeStreamsData(); } }
package com.mx.dxinl.gzmtrmap;

import android.Manifest;
import android.app.AlertDialog;
import android.content.pm.PackageManager;
import android.database.sqlite.SQLiteDatabase;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;

import com.mx.dxinl.gzmtrmap.Structs.Line;
import com.mx.dxinl.gzmtrmap.Structs.Node;
import com.mx.dxinl.gzmtrmap.Utils.AssetsDatabaseHelper;
import com.mx.dxinl.gzmtrmap.Utils.DbUtils;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Main screen of the Guangzhou metro map app: requests external-storage
 * permissions (API 23+), loads the metro graph from a bundled SQLite database
 * on a background task, and lets the user pick start/end stations to find a
 * route.
 */
public class MainActivity extends AppCompatActivity implements ChoseNodeListener {
    private static final String TAG = "GZMtrMap";
    // Name of the SQLite database shipped in assets.
    private static final String DB_NAME = "mtr.db";
    private static final int MY_PERMISSIONS_REQUEST_EXTERNAL_STORAGE = 48;

    private MtrView mtr;
    private TextView start;
    private TextView end;
    private String startNodeName, endNodeName;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // Runtime permissions exist only from API 23 (Marshmallow) on; below
        // that, install-time permissions suffice and we can init directly.
        if (Build.VERSION.SDK_INT >= 23) {
            int checkReadExternal = ContextCompat.checkSelfPermission(this, android.Manifest.permission.READ_EXTERNAL_STORAGE);
            int checkWriteExternal = ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE);
            if (checkReadExternal == PackageManager.PERMISSION_DENIED || checkWriteExternal == PackageManager.PERMISSION_DENIED) {
                // init() is deferred to onRequestPermissionsResult in this case.
                ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.WRITE_EXTERNAL_STORAGE}, MY_PERMISSIONS_REQUEST_EXTERNAL_STORAGE);
            } else {
                init();
            }
        } else {
            init();
        }
    }

    // Wires up views and listeners, configures the map's color palette, and
    // kicks off the background database load.
    private void init() {
        mtr = (MtrView) findViewById(R.id.mtr);
        start = (EditText) findViewById(R.id.start);
        end = (EditText) findViewById(R.id.end);
        Button findRouteBtn = (Button) findViewById(R.id.find_route);
        findRouteBtn.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Validate the two station names before asking for a route.
                startNodeName = start.getText().toString();
                endNodeName = end.getText().toString();
                // NOTE(review): getText().toString() never returns null, so the
                // null check is redundant (kept for byte-identical behavior).
                if (startNodeName == null || startNodeName.length() == 0) {
                    Toast.makeText(MainActivity.this,
                            String.format(getString(R.string.cannot_be_blank), getString(R.string.start)),
                            Toast.LENGTH_SHORT).show();
                } else if (endNodeName.length() == 0) {
                    Toast.makeText(MainActivity.this,
                            String.format(getString(R.string.cannot_be_blank), getString(R.string.end)),
                            Toast.LENGTH_SHORT).show();
                } else if (startNodeName.equals(endNodeName)) {
                    Toast.makeText(MainActivity.this,
                            getString(R.string.start_equals_end),
                            Toast.LENGTH_SHORT).show();
                } else {
                    // Route result is currently only logged, not displayed.
                    Log.e("path", mtr.findRoute(startNodeName, endNodeName));
                }
            }
        });

        // Map of line-color names (as stored in the database) to color resources.
        Map<String, Integer> colorMap = new HashMap<>();
        colorMap.put("blue", R.color.blue);
        colorMap.put("indigo", R.color.indigo);
        colorMap.put("cyan", R.color.cyan);
        colorMap.put("teal", R.color.teal);
        colorMap.put("lightgreen", R.color.lightgreen);
        colorMap.put("lime", R.color.lime);
        colorMap.put("brown", R.color.brown);
        colorMap.put("yellow", R.color.yellow);
        colorMap.put("bluegrey", R.color.bluegrey);
        colorMap.put("grey", R.color.grey);
        colorMap.put("red", R.color.red);
        colorMap.put("black", R.color.black);
        mtr.setColorMap(colorMap);
        mtr.setChoseNodeListener(this);

        new DBProcessTask().execute();
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
        switch (requestCode) {
            case MY_PERMISSIONS_REQUEST_EXTERNAL_STORAGE:
                // NOTE(review): per the Android documentation, grantResults may
                // be EMPTY if the permission request is cancelled; indexing
                // [0]/[1] unguarded risks ArrayIndexOutOfBoundsException -
                // confirm and add a length check.
                if (grantResults[0] == PackageManager.PERMISSION_DENIED
                        || grantResults[1] == PackageManager.PERMISSION_DENIED) {
                    // Without storage access the app cannot work: count down and exit.
                    new CloseAppTask().execute();
                } else {
                    init();
                }
                break;
        }
    }

    // ChoseNodeListener callback: the map view reports the chosen start station.
    @Override
    public void setStartNode(String name) {
        start.setText(name);
    }

    // ChoseNodeListener callback: the map view reports the chosen end station.
    @Override
    public void setEndNode(String name) {
        end.setText(name);
    }

    /**
     * Background task that loads stations (nodes), lines, colors and neighbor
     * distances from the bundled SQLite database, showing a modal "loading"
     * dialog while it runs.
     */
    public class DBProcessTask extends AsyncTask<Object, Object, List<Node>> {
        // Largest absolute coordinate seen, used to scale the map view.
        private int maxMapCoordinate;
        private AlertDialog dialog;

        @Override
        protected void onPreExecute() {
            super.onPreExecute();
            dialog = new AlertDialog.Builder(MainActivity.this)
                    .setIcon(R.mipmap.ic_launcher)
                    .setTitle(R.string.tips)
                    .setMessage(R.string.loading)
                    .setCancelable(false)
                    .create();
            dialog.show();
        }

        @Override
        protected List<Node> doInBackground(Object... params) {
            SQLiteDatabase db = AssetsDatabaseHelper.openDatabase(MainActivity.this, DB_NAME);
            if (db != null) {
                List<Node> nodes = DbUtils.getNodes(db);
                // Compute the largest absolute x/y coordinate for map scaling.
                int maxX = DbUtils.getMaxOrMinValue(db, "node", "x", DbUtils.MAX_OR_MIN.MAX);
                int minX = DbUtils.getMaxOrMinValue(db, "node", "x", DbUtils.MAX_OR_MIN.MIN);
                int maxY = DbUtils.getMaxOrMinValue(db, "node", "y", DbUtils.MAX_OR_MIN.MAX);
                int minY = DbUtils.getMaxOrMinValue(db, "node", "y", DbUtils.MAX_OR_MIN.MIN);
                maxMapCoordinate = Math.max(Math.max(maxX, maxY), Math.max(Math.abs(minX), Math.abs(minY)));

                List<Line> lines = DbUtils.getLines(db);
                for (Node node : nodes) {
                    // Attach the Line objects this station belongs to.
                    List<String> lineNames = DbUtils.getLineNames(db, node.name);
                    List<Line> nodeLines = new ArrayList<>();
                    for (String lineName : lineNames) {
                        for (Line line : lines) {
                            if (line.name.equals(lineName)) {
                                nodeLines.add(line);
                                break;
                            }
                        }
                    }
                    node.lines = nodeLines;

                    // Single-line stations take the line's color; interchange
                    // stations are drawn black.
                    if (lineNames.size() == 1) {
                        node.color = DbUtils.getColorName(db, lineNames.toArray()[0].toString());
                    } else {
                        node.color = "black";
                    }

                    // Resolve neighbor names to Node references with distances.
                    HashMap<String, Integer> neighborsMap = DbUtils.getNeighbors(db, node.name);
                    HashMap<Node, Integer> neighborsDist = new HashMap<>();
                    for (String neighborName : neighborsMap.keySet()) {
                        for (Node tmpNode : nodes) {
                            if (tmpNode.name.equals(neighborName)) {
                                neighborsDist.put(tmpNode, neighborsMap.get(neighborName));
                                break;
                            }
                        }
                    }
                    node.neighborsDist = neighborsDist;
                }
                return nodes;
            }
            // Database could not be opened: signal failure with null.
            return null;
        }

        @Override
        protected void onPostExecute(List<Node> nodes) {
            super.onPostExecute(nodes);
            dialog.dismiss();
            if (nodes != null) {
                mtr.setNodes(nodes, maxMapCoordinate);
            } else {
                Toast.makeText(MainActivity.this, getString(R.string.load_failed), Toast.LENGTH_SHORT).show();
            }
        }
    }

    /**
     * Background task that shows a 3-second countdown dialog informing the user
     * the app will close (permissions denied), then finishes the activity.
     */
    public class CloseAppTask extends AsyncTask<Object, Integer, Object> {
        private AlertDialog dialog;
        // Used as a format string; presumably contains a %d placeholder for the
        // remaining seconds - verify against the string resources.
        String msg = getString(R.string.no_permission) + getString(R.string.close_app);

        @Override
        protected void onPreExecute() {
            super.onPreExecute();
            dialog = new AlertDialog.Builder(MainActivity.this).create();
            dialog.setCancelable(false);
            dialog.setTitle(getString(R.string.close));
            dialog.setMessage(String.format(msg, 3));
            dialog.show();
        }

        @Override
        protected Object doInBackground(Object[] params) {
            // Tick once per second: 3 -> 2 -> 1 -> 0.
            try {
                Thread.sleep(1000);
                publishProgress(2);
                Thread.sleep(1000);
                publishProgress(1);
                Thread.sleep(1000);
                publishProgress(0);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            return null;
        }

        @Override
        protected void onProgressUpdate(Integer[] values) {
            super.onProgressUpdate(values);
            // Refresh the countdown shown in the dialog.
            dialog.setMessage(String.format(msg, values[0]));
        }

        @Override
        protected void onPostExecute(Object o) {
            super.onPostExecute(o);
            dialog.dismiss();
            // Countdown finished: close the app.
            finish();
        }
    }
}
/* Copyright 2015 Samsung Electronics Co., LTD * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gearvrf.scene_objects; import org.apache.commons.math3.geometry.euclidean.threed.Vector3D; import org.gearvrf.GVRSceneObject; import org.gearvrf.GVRRenderData; import org.gearvrf.GVRContext; import org.gearvrf.GVRMesh; import org.gearvrf.utility.Log; public class GVRCylinderSceneObject extends GVRSceneObject { @SuppressWarnings("unused") private static final String TAG = Log.tag(GVRCylinderSceneObject.class); private static final int STACK_NUMBER = 10; private static final int SLICE_NUMBER = 36; private static final float BASE_RADIUS = 0.5f; private static final float TOP_RADIUS = 0.5f; private static final float HEIGHT = 1.0f; private float[] vertices; private float[] normals; private float[] texCoords; private char[] indices; private int vertexCount = 0; private int texCoordCount = 0; private char indexCount = 0; private int triangleCount = 0; /** * Constructs a cylinder scene object with a height of 1, radius of 0.5,2 * stacks, and 36 slices. 
* * @param gvrContext * current {@link GVRContext} */ public GVRCylinderSceneObject(GVRContext gvrContext) { super(gvrContext); generateCylinder(BASE_RADIUS, TOP_RADIUS, HEIGHT, STACK_NUMBER, SLICE_NUMBER); GVRMesh mesh = new GVRMesh(gvrContext); mesh.setVertices(vertices); mesh.setNormals(normals); mesh.setTexCoords(texCoords); mesh.setTriangles(indices); GVRRenderData renderData = new GVRRenderData(gvrContext); attachRenderData(renderData); renderData.setMesh(mesh); } /** * Constructs a cylinder scene object. * * @param gvrContext * current {@link GVRContext} * @param bottomRadius * radius for the bottom of the cylinder * @param topRadius * radius for the top of the cylinder * @param height * height of the cylinder * @param stackNumber * number of quads high to make the cylinder. * @param sliceNumber * number of quads around to make the cylinder. */ public GVRCylinderSceneObject(GVRContext gvrContext, float bottomRadius, float topRadius, float height, int stackNumber, int sliceNumber) { super(gvrContext); // assert height, numStacks, numSlices > 0 if (height <= 0 || stackNumber <= 0 || sliceNumber <= 0) { throw new IllegalArgumentException( "height, numStacks, and numSlices must be > 0. Values passed were: height=" + height + ", numStacks=" + stackNumber + ", numSlices=" + sliceNumber); } // assert numCaps > 0 if (bottomRadius <= 0 && topRadius <= 0) { throw new IllegalArgumentException( "bottomRadius and topRadius must be >= 0 and at least one of bottomRadius or topRadius must be > 0. 
Values passed were: bottomRadius=" + bottomRadius + ", topRadius=" + topRadius); } generateCylinder(bottomRadius, topRadius, height, stackNumber, sliceNumber); GVRMesh mesh = new GVRMesh(gvrContext); mesh.setVertices(vertices); mesh.setNormals(normals); mesh.setTexCoords(texCoords); mesh.setTriangles(indices); GVRRenderData renderData = new GVRRenderData(gvrContext); attachRenderData(renderData); renderData.setMesh(mesh); } private void generateCylinder(float bottomRadius, float topRadius, float height, int stackNumber, int sliceNumber) { int capNumber = 2; if (bottomRadius == 0) { capNumber--; } if (topRadius == 0) { capNumber--; } int capVertexNumber = 3 * sliceNumber; int bodyVertexNumber = 4 * sliceNumber * stackNumber; int vertexNumber = (capNumber * capVertexNumber) + bodyVertexNumber; int triangleNumber = (capNumber * capVertexNumber) + (6 * sliceNumber * stackNumber); float halfHeight = height / 2.0f; vertices = new float[3 * vertexNumber]; normals = new float[3 * vertexNumber]; texCoords = new float[2 * triangleNumber]; indices = new char[triangleNumber]; // top cap // 3 * numSlices if (topRadius > 0) { createCap(topRadius, halfHeight, sliceNumber, 1.0f); } // cylinder body // 4 * numSlices * numStacks createBody(bottomRadius, topRadius, height, stackNumber, sliceNumber); // bottom cap // 3 * numSlices if (bottomRadius > 0) { createCap(bottomRadius, -halfHeight, sliceNumber, -1.0f); } } private void createCap(float radius, float height, int sliceNumber, float normalDirection) { for (int slice = 0; slice < sliceNumber; slice++) { double theta0 = ((slice) / sliceNumber) * 2.0 * Math.PI; double theta1 = ((slice + 1) / sliceNumber) * 2.0 * Math.PI; float y = height; float x0 = (float) (radius * Math.cos(theta0)); float z0 = (float) (radius * Math.sin(theta0)); float x1 = (float) (radius * Math.cos(theta1)); float z1 = (float) (radius * Math.sin(theta1)); float s0 = 1.0f - ((float) (slice) / sliceNumber); float s1 = 1.0f - ((float) (slice + 1) / sliceNumber); 
float s2 = (s0 + s1) / 2.0f; vertices[vertexCount + 0] = x0; vertices[vertexCount + 1] = y; vertices[vertexCount + 2] = z0; vertices[vertexCount + 3] = x1; vertices[vertexCount + 4] = y; vertices[vertexCount + 5] = z1; vertices[vertexCount + 6] = 0.0f; vertices[vertexCount + 7] = y; vertices[vertexCount + 8] = 0.0f; normals[vertexCount + 0] = 0.0f; normals[vertexCount + 1] = normalDirection; normals[vertexCount + 2] = 0.0f; normals[vertexCount + 3] = 0.0f; normals[vertexCount + 4] = normalDirection; normals[vertexCount + 5] = 0.0f; normals[vertexCount + 6] = 0.0f; normals[vertexCount + 7] = normalDirection; normals[vertexCount + 8] = 0.0f; texCoords[texCoordCount + 0] = s0; texCoords[texCoordCount + 1] = 0.0f; texCoords[texCoordCount + 2] = s1; texCoords[texCoordCount + 3] = 0.0f; texCoords[texCoordCount + 4] = s2; texCoords[texCoordCount + 5] = 1.0f; if (normalDirection > 0) { indices[indexCount + 0] = (char) (triangleCount + 1); indices[indexCount + 1] = (char) (triangleCount + 0); indices[indexCount + 2] = (char) (triangleCount + 2); } else { indices[indexCount + 0] = (char) (triangleCount + 0); indices[indexCount + 1] = (char) (triangleCount + 1); indices[indexCount + 2] = (char) (triangleCount + 2); } vertexCount += 9; texCoordCount += 6; indexCount += 3; triangleCount += 3; } } private void createBody(float bottomRadius, float topRadius, float height, int stackNumber, int sliceNumber) { float difference = bottomRadius - topRadius; float halfHeight = height / 2.0f; for (int stack = 0; stack < stackNumber; stack++) { int initVertexCount = vertexCount; float stackPercentage0 = ((float) (stack) / stackNumber); float stackPercentage1 = ((float) (stack + 1) / stackNumber); float t0 = 1.0f - stackPercentage0; float t1 = 1.0f - stackPercentage1; float y0 = -halfHeight + (stackPercentage0 * height); float y1 = -halfHeight + (stackPercentage1 * height); float nx, ny, nz; for (int slice = 0; slice < sliceNumber; slice++) { float slicePercentage0 = ((float) (slice) / 
sliceNumber); float slicePercentage1 = ((float) (slice + 1) / sliceNumber); double theta0 = slicePercentage0 * 2.0 * Math.PI; double theta1 = slicePercentage1 * 2.0 * Math.PI; double cosTheta0 = Math.cos(theta0); double sinTheta0 = Math.sin(theta0); double cosTheta1 = Math.cos(theta1); double sinTheta1 = Math.sin(theta1); float radius = (bottomRadius - (difference * stackPercentage0)); float x0 = (float) (radius * cosTheta0); float z0 = (float) (-radius * sinTheta0); float x1 = (float) (radius * cosTheta1); float z1 = (float) (-radius * sinTheta1); radius = (bottomRadius - (difference * stackPercentage1)); float x2 = (float) (radius * cosTheta0); float z2 = (float) (-radius * sinTheta0); float x3 = (float) (radius * cosTheta1); float z3 = (float) (-radius * sinTheta1); float s0 = slicePercentage0; float s1 = slicePercentage1; vertices[vertexCount + 0] = x0; vertices[vertexCount + 1] = y0; vertices[vertexCount + 2] = z0; vertices[vertexCount + 3] = x1; vertices[vertexCount + 4] = y0; vertices[vertexCount + 5] = z1; vertices[vertexCount + 6] = x2; vertices[vertexCount + 7] = y1; vertices[vertexCount + 8] = z2; vertices[vertexCount + 9] = x3; vertices[vertexCount + 10] = y1; vertices[vertexCount + 11] = z3; // calculate normal Vector3D v1 = new Vector3D(x1 - x0, 0, z1 - z0); Vector3D v2 = new Vector3D(x2 - x0, y1 - y0, z2 - z0); Vector3D v3 = v1.crossProduct(v2).normalize(); nx = (float) v3.getX(); ny = (float) v3.getY(); nz = (float) v3.getZ(); normals[vertexCount + 0] = nx; normals[vertexCount + 1] = ny; normals[vertexCount + 2] = nz; normals[vertexCount + 3] = nx; normals[vertexCount + 4] = ny; normals[vertexCount + 5] = nz; normals[vertexCount + 6] = nx; normals[vertexCount + 7] = ny; normals[vertexCount + 8] = nz; normals[vertexCount + 9] = nx; normals[vertexCount + 10] = ny; normals[vertexCount + 11] = nz; texCoords[texCoordCount + 0] = s0; texCoords[texCoordCount + 1] = t0; texCoords[texCoordCount + 2] = s1; texCoords[texCoordCount + 3] = t0; 
texCoords[texCoordCount + 4] = s0; texCoords[texCoordCount + 5] = t1; texCoords[texCoordCount + 6] = s1; texCoords[texCoordCount + 7] = t1; indices[indexCount + 0] = (char) (triangleCount + 0); // 0 indices[indexCount + 1] = (char) (triangleCount + 1); // 1 indices[indexCount + 2] = (char) (triangleCount + 2); // 2 indices[indexCount + 3] = (char) (triangleCount + 2); // 2 indices[indexCount + 4] = (char) (triangleCount + 1); // 1 indices[indexCount + 5] = (char) (triangleCount + 3); // 3 vertexCount += 12; texCoordCount += 8; indexCount += 6; triangleCount += 4; } for (int i = initVertexCount; i < vertexCount - 12; i += 12) { Vector3D v1 = new Vector3D(normals[i + 3], normals[i + 4], normals[i + 5]); Vector3D v2 = new Vector3D(normals[i + 12], normals[i + 13], normals[i + 14]); Vector3D v3 = v1.add(v2).normalize(); nx = (float) v3.getX(); ny = (float) v3.getY(); nz = (float) v3.getZ(); normals[i + 3] = nx; normals[i + 4] = ny; normals[i + 5] = nz; normals[i + 12] = nx; normals[i + 13] = ny; normals[i + 14] = nz; v1 = new Vector3D(normals[i + 9], normals[i + 10], normals[i + 11]); v2 = new Vector3D(normals[i + 18], normals[i + 19], normals[i + 20]); v3 = v1.add(v2).normalize(); nx = (float) v3.getX(); ny = (float) v3.getY(); nz = (float) v3.getZ(); normals[i + 9] = nx; normals[i + 10] = ny; normals[i + 11] = nz; normals[i + 18] = nx; normals[i + 19] = ny; normals[i + 20] = nz; } int i1 = vertexCount - 12; Vector3D v1 = new Vector3D(normals[i1 + 3], normals[i1 + 4], normals[i1 + 5]); int i2 = initVertexCount; Vector3D v2 = new Vector3D(normals[i2 + 0], normals[i2 + 1], normals[i2 + 2]); Vector3D v3 = v1.add(v2).normalize(); nx = (float) v3.getX(); ny = (float) v3.getY(); nz = (float) v3.getZ(); normals[i1 + 3] = nx; normals[i1 + 4] = ny; normals[i1 + 5] = nz; normals[i2 + 0] = nx; normals[i2 + 1] = ny; normals[i2 + 2] = nz; v1 = new Vector3D(normals[i1 + 9], normals[i1 + 10], normals[i1 + 11]); v2 = new Vector3D(normals[i2 + 6], normals[i2 + 7], normals[i2 + 8]); v3 
= v1.add(v2).normalize(); nx = (float) v3.getX(); ny = (float) v3.getY(); nz = (float) v3.getZ(); normals[i1 + 9] = nx; normals[i1 + 10] = ny; normals[i1 + 11] = nz; normals[i2 + 6] = nx; normals[i2 + 7] = ny; normals[i2 + 8] = nz; } } }
package org.jgroups.protocols; import org.jgroups.*; import org.jgroups.annotations.*; import org.jgroups.stack.IpAddress; import org.jgroups.stack.Protocol; import org.jgroups.util.Util; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.net.InetAddress; import java.net.NetworkInterface; import java.util.*; import java.util.concurrent.DelayQueue; import java.util.concurrent.Delayed; import java.util.concurrent.TimeUnit; import java.util.function.Supplier; /** * Catches SUSPECT events traveling up the stack. Verifies that the suspected member is really dead. If yes, * passes SUSPECT event up the stack, otherwise discards it. Has to be placed somewhere above the FD layer and * below the GMS layer (receiver of the SUSPECT event). Note that SUSPECT events may be reordered by this protocol. * @author Bela Ban */ @MBean(description="Double-checks suspicions reports") public class VERIFY_SUSPECT extends Protocol implements Runnable { /* ------------------------------------------ Properties ------------------------------------------ */ @Property(description="Number of millisecs to wait for a response from a suspected member") protected long timeout=2000; @Property(description="Number of verify heartbeats sent to a suspected member") protected int num_msgs=1; @Property(description="Use InetAddress.isReachable() to verify suspected member instead of regular messages") protected boolean use_icmp; @Property(description="Send the I_AM_NOT_DEAD message back as a multicast rather than as multiple unicasts " + "(default is false)") protected boolean use_mcast_rsps; @LocalAddress @Property(description="Interface for ICMP pings. 
Used if use_icmp is true " + "The following special values are also recognized: GLOBAL, SITE_LOCAL, LINK_LOCAL and NON_LOOPBACK", systemProperty={Global.BIND_ADDR}) protected InetAddress bind_addr; // interface for ICMP pings /* --------------------------------------------- Fields ------------------------------------------------ */ /** network interface to be used to send the ICMP packets */ protected NetworkInterface intf; protected Address local_addr; // a list of suspects, ordered by time when a SUSPECT event needs to be sent up protected final DelayQueue<Entry> suspects=new DelayQueue<>(); protected volatile Thread timer; protected volatile boolean running; @ManagedAttribute(description = "List of currently suspected members") public String getSuspects() { synchronized(suspects) { return suspects.toString(); } } public VERIFY_SUSPECT() { } /* ------------------------------------------ Builder-like methods ------------------------------------------ */ public VERIFY_SUSPECT setTimeout(long timeout) { this.timeout = timeout; return this; } public long getTimeout() { return timeout; } public Object down(Event evt) { switch(evt.getType()) { case Event.SET_LOCAL_ADDRESS: local_addr=evt.getArg(); break; case Event.VIEW_CHANGE: View v=evt.getArg(); adjustSuspectedMembers(v.getMembers()); break; } return down_prot.down(evt); } public Object up(Event evt) { switch(evt.getType()) { case Event.SUSPECT: // it all starts here ... // todo: change to collections in 4.1 Collection<Address> s=evt.arg() instanceof Address? 
Collections.singletonList(evt.arg()) : evt.arg(); if(s == null) return null; s.remove(local_addr); // ignoring suspect of self if(use_icmp) s.forEach(this::verifySuspectWithICMP); else verifySuspect(s); return null; // don't pass up; we will decide later (after verification) whether to pass it up case Event.CONFIG: if(bind_addr == null) { Map<String,Object> config=evt.getArg(); bind_addr=(InetAddress)config.get("bind_addr"); } } return up_prot.up(evt); } public Object up(Message msg) { VerifyHeader hdr=msg.getHeader(this.id); if(hdr == null) return up_prot.up(msg); switch(hdr.type) { case VerifyHeader.ARE_YOU_DEAD: if(hdr.from == null) { log.error(Util.getMessage("AREYOUDEADHdrFromIsNull")); return null; } Address target=use_mcast_rsps? null : hdr.from; for(int i=0; i < num_msgs; i++) { Message rsp=new Message(target).setFlag(Message.Flag.INTERNAL) .putHeader(this.id, new VerifyHeader(VerifyHeader.I_AM_NOT_DEAD, local_addr)); down_prot.down(rsp); } return null; case VerifyHeader.I_AM_NOT_DEAD: if(hdr.from == null) { log.error(Util.getMessage("IAMNOTDEADHdrFromIsNull")); return null; } unsuspect(hdr.from); return null; } return null; } /** * Removes all elements from suspects that are <em>not</em> in the new membership */ protected void adjustSuspectedMembers(List<Address> new_mbrship) { synchronized(suspects) { suspects.removeIf(entry -> !new_mbrship.contains(entry.suspect)); } } /** * Started when a suspected member is added to suspects. Iterates over the queue as long as there are suspects in * it and removes a suspect when the timeout for it has elapsed. Sends up a SUSPECT event for every removed suspect. * When a suspected member is un-suspected, the member is removed from the queue. 
*/ public void run() { for(;;) { synchronized(suspects) { // atomically checks for the empty queue and sets running to false (JGRP-2287) if(suspects.isEmpty()) { running=false; return; } } try { Entry entry=suspects.poll(timeout,TimeUnit.MILLISECONDS); if(entry != null) { List<Entry> expired=new ArrayList<>(suspects.size()); suspects.drainTo(expired); // let's see if we can remove more elements which have also expired Collection<Address> suspect_list=new LinkedHashSet<>(); suspect_list.add(entry.suspect); expired.forEach(e -> suspect_list.add(e.suspect)); log.debug("%s %s dead (passing up SUSPECT event)", suspect_list, suspect_list.size() > 1? "are" : "is"); up_prot.up(new Event(Event.SUSPECT, suspect_list)); } } catch(InterruptedException e) { if(!running) break; } } } /* --------------------------------- Private Methods ----------------------------------- */ /** * Sends ARE_YOU_DEAD message to suspected_mbr, wait for return or timeout */ protected void verifySuspect(Collection<Address> mbrs) { if(mbrs == null || mbrs.isEmpty()) return; if(addSuspects(mbrs)) { startTimer(); // start timer before we send out are you dead messages log.trace("verifying that %s %s dead", mbrs, mbrs.size() == 1? "is" : "are"); } for(Address mbr: mbrs) { for(int i=0; i < num_msgs; i++) { Message msg=new Message(mbr).setFlag(Message.Flag.INTERNAL) .putHeader(this.id, new VerifyHeader(VerifyHeader.ARE_YOU_DEAD, local_addr)); down_prot.down(msg); } } } protected void verifySuspectWithICMP(Address suspected_mbr) { InetAddress host=suspected_mbr instanceof IpAddress? 
((IpAddress)suspected_mbr).getIpAddress() : null; if(host == null) throw new IllegalArgumentException("suspected_mbr is not of type IpAddress - FD_ICMP only works with these"); try { if(log.isTraceEnabled()) log.trace("pinging host " + suspected_mbr + " using interface " + intf); long start=getCurrentTimeMillis(), stop; boolean rc=host.isReachable(intf, 0, (int)timeout); stop=getCurrentTimeMillis(); if(rc) // success log.trace("successfully received response from " + host + " (after " + (stop-start) + "ms)"); else { // failure log.debug("failed pinging " + suspected_mbr + " after " + (stop-start) + "ms; passing up SUSPECT event"); removeSuspect(suspected_mbr); up_prot.up(new Event(Event.SUSPECT, Collections.singletonList(suspected_mbr))); } } catch(Exception ex) { log.error(Util.getMessage("FailedPinging"),suspected_mbr, ex); } } /** * Adds suspected members to the suspect list. Returns true if a member is not present and the timer is not running. * @param list The list of suspected members * @return true if the timer needs to be started, or false otherwise */ protected boolean addSuspects(Collection<Address> list) { if(list == null || list.isEmpty()) return false; boolean added=false; synchronized(suspects) { for(Address suspected_mbr : list) { boolean found_dupe=suspects.stream().anyMatch(e -> e.suspect.equals(suspected_mbr)); if(!found_dupe) { suspects.add(new Entry(suspected_mbr, getCurrentTimeMillis() + timeout)); added=true; } } return (added && !running) && (running=true); } } protected boolean removeSuspect(Address suspect) { if(suspect == null) return false; synchronized(suspects) { return suspects.removeIf(e -> Objects.equals(e.suspect, suspect)); } } protected void clearSuspects() { synchronized(suspects) { suspects.clear(); } } public void unsuspect(Address mbr) { boolean removed=mbr != null && removeSuspect(mbr); if(removed) { log.trace("member " + mbr + " was unsuspected"); down_prot.down(new Event(Event.UNSUSPECT, mbr)); up_prot.up(new 
Event(Event.UNSUSPECT, mbr)); } } protected synchronized void startTimer() { timer=getThreadFactory().newThread(this,"VERIFY_SUSPECT.TimerThread"); timer.setDaemon(true); timer.start(); } public void init() throws Exception { super.init(); if(bind_addr != null) intf=NetworkInterface.getByInetAddress(bind_addr); } public synchronized void stop() { clearSuspects(); running=false; if(timer != null && timer.isAlive()) { Thread tmp=timer; timer=null; tmp.interrupt(); } timer=null; } private static long getCurrentTimeMillis() { return TimeUnit.NANOSECONDS.toMillis(System.nanoTime()); } /* ----------------------------- End of Private Methods -------------------------------- */ protected static class Entry implements Delayed { protected final Address suspect; protected final long target_time; public Entry(Address suspect, long target_time) { this.suspect=suspect; this.target_time=target_time; } public int compareTo(Delayed o) { Entry other=(Entry)o; long my_delay=getDelay(TimeUnit.MILLISECONDS), other_delay=other.getDelay(TimeUnit.MILLISECONDS); return Long.compare(my_delay, other_delay); } public long getDelay(TimeUnit unit) { long delay=target_time - getCurrentTimeMillis(); return unit.convert(delay, TimeUnit.MILLISECONDS); } public String toString() { return suspect + ": " + target_time; } } public static class VerifyHeader extends Header { static final short ARE_YOU_DEAD=1; // 'from' is sender of verify msg static final short I_AM_NOT_DEAD=2; // 'from' is suspected member short type=ARE_YOU_DEAD; Address from; // member who wants to verify that suspected_mbr is dead public VerifyHeader() { } // used for externalization VerifyHeader(short type) { this.type=type; } VerifyHeader(short type, Address from) { this(type); this.from=from; } public short getMagicId() {return 54;} public Supplier<? 
extends Header> create() {return VerifyHeader::new;} public String toString() { switch(type) { case ARE_YOU_DEAD: return "[VERIFY_SUSPECT: ARE_YOU_DEAD]"; case I_AM_NOT_DEAD: return "[VERIFY_SUSPECT: I_AM_NOT_DEAD]"; default: return "[VERIFY_SUSPECT: unknown type (" + type + ")]"; } } @Override public void writeTo(DataOutput out) throws IOException { out.writeShort(type); Util.writeAddress(from, out); } @Override public void readFrom(DataInput in) throws IOException, ClassNotFoundException { type=in.readShort(); from=Util.readAddress(in); } @Override public int serializedSize() { return Global.SHORT_SIZE + Util.size(from); } } }
/* * Copyright (C) 2011 Thomas Akehurst * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.tomakehurst.wiremock.stubbing; import com.github.tomakehurst.wiremock.common.LocalNotifier; import com.github.tomakehurst.wiremock.common.Notifier; import com.github.tomakehurst.wiremock.http.Request; import com.github.tomakehurst.wiremock.http.ResponseDefinition; import org.jmock.Mockery; import org.jmock.integration.junit4.JMock; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; import static com.github.tomakehurst.wiremock.http.RequestMethod.GET; import static com.github.tomakehurst.wiremock.http.RequestMethod.OPTIONS; import static com.github.tomakehurst.wiremock.http.RequestMethod.POST; import static com.github.tomakehurst.wiremock.http.RequestMethod.PUT; import static com.github.tomakehurst.wiremock.matching.RequestPatternBuilder.newRequestPattern; import static com.github.tomakehurst.wiremock.stubbing.Scenario.STARTED; import static com.github.tomakehurst.wiremock.testsupport.MockRequestBuilder.aRequest; import static java.net.HttpURLConnection.HTTP_NOT_FOUND; import static org.hamcrest.Matchers.is; import static org.hamcrest.MatcherAssert.assertThat; @RunWith(JMock.class) public class InMemoryMappingsTest { private InMemoryStubMappings mappings; private Mockery context; private Notifier notifier; @Before public void init() { mappings = 
new InMemoryStubMappings(); context = new Mockery(); notifier = context.mock(Notifier.class); } @After public void cleanUp() { LocalNotifier.set(null); } @Test public void correctlyAcceptsMappingAndReturnsCorrespondingResponse() { mappings.addMapping(new StubMapping( newRequestPattern(PUT, urlEqualTo("/some/resource")).build(), new ResponseDefinition(204, ""))); Request request = aRequest(context).withMethod(PUT).withUrl("/some/resource").build(); ResponseDefinition response = mappings.serveFor(request).getResponseDefinition(); assertThat(response.getStatus(), is(204)); } @Test public void returnsNotFoundWhenMethodIncorrect() { mappings.addMapping(new StubMapping( newRequestPattern(PUT, urlEqualTo("/some/resource")).build(), new ResponseDefinition(204, ""))); Request request = aRequest(context).withMethod(POST).withUrl("/some/resource").build(); ResponseDefinition response = mappings.serveFor(request).getResponseDefinition(); assertThat(response.getStatus(), is(HTTP_NOT_FOUND)); } @Test public void returnsNotFoundWhenUrlIncorrect() { mappings.addMapping(new StubMapping( newRequestPattern(PUT, urlEqualTo("/some/resource")).build(), new ResponseDefinition(204, ""))); Request request = aRequest(context).withMethod(PUT).withUrl("/some/bad/resource").build(); ResponseDefinition response = mappings.serveFor(request).getResponseDefinition(); assertThat(response.getStatus(), is(HTTP_NOT_FOUND)); } @Test public void returnsNotConfiguredResponseForUnmappedRequest() { Request request = aRequest(context).withMethod(OPTIONS).withUrl("/not/mapped").build(); ResponseDefinition response = mappings.serveFor(request).getResponseDefinition(); assertThat(response.getStatus(), is(HTTP_NOT_FOUND)); assertThat(response.wasConfigured(), is(false)); } @Test public void returnsMostRecentlyInsertedResponseIfTwoOrMoreMatch() { mappings.addMapping(new StubMapping( newRequestPattern(GET, urlEqualTo("/duplicated/resource")).build(), new ResponseDefinition(204, "Some content"))); 
mappings.addMapping(new StubMapping( newRequestPattern(GET, urlEqualTo("/duplicated/resource")).build(), new ResponseDefinition(201, "Desired content"))); ResponseDefinition response = mappings.serveFor(aRequest(context).withMethod(GET).withUrl("/duplicated/resource").build()).getResponseDefinition(); assertThat(response.getStatus(), is(201)); assertThat(response.getBody(), is("Desired content")); } @Test public void returnsMappingInScenarioOnlyWhenStateIsCorrect() { StubMapping firstGetMapping = new StubMapping( newRequestPattern(GET, urlEqualTo("/scenario/resource")).build(), new ResponseDefinition(204, "Initial content")); firstGetMapping.setScenarioName("TestScenario"); firstGetMapping.setRequiredScenarioState(STARTED); mappings.addMapping(firstGetMapping); StubMapping putMapping = new StubMapping( newRequestPattern(PUT, urlEqualTo("/scenario/resource")).build(), new ResponseDefinition(204, "")); putMapping.setScenarioName("TestScenario"); putMapping.setRequiredScenarioState(STARTED); putMapping.setNewScenarioState("Modified"); mappings.addMapping(putMapping); StubMapping secondGetMapping = new StubMapping( newRequestPattern(GET, urlEqualTo("/scenario/resource")).build(), new ResponseDefinition(204, "Modified content")); secondGetMapping.setScenarioName("TestScenario"); secondGetMapping.setRequiredScenarioState("Modified"); mappings.addMapping(secondGetMapping); Request firstGet = aRequest(context, "firstGet").withMethod(GET).withUrl("/scenario/resource").build(); Request put = aRequest(context, "put").withMethod(PUT).withUrl("/scenario/resource").build(); Request secondGet = aRequest(context, "secondGet").withMethod(GET).withUrl("/scenario/resource").build(); assertThat(mappings.serveFor(firstGet).getResponseDefinition().getBody(), is("Initial content")); mappings.serveFor(put); assertThat(mappings.serveFor(secondGet).getResponseDefinition().getBody(), is("Modified content")); } @Test public void returnsMappingInScenarioWithNoRequiredState() { StubMapping 
firstGetMapping = new StubMapping( newRequestPattern(GET, urlEqualTo("/scenario/resource")).build(), new ResponseDefinition(200, "Expected content")); firstGetMapping.setScenarioName("TestScenario"); mappings.addMapping(firstGetMapping); Request request = aRequest(context).withMethod(GET).withUrl("/scenario/resource").build(); assertThat(mappings.serveFor(request).getResponseDefinition().getBody(), is("Expected content")); } @Test public void supportsResetOfAllScenariosState() { StubMapping firstGetMapping = new StubMapping( newRequestPattern(GET, urlEqualTo("/scenario/resource")).build(), new ResponseDefinition(204, "Desired content")); firstGetMapping.setScenarioName("TestScenario"); firstGetMapping.setRequiredScenarioState(STARTED); mappings.addMapping(firstGetMapping); StubMapping putMapping = new StubMapping( newRequestPattern(PUT, urlEqualTo("/scenario/resource")).build(), new ResponseDefinition(204, "")); putMapping.setScenarioName("TestScenario"); putMapping.setRequiredScenarioState(STARTED); putMapping.setNewScenarioState("Modified"); mappings.addMapping(putMapping); mappings.serveFor( aRequest(context, "put /scenario/resource") .withMethod(PUT).withUrl("/scenario/resource").build()); ResponseDefinition response = mappings.serveFor( aRequest(context, "1st get /scenario/resource") .withMethod(GET).withUrl("/scenario/resource").build()).getResponseDefinition(); assertThat(response.wasConfigured(), is(false)); mappings.resetScenarios(); response = mappings.serveFor( aRequest(context, "2nd get /scenario/resource") .withMethod(GET).withUrl("/scenario/resource").build()).getResponseDefinition(); assertThat(response.getBody(), is("Desired content")); } @Test public void scenariosShouldBeResetWhenMappingsAreReset() { StubMapping firstMapping = aBasicMappingInScenario("Starting content"); firstMapping.setRequiredScenarioState(Scenario.STARTED); firstMapping.setNewScenarioState("modified"); mappings.addMapping(firstMapping); StubMapping secondMapping = 
aBasicMappingInScenario("Modified content"); secondMapping.setRequiredScenarioState("modified"); mappings.addMapping(secondMapping); Request request = aRequest(context).withMethod(POST).withUrl("/scenario/resource").build(); mappings.serveFor(request); assertThat(mappings.serveFor(request).getResponseDefinition().getBody(), is("Modified content")); mappings.reset(); StubMapping thirdMapping = aBasicMappingInScenario("Starting content"); thirdMapping.setRequiredScenarioState(Scenario.STARTED); mappings.addMapping(thirdMapping); assertThat(mappings.serveFor(request).getResponseDefinition().getBody(), is("Starting content")); } private StubMapping aBasicMappingInScenario(String body) { StubMapping mapping = new StubMapping( newRequestPattern(POST, urlEqualTo("/scenario/resource")).build(), new ResponseDefinition(200, body)); mapping.setScenarioName("TestScenario"); return mapping; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.gobblin.cluster; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.function.Function; import org.apache.helix.HelixDataAccessor; import org.apache.helix.HelixManager; import org.apache.helix.HelixProperty; import org.apache.helix.InstanceType; import org.apache.helix.NotificationContext; import org.apache.helix.api.listeners.ControllerChangeListener; import org.apache.helix.api.listeners.LiveInstanceChangeListener; import org.apache.helix.messaging.handling.HelixTaskResult; import org.apache.helix.messaging.handling.MessageHandler; import org.apache.helix.messaging.handling.MultiTypeMessageHandlerFactory; import org.apache.helix.model.LiveInstance; import org.apache.helix.model.Message; import org.apache.helix.task.TargetState; import org.apache.helix.task.TaskDriver; import org.apache.helix.task.WorkflowConfig; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import 
com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.eventbus.EventBus; import com.google.common.util.concurrent.MoreExecutors; import com.typesafe.config.Config; import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.apache.gobblin.cluster.event.ClusterManagerShutdownRequest; import org.apache.gobblin.configuration.ConfigurationKeys; import org.apache.gobblin.instrumented.Instrumented; import org.apache.gobblin.instrumented.StandardMetricsBridge; import org.apache.gobblin.metrics.ContextAwareHistogram; import org.apache.gobblin.metrics.MetricContext; import org.apache.gobblin.util.ConfigUtils; /** * Encapsulate all Helix related components: controller, participants, etc. * Provide all kinds of callbacks, listeners, message handlers that each Helix components need to register. */ @Slf4j public class GobblinHelixMultiManager implements StandardMetricsBridge { /** * Helix manager to handle cluster manager leader election. * Corresponds to cluster with key name {@link GobblinClusterConfigurationKeys#MANAGER_CLUSTER_NAME_KEY} iff dedicatedManagerCluster is true. * Corresponds to cluster with key name {@link GobblinClusterConfigurationKeys#HELIX_CLUSTER_NAME_KEY} iff dedicatedManagerCluster is false. */ @Getter private HelixManager managerClusterHelixManager = null; /** * Helix manager to handle job distribution. * Corresponds to cluster with key name {@link GobblinClusterConfigurationKeys#HELIX_CLUSTER_NAME_KEY}. */ @Getter private HelixManager jobClusterHelixManager = null; /** * Helix manager to handle planning job distribution. * Corresponds to cluster with key name {@link GobblinClusterConfigurationKeys#HELIX_CLUSTER_NAME_KEY}. */ @Getter private Optional<HelixManager> taskDriverHelixManager = Optional.empty(); /** * Helix controller for job distribution. Effective only iff below two conditions are established: * 1. 
In {@link GobblinHelixMultiManager#dedicatedManagerCluster} mode. * 2. {@link GobblinHelixMultiManager#dedicatedJobClusterController} is turned on. * Typically used for unit test and local deployment. */ private Optional<HelixManager> jobClusterController = Optional.empty(); /** * Helix controller for planning job distribution. Effective only iff below two conditions are established: * 1. In {@link GobblinHelixMultiManager#dedicatedManagerCluster} mode. * 2. {@link GobblinHelixMultiManager#dedicatedTaskDriverCluster} is turned on. * Typically used for unit test and local deployment. */ private Optional<HelixManager> taskDriverClusterController = Optional.empty(); /** * Separate manager cluster and job distribution cluster iff this flag is turned on. Otherwise {@link GobblinHelixMultiManager#jobClusterHelixManager} * is same as {@link GobblinHelixMultiManager#managerClusterHelixManager}. */ private boolean dedicatedManagerCluster = false; private boolean dedicatedTaskDriverCluster = false; /** * Create a dedicated controller for job distribution. 
 */
private boolean dedicatedJobClusterController = true;

// True while this instance holds cluster-manager leadership; managed elsewhere in the class.
@Getter
boolean isLeader = false;

// Standalone cluster mode; value is read from config in the constructor.
boolean isStandaloneMode = false;

// Shared stop flag owned by the enclosing GobblinClusterManager.
private final GobblinClusterManager.StopStatus stopStatus;

private final Config config;

// Bus used to publish cluster events (e.g. shutdown requests).
private final EventBus eventBus;

// Metrics wrapper around the Helix managers.
private final HelixManagerMetrics metrics;

// User-supplied Helix message handler factory, created once in the constructor.
private final MultiTypeMessageHandlerFactory userDefinedMessageHandlerFactory;

// Components notified on leadership changes; populated via addLeadershipChangeAwareComponent().
private final List<LeadershipChangeAwareComponent> leadershipChangeAwareComponents = Lists.newArrayList();

/**
 * Reads cluster-mode flags from {@code config}, sets up metrics, obtains the user-defined
 * message handler factory (the function is applied with {@code null}), and eagerly builds
 * the Helix managers via {@link #initialize()}.
 *
 * @param config cluster configuration
 * @param messageHandlerFactoryFunction supplies the user-defined message handler factory
 * @param eventBus event bus shared with the enclosing cluster manager
 * @param stopStatus shared stop-status flag
 */
public GobblinHelixMultiManager(
    Config config,
    Function<Void, MultiTypeMessageHandlerFactory> messageHandlerFactoryFunction,
    EventBus eventBus,
    GobblinClusterManager.StopStatus stopStatus) {
  this.config = config;
  this.eventBus = eventBus;
  this.stopStatus = stopStatus;
  this.isStandaloneMode = ConfigUtils.getBoolean(config, GobblinClusterConfigurationKeys.STANDALONE_CLUSTER_MODE_KEY,
      GobblinClusterConfigurationKeys.DEFAULT_STANDALONE_CLUSTER_MODE);

  MetricContext metricContext = Instrumented.getMetricContext(ConfigUtils.configToState(config), this.getClass());
  this.metrics = new HelixManagerMetrics(metricContext, this.config);
  this.dedicatedManagerCluster = ConfigUtils.getBoolean(config,
      GobblinClusterConfigurationKeys.DEDICATED_MANAGER_CLUSTER_ENABLED,false);
  this.dedicatedTaskDriverCluster = ConfigUtils.getBoolean(config,
      GobblinClusterConfigurationKeys.DEDICATED_TASK_DRIVER_CLUSTER_ENABLED, false);
  this.userDefinedMessageHandlerFactory = messageHandlerFactoryFunction.apply(null);
  // NOTE(review): initialize() is called from the constructor, so all Helix managers are
  // created before construction completes.
  initialize();
}

// Registers a component to be told about leadership changes.
protected void addLeadershipChangeAwareComponent (LeadershipChangeAwareComponent component) {
  this.leadershipChangeAwareComponents.add(component);
}

/**
 * Build the {@link HelixManager} for the Application Master.
*/ protected static HelixManager buildHelixManager(Config config, String clusterName, InstanceType type) { Preconditions.checkArgument(config.hasPath(GobblinClusterConfigurationKeys.ZK_CONNECTION_STRING_KEY)); String zkConnectionString = config.getString(GobblinClusterConfigurationKeys.ZK_CONNECTION_STRING_KEY); log.info("Using ZooKeeper connection string: " + zkConnectionString); String helixInstanceName = ConfigUtils.getString(config, GobblinClusterConfigurationKeys.HELIX_INSTANCE_NAME_KEY, GobblinClusterManager.class.getSimpleName()); return GobblinHelixManagerFactory.getZKHelixManager( config.getString(clusterName), helixInstanceName, type, zkConnectionString); } public void initialize() { if (this.dedicatedManagerCluster) { Preconditions.checkArgument(this.config.hasPath(GobblinClusterConfigurationKeys.MANAGER_CLUSTER_NAME_KEY)); log.info("We will use separate clusters to manage GobblinClusterManager and job distribution."); // This will create and register a Helix controller in ZooKeeper this.managerClusterHelixManager = buildHelixManager(this.config, GobblinClusterConfigurationKeys.MANAGER_CLUSTER_NAME_KEY, InstanceType.CONTROLLER); // This will create a Helix administrator to dispatch jobs to ZooKeeper this.jobClusterHelixManager = buildHelixManager(this.config, GobblinClusterConfigurationKeys.HELIX_CLUSTER_NAME_KEY, InstanceType.ADMINISTRATOR); // This will create a dedicated controller for job distribution this.dedicatedJobClusterController = ConfigUtils.getBoolean( this.config, GobblinClusterConfigurationKeys.DEDICATED_JOB_CLUSTER_CONTROLLER_ENABLED, true); if (this.dedicatedJobClusterController) { this.jobClusterController = Optional.of(GobblinHelixMultiManager .buildHelixManager(this.config, GobblinClusterConfigurationKeys.HELIX_CLUSTER_NAME_KEY, InstanceType.CONTROLLER)); } if (this.dedicatedTaskDriverCluster) { // This will create a Helix administrator to dispatch jobs to ZooKeeper this.taskDriverHelixManager = 
Optional.of(buildHelixManager(this.config, GobblinClusterConfigurationKeys.TASK_DRIVER_CLUSTER_NAME_KEY, InstanceType.ADMINISTRATOR)); /** * Create a dedicated controller for planning job distribution. */ boolean dedicatedTaskDriverClusterController = ConfigUtils .getBoolean(this.config, GobblinClusterConfigurationKeys.DEDICATED_TASK_DRIVER_CLUSTER_CONTROLLER_ENABLED, true); // This will create a dedicated controller for planning job distribution if (dedicatedTaskDriverClusterController) { this.taskDriverClusterController = Optional.of(GobblinHelixMultiManager .buildHelixManager(this.config, GobblinClusterConfigurationKeys.TASK_DRIVER_CLUSTER_NAME_KEY, InstanceType.CONTROLLER)); } } } else { log.info("We will use same cluster to manage GobblinClusterManager and job distribution."); // This will create and register a Helix controller in ZooKeeper boolean isHelixClusterManaged = ConfigUtils.getBoolean(this.config, GobblinClusterConfigurationKeys.IS_HELIX_CLUSTER_MANAGED, GobblinClusterConfigurationKeys.DEFAULT_IS_HELIX_CLUSTER_MANAGED); this.managerClusterHelixManager = buildHelixManager(this.config, GobblinClusterConfigurationKeys.HELIX_CLUSTER_NAME_KEY, isHelixClusterManaged ? 
InstanceType.PARTICIPANT : InstanceType.CONTROLLER); this.jobClusterHelixManager = this.managerClusterHelixManager; } } @VisibleForTesting protected void connect() { try { this.isLeader = false; this.managerClusterHelixManager.connect(); if (this.dedicatedManagerCluster) { if (jobClusterController.isPresent()) { this.jobClusterController.get().connect(); } if (this.dedicatedTaskDriverCluster) { if (taskDriverClusterController.isPresent()) { this.taskDriverClusterController.get().connect(); } } this.jobClusterHelixManager.connect(); if (this.taskDriverHelixManager.isPresent()) { this.taskDriverHelixManager.get().connect(); } } this.jobClusterHelixManager.addLiveInstanceChangeListener(new GobblinLiveInstanceChangeListener()); this.jobClusterHelixManager.getMessagingService() .registerMessageHandlerFactory(Message.MessageType.USER_DEFINE_MSG.toString(), userDefinedMessageHandlerFactory); this.jobClusterHelixManager.getMessagingService() .registerMessageHandlerFactory(GobblinHelixConstants.SHUTDOWN_MESSAGE_TYPE, new ControllerShutdownMessageHandlerFactory()); // standalone mode listens for controller change if (this.isStandaloneMode) { // Subscribe to leadership changes this.managerClusterHelixManager.addControllerListener((ControllerChangeListener) this::handleLeadershipChange); } } catch (Exception e) { log.error("HelixManager failed to connect", e); throw Throwables.propagate(e); } } protected boolean isConnected() { return managerClusterHelixManager.isConnected() && jobClusterHelixManager.isConnected(); } protected void disconnect() { if (managerClusterHelixManager.isConnected()) { this.managerClusterHelixManager.disconnect(); } if (this.dedicatedManagerCluster) { if (jobClusterHelixManager.isConnected()) { this.jobClusterHelixManager.disconnect(); } if (taskDriverHelixManager.isPresent()) { this.taskDriverHelixManager.get().disconnect(); } if (jobClusterController.isPresent() && jobClusterController.get().isConnected()) { 
this.jobClusterController.get().disconnect(); } if (taskDriverClusterController.isPresent() && taskDriverClusterController.get().isConnected()) { this.taskDriverClusterController.get().disconnect(); } } } /** * A custom implementation of {@link LiveInstanceChangeListener}. */ private static class GobblinLiveInstanceChangeListener implements LiveInstanceChangeListener { @Override public void onLiveInstanceChange(List<LiveInstance> liveInstances, NotificationContext changeContext) { if (log.isDebugEnabled()) { for (LiveInstance liveInstance : liveInstances) { log.debug("Live Helix participant instance: " + liveInstance.getInstanceName()); } } } } /** * Handle leadership change. * The applicationLauncher is only started on the leader. * The leader cleans up existing jobs before starting the applicationLauncher. * @param changeContext notification context */ @VisibleForTesting void handleLeadershipChange(NotificationContext changeContext) { this.metrics.clusterLeadershipChange.update(1); if (this.managerClusterHelixManager.isLeader()) { // can get multiple notifications on a leadership change, // so only start the application launcher the first time // the notification is received log.info("Leader notification for {} isLeader {} HM.isLeader {}", managerClusterHelixManager.getInstanceName(), isLeader, managerClusterHelixManager.isLeader()); if (!isLeader) { log.info("New Helix Controller leader {}", this.managerClusterHelixManager.getInstanceName()); cleanUpJobs(); for (LeadershipChangeAwareComponent c: this.leadershipChangeAwareComponents) { c.becomeActive(); } isLeader = true; } } else { // stop and reinitialize services since they are not restartable // this prepares them to start when this cluster manager becomes a leader if (isLeader) { isLeader = false; for (LeadershipChangeAwareComponent c: this.leadershipChangeAwareComponents) { c.becomeStandby(); } } } } /** * Delete jobs from the helix cluster */ @VisibleForTesting public void cleanUpJobs() { 
cleanUpJobs(this.jobClusterHelixManager); this.taskDriverHelixManager.ifPresent(this::cleanUpJobs); } private void cleanUpJobs(HelixManager helixManager) { // Clean up existing jobs TaskDriver taskDriver = new TaskDriver(helixManager); Map<String, WorkflowConfig> workflows = taskDriver.getWorkflows(); log.debug("cleanUpJobs workflow count {} workflows {}", workflows.size(), workflows.keySet()); boolean cleanupDistJobs = ConfigUtils.getBoolean(this.config, GobblinClusterConfigurationKeys.CLEAN_ALL_DIST_JOBS, GobblinClusterConfigurationKeys.DEFAULT_CLEAN_ALL_DIST_JOBS); for (Map.Entry<String, WorkflowConfig> entry : workflows.entrySet()) { String workflowName = entry.getKey(); if (workflowName.contains(GobblinClusterConfigurationKeys.PLANNING_JOB_NAME_PREFIX) || workflowName.contains(GobblinClusterConfigurationKeys.ACTUAL_JOB_NAME_PREFIX)) { if (!cleanupDistJobs) { log.info("Distributed job {} won't be deleted.", workflowName); continue; } } WorkflowConfig workflowConfig = entry.getValue(); // request delete if not already requested if (workflowConfig.getTargetState() != TargetState.DELETE) { taskDriver.delete(workflowName); log.info("Requested delete of workflowName {}", workflowName); } } } /** * A custom {@link MultiTypeMessageHandlerFactory} for {@link MessageHandler}s that handle messages of type * "SHUTDOWN" for shutting down the controller. */ private class ControllerShutdownMessageHandlerFactory implements MultiTypeMessageHandlerFactory { @Override public MessageHandler createHandler(Message message, NotificationContext context) { return new ControllerShutdownMessageHandler(message, context); } @Override public String getMessageType() { return GobblinHelixConstants.SHUTDOWN_MESSAGE_TYPE; } public List<String> getMessageTypes() { return Collections.singletonList(getMessageType()); } @Override public void reset() { } /** * A custom {@link MessageHandler} for handling messages of sub type * {@link HelixMessageSubTypes#APPLICATION_MASTER_SHUTDOWN}. 
*/ private class ControllerShutdownMessageHandler extends MessageHandler { public ControllerShutdownMessageHandler(Message message, NotificationContext context) { super(message, context); } @Override public HelixTaskResult handleMessage() { String messageSubType = this._message.getMsgSubType(); Preconditions.checkArgument( messageSubType.equalsIgnoreCase(HelixMessageSubTypes.APPLICATION_MASTER_SHUTDOWN.toString()), String.format("Unknown %s message subtype: %s", GobblinHelixConstants.SHUTDOWN_MESSAGE_TYPE, messageSubType)); HelixTaskResult result = new HelixTaskResult(); if (stopStatus.isStopInProgress()) { result.setSuccess(true); return result; } log.info("Handling message " + HelixMessageSubTypes.APPLICATION_MASTER_SHUTDOWN.toString()); ScheduledExecutorService shutdownMessageHandlingCompletionWatcher = MoreExecutors.getExitingScheduledExecutorService(new ScheduledThreadPoolExecutor(1)); // Schedule the task for watching on the removal of the shutdown message, which indicates that // the message has been successfully processed and it's safe to disconnect the HelixManager. // This is a hacky way of watching for the completion of processing the shutdown message and // should be replaced by a fix to https://issues.apache.org/jira/browse/HELIX-611. 
shutdownMessageHandlingCompletionWatcher.scheduleAtFixedRate(new Runnable() { @Override public void run() { HelixManager helixManager = _notificationContext.getManager(); HelixDataAccessor helixDataAccessor = helixManager.getHelixDataAccessor(); HelixProperty helixProperty = helixDataAccessor .getProperty(_message.getKey(helixDataAccessor.keyBuilder(), helixManager.getInstanceName())); // The absence of the shutdown message indicates it has been removed if (helixProperty == null) { eventBus.post(new ClusterManagerShutdownRequest()); } } }, 0, 1, TimeUnit.SECONDS); result.setSuccess(true); return result; } @Override public void onError(Exception e, ErrorCode code, ErrorType type) { log.error( String.format("Failed to handle message with exception %s, error code %s, error type %s", e, code, type)); } } } /** * A custom {@link MultiTypeMessageHandlerFactory} for {@link ControllerUserDefinedMessageHandler}s that * handle messages of type {@link org.apache.helix.model.Message.MessageType#USER_DEFINE_MSG}. */ static class ControllerUserDefinedMessageHandlerFactory implements MultiTypeMessageHandlerFactory { @Override public MessageHandler createHandler(Message message, NotificationContext context) { return new ControllerUserDefinedMessageHandler(message, context); } @Override public String getMessageType() { return Message.MessageType.USER_DEFINE_MSG.toString(); } public List<String> getMessageTypes() { return Collections.singletonList(getMessageType()); } @Override public void reset() { } /** * A custom {@link MessageHandler} for handling user-defined messages to the controller. * * <p> * Currently does not handle any user-defined messages. If this class is passed a custom message, it will simply * print out a warning and return successfully. Sub-classes of {@link GobblinClusterManager} should override * {@link GobblinClusterManager#getUserDefinedMessageHandlerFactory()}. 
* </p> */ private static class ControllerUserDefinedMessageHandler extends MessageHandler { public ControllerUserDefinedMessageHandler(Message message, NotificationContext context) { super(message, context); } @Override public HelixTaskResult handleMessage() { log.warn(String .format("No handling setup for %s message of subtype: %s", Message.MessageType.USER_DEFINE_MSG.toString(), this._message.getMsgSubType())); HelixTaskResult helixTaskResult = new HelixTaskResult(); helixTaskResult.setSuccess(true); return helixTaskResult; } @Override public void onError(Exception e, ErrorCode code, ErrorType type) { log.error( String.format("Failed to handle message with exception %s, error code %s, error type %s", e, code, type)); } } } /** * Helix related metrics */ private static class HelixManagerMetrics extends StandardMetricsBridge.StandardMetrics { public static final String CLUSTER_LEADERSHIP_CHANGE = "clusterLeadershipChange"; private final ContextAwareHistogram clusterLeadershipChange; public HelixManagerMetrics(final MetricContext metricContext, final Config config) { int timeWindowSizeInMinutes = ConfigUtils.getInt(config, ConfigurationKeys.METRIC_TIMER_WINDOW_SIZE_IN_MINUTES, ConfigurationKeys.DEFAULT_METRIC_TIMER_WINDOW_SIZE_IN_MINUTES); this.clusterLeadershipChange = metricContext.contextAwareHistogram(CLUSTER_LEADERSHIP_CHANGE, timeWindowSizeInMinutes, TimeUnit.MINUTES); this.contextAwareMetrics.add(clusterLeadershipChange); } @Override public String getName() { return GobblinClusterManager.class.getName(); } } @Override public Collection<StandardMetrics> getStandardMetricsCollection() { return ImmutableList.of(this.metrics); } }
package com.ivan.utils.math.bits;

/**
 * Manual smoke-test harness for {@code BitMath}: exercises each bit-manipulation
 * routine with representative inputs and prints the results to stdout for eyeball
 * verification. There are no assertions — output must be inspected by hand.
 */
public class BitMathTest {
    public static void main(final String[] args) {
        // Walk through successive values with the same popcount, starting from 7 (0b111).
        int i = 7;
        printBin(i);
        for (int k = 0; k < 10; k++) {
            i = BitMath.nextWithSameNumberOfOneBits(i);
            printBin(i);
        }

        // Power-of-two rounding on byte overloads.
        System.out.println();
        System.out.println(BitMath.nextPowerOfTwo((byte) 17));
        System.out.println(BitMath.prevPowerOfTwo((byte) 17));

        // Popcount comparison (int overload).
        System.out.println();
        printCmp(1, 2);
        printCmp(1, 3);
        printCmp(1, 7);
        printCmp(2, 1);
        printCmp(3, 1);
        printCmp(7, 1);

        // Popcount difference, including negative operands.
        System.out.println();
        printDiff(0, -1);
        printDiff(1, 2);
        printDiff(1, 3);
        printDiff(1, 7);
        printDiff(2, 1);
        printDiff(3, 1);
        printDiff(7, 1);
        printDiff(-1, 0);

        // Bit parity of small ints.
        System.out.println();
        printParity(1);
        printParity(2);
        printParity(3);
        printParity(4);
        printParity(5);
        printParity(6);
        printParity(7);

        // Left-most zero byte within an int.
        System.out.println();
        printLeftMostZeroByte(0x00000000);
        printLeftMostZeroByte(0x00000125);
        printLeftMostZeroByte(0xff170000);
        printLeftMostZeroByte(0x9f00116f);
        printLeftMostZeroByte(0x80140013);
        printLeftMostZeroByte(0x1acbde00);
        printLeftMostZeroByte(0x00794f8c);
        printLeftMostZeroByte(0xdeadbeef);

        // Right-most zero byte within an int (same inputs for comparison).
        System.out.println();
        printRightMostZeroByte(0x00000000);
        printRightMostZeroByte(0x00000125);
        printRightMostZeroByte(0xff170000);
        printRightMostZeroByte(0x9f00116f);
        printRightMostZeroByte(0x80140013);
        printRightMostZeroByte(0x1acbde00);
        printRightMostZeroByte(0x00794f8c);
        printRightMostZeroByte(0xdeadbeef);

        // Single-shot checks of the byte/short overloads.
        System.out.println();
        System.out.println(BitMath.nextWithSameNumberOfOneBits((byte) 0x80));
        System.out.println();
        System.out.println(BitMath.numberOfLeadingZeros((short) 0x3000));
        System.out.println();
        System.out.println(BitMath.bitCount((byte) 0x8f));
        System.out.println();
        System.out.println(BitMath.signum((byte) 0x00));
        System.out.println();
        System.out.println(hex(BitMath.reverseBytes((short) 0x8040)));

        // Bit reversal; mask to the unsigned width so the binary rendering is stable.
        System.out.println();
        System.out.println(bin(0x8040) + " -> " + bin(BitMath.reverse((short) 0x8040) & 0xffff));
        System.out.println(bin(0x75) + " -> " + bin(BitMath.reverse((byte) 0x75) & 0xff));

        // Rotations on short and byte overloads.
        System.out.println();
        System.out.println(bin(0x003f) + " -> " + bin(BitMath.rotateRight((short) 0x003f, 5) & 0xffff));
        System.out.println(bin(0x3f) + " -> " + bin(BitMath.rotateRight((byte) 0x3f, 5) & 0xff));
        System.out.println();
        System.out.println(bin(0xfc00) + " -> " + bin(BitMath.rotateLeft((short) 0xfc00, 5) & 0xffff));
        System.out.println(bin(0xfc) + " -> " + bin(BitMath.rotateLeft((byte) 0xfc, 5) & 0xff));

        // Parity across long/short/byte overloads.
        System.out.println();
        System.out.println(BitMath.parity(1L));
        System.out.println(BitMath.parity(0xffffffffffffffefL));
        System.out.println(BitMath.parity((short) 0xfffeL));
        System.out.println(BitMath.parity((short) 0xffffL));
        System.out.println(BitMath.parity((byte) 0xfeL));
        System.out.println(BitMath.parity((byte) 0xffL));

        // compareBitCount byte overload.
        System.out.println();
        System.out.println(BitMath.compareBitCount((byte) 1, (byte) 2));
        System.out.println(BitMath.compareBitCount((byte) 1, (byte) 3));
        System.out.println(BitMath.compareBitCount((byte) 1, (byte) 7));
        System.out.println(BitMath.compareBitCount((byte) 2, (byte) 1));
        System.out.println(BitMath.compareBitCount((byte) 3, (byte) 1));
        System.out.println(BitMath.compareBitCount((byte) 7, (byte) 1));

        // compareBitCount short overload.
        System.out.println();
        System.out.println(BitMath.compareBitCount((short) 1, (short) 2));
        System.out.println(BitMath.compareBitCount((short) 1, (short) 3));
        System.out.println(BitMath.compareBitCount((short) 1, (short) 7));
        System.out.println(BitMath.compareBitCount((short) 2, (short) 1));
        System.out.println(BitMath.compareBitCount((short) 3, (short) 1));
        System.out.println(BitMath.compareBitCount((short) 7, (short) 1));

        // bitCountDiff long overload, including all-ones extremes.
        System.out.println();
        System.out.println(BitMath.bitCountDiff(1L, 2L));
        System.out.println(BitMath.bitCountDiff(1L, 3L));
        System.out.println(BitMath.bitCountDiff(1L, 7L));
        System.out.println(BitMath.bitCountDiff(1L, 0xFFFFFFFFFFFFFFFFL));
        System.out.println(BitMath.bitCountDiff(0L, 0xFFFFFFFFFFFFFFFFL));
        System.out.println(BitMath.bitCountDiff(2L, 1L));
        System.out.println(BitMath.bitCountDiff(3L, 1L));
        System.out.println(BitMath.bitCountDiff(7L, 1L));
        System.out.println(BitMath.bitCountDiff(0xFFFFFFFFFFFFFFFFL, 1L));
        System.out.println(BitMath.bitCountDiff(0xFFFFFFFFFFFFFFFFL, 0L));

        // bitCountDiff short overload.
        System.out.println();
        System.out.println(BitMath.bitCountDiff((short) 1, (short) 2));
        System.out.println(BitMath.bitCountDiff((short) 1, (short) 3));
        System.out.println(BitMath.bitCountDiff((short) 1, (short) 7));
        System.out.println(BitMath.bitCountDiff((short) 1, (short) 0xFFFF));
        System.out.println(BitMath.bitCountDiff((short) 0, (short) 0xFFFF));
        System.out.println(BitMath.bitCountDiff((short) 2, (short) 1));
        System.out.println(BitMath.bitCountDiff((short) 3, (short) 1));
        System.out.println(BitMath.bitCountDiff((short) 7, (short) 1));
        System.out.println(BitMath.bitCountDiff((short) 0xFFFF, (short) 1));
        System.out.println(BitMath.bitCountDiff((short) 0xFFFF, (short) 0));

        // bitCountDiff byte overload.
        System.out.println();
        System.out.println(BitMath.bitCountDiff((byte) 1, (byte) 2));
        System.out.println(BitMath.bitCountDiff((byte) 1, (byte) 3));
        System.out.println(BitMath.bitCountDiff((byte) 1, (byte) 7));
        System.out.println(BitMath.bitCountDiff((byte) 1, (byte) 0xFF));
        System.out.println(BitMath.bitCountDiff((byte) 0, (byte) 0xFF));
        System.out.println(BitMath.bitCountDiff((byte) 2, (byte) 1));
        System.out.println(BitMath.bitCountDiff((byte) 3, (byte) 1));
        System.out.println(BitMath.bitCountDiff((byte) 7, (byte) 1));
        System.out.println(BitMath.bitCountDiff((byte) 0xFF, (byte) 1));
        System.out.println(BitMath.bitCountDiff((byte) 0xFF, (byte) 0));

        // rightMostZeroByte long overload: sweep the zero byte across positions.
        System.out.println();
        System.out.println(BitMath.rightMostZeroByte(0x0000000000000000L));
        System.out.println(BitMath.rightMostZeroByte(0x00000000000000ffL));
        System.out.println(BitMath.rightMostZeroByte(0x000000000000ffffL));
        System.out.println(BitMath.rightMostZeroByte(0x0000000000ffffffL));
        System.out.println(BitMath.rightMostZeroByte(0x00000000ffffffffL));
        System.out.println(BitMath.rightMostZeroByte(0x000000ffffffffffL));
        System.out.println(BitMath.rightMostZeroByte(0x0000ffffffffffffL));
        System.out.println(BitMath.rightMostZeroByte(0x00ffffffffffffffL));
        System.out.println(BitMath.rightMostZeroByte(0xffffffffffffffffL));
        System.out.println(BitMath.rightMostZeroByte(0x00000000000000ffL));
        System.out.println(BitMath.rightMostZeroByte(0x000000000000ff00L));
        System.out.println(BitMath.rightMostZeroByte(0x0000000000ff00ffL));
        System.out.println(BitMath.rightMostZeroByte(0x00000000ff00ffffL));
        System.out.println(BitMath.rightMostZeroByte(0x000000ff00ffffffL));
        System.out.println(BitMath.rightMostZeroByte(0x0000ff00ffffffffL));
        System.out.println(BitMath.rightMostZeroByte(0x00ff00ffffffffffL));
        System.out.println(BitMath.rightMostZeroByte(0xff00ffffffffffffL));

        // rightMostZeroByte short overload.
        System.out.println();
        System.out.println(BitMath.rightMostZeroByte((short) 0x0000));
        System.out.println(BitMath.rightMostZeroByte((short) 0xff00));
        System.out.println(BitMath.rightMostZeroByte((short) 0x00ff));
        System.out.println(BitMath.rightMostZeroByte((short) 0xffff));

        // leftMostZeroByte long overload: mirror sweep from the high end.
        System.out.println();
        System.out.println(BitMath.leftMostZeroByte(0x0000000000000000L));
        System.out.println(BitMath.leftMostZeroByte(0xff00000000000000L));
        System.out.println(BitMath.leftMostZeroByte(0xffff000000000000L));
        System.out.println(BitMath.leftMostZeroByte(0xffffff0000000000L));
        System.out.println(BitMath.leftMostZeroByte(0xffffffff00000000L));
        System.out.println(BitMath.leftMostZeroByte(0xffffffffff000000L));
        System.out.println(BitMath.leftMostZeroByte(0xffffffffffff0000L));
        System.out.println(BitMath.leftMostZeroByte(0xffffffffffffff00L));
        System.out.println(BitMath.leftMostZeroByte(0xffffffffffffffffL));
        System.out.println(BitMath.leftMostZeroByte(0xff00000000000000L));
        System.out.println(BitMath.leftMostZeroByte(0x00ff000000000000L));
        System.out.println(BitMath.leftMostZeroByte(0xff00ff0000000000L));
        System.out.println(BitMath.leftMostZeroByte(0xffff00ff00000000L));
        System.out.println(BitMath.leftMostZeroByte(0xffffff00ff000000L));
        System.out.println(BitMath.leftMostZeroByte(0xffffffff00ff0000L));
        System.out.println(BitMath.leftMostZeroByte(0xffffffffff00ff00L));
        System.out.println(BitMath.leftMostZeroByte(0xffffffffffff00ffL));

        // leftMostZeroByte short overload.
        System.out.println();
        System.out.println(BitMath.leftMostZeroByte((short) 0x0000));
        System.out.println(BitMath.leftMostZeroByte((short) 0x00ff));
        System.out.println(BitMath.leftMostZeroByte((short) 0xff00));
        System.out.println(BitMath.leftMostZeroByte((short) 0xffff));

        // Single-bit set/clear/toggle, including out-of-range positions (39, 32, 33)
        // whose behavior depends on BitMath's shift handling.
        System.out.println();
        System.out.println(bin(25) + " -> " + bin(BitMath.setBit(25, 9)));
        System.out.println(bin(25) + " -> " + bin(BitMath.setBit(25, 39)));
        System.out.println(bin(25) + " -> " + bin(BitMath.clearBit(25, 3)));
        System.out.println(bin(25) + " -> " + bin(BitMath.clearBit(25, 32)));
        System.out.println(bin(25) + " -> " + bin(BitMath.toggleBit(25, 3)));
        System.out.println(bin(25) + " -> " + bin(BitMath.toggleBit(25, 5)));
        System.out.println(bin(25) + " -> " + bin(BitMath.toggleBit(25, 32)));
        System.out.println(bin(25) + " -> " + bin(BitMath.toggleBit(25, 33)));
    }

    /** Renders {@code i} in binary without leading zeros. */
    private static String bin(final int i) {
        return Integer.toBinaryString(i);
    }

    /** Renders {@code i} as exactly 8 zero-padded lowercase hex digits. */
    private static String hex(final int i) {
        final String zeros = "00000000";
        final String hex = Integer.toHexString(i);
        return zeros.substring(hex.length()) + hex;
    }

    /** Prints the binary form of {@code i} together with its popcount. */
    private static void printBin(final int i) {
        System.out.println(bin(i) + " - " + BitMath.bitCount(i) + " bits");
    }

    /** Prints a popcount comparison of {@code x} and {@code y} as one of {@code < > =}. */
    private static void printCmp(final int x, final int y) {
        final int cmp = BitMath.compareBitCount(x, y);
        char cmpChar;
        cmpChar = cmp < 0 ? '<' : cmp > 0 ? '>' : '=';
        System.out.println("pop(" + bin(x) + ") " + cmpChar + " pop(" + bin(y) + ")");
    }

    /** Prints pop(x) - pop(y). */
    private static void printDiff(final int x, final int y) {
        final int diff = BitMath.bitCountDiff(x, y);
        System.out.println("pop(" + bin(x) + ") - pop(" + bin(y) + ") = " + diff);
    }

    /** Prints the bit parity of {@code x}. */
    private static void printParity(final int x) {
        final int parity = BitMath.parity(x);
        System.out.println("parity(" + bin(x) + ") = " + parity);
    }

    /** Prints the left-most zero-byte index of {@code x} (input shown in hex). */
    private static void printLeftMostZeroByte(final int x) {
        final int lmzb = BitMath.leftMostZeroByte(x);
        System.out.println("lmzb(" + hex(x) + ") = " + lmzb);
    }

    /** Prints the right-most zero-byte index of {@code x} (input shown in hex). */
    private static void printRightMostZeroByte(final int x) {
        final int rmzb = BitMath.rightMostZeroByte(x);
        System.out.println("rmzb(" + hex(x) + ") = " + rmzb);
    }
}
package ca.uhn.fhir.rest.server;

import static ca.uhn.fhir.util.UrlUtil.escape;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;

import org.apache.commons.io.IOUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.message.BasicNameValuePair;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

import com.google.common.net.UrlEscapers;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.api.Bundle;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.base.composite.BaseCodingDt;
import ca.uhn.fhir.model.dstu.composite.CodingDt;
import ca.uhn.fhir.model.dstu.resource.BaseResource;
import ca.uhn.fhir.model.dstu.resource.Observation;
import ca.uhn.fhir.model.dstu.resource.Patient;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.narrative.DefaultThymeleafNarrativeGenerator;
import ca.uhn.fhir.rest.annotation.Create;
import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.annotation.IncludeParam;
import ca.uhn.fhir.rest.annotation.OptionalParam;
import ca.uhn.fhir.rest.annotation.RequiredParam;
import ca.uhn.fhir.rest.annotation.ResourceParam;
import ca.uhn.fhir.rest.annotation.Search;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.client.IGenericClient;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringAndListParam;
import ca.uhn.fhir.rest.param.StringOrListParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenOrListParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.util.PortUtil;

/**
 * Integration tests for DSTU1 search operations against an embedded Jetty-hosted
 * {@code RestfulServer}. Providers capture request details (includes, and-lists) into static
 * fields that individual tests assert on. The server/client fixtures are presumably set up in a
 * {@code @BeforeClass} method beyond this excerpt — verify against the full file.
 */
public class SearchSearchServerDstu1Test {

	private static CloseableHttpClient ourClient;
	private static FhirContext ourCtx = FhirContext.forDstu1();
	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SearchSearchServerDstu1Test.class);
	private static int ourPort;
	private static Server ourServer;
	private static RestfulServer ourServlet;
	private static IServerAddressStrategy ourDefaultAddressStrategy;
	// Captured by the search providers so tests can inspect what the server parsed.
	private static Set<Include> ourLastIncludes;
	private static StringAndListParam ourLastAndList;

	/** Resets per-test capture state and restores the default address strategy. */
	@Before
	public void before() {
		ourServlet.setServerAddressStrategy(ourDefaultAddressStrategy);
		ourLastIncludes = null;
		ourLastAndList = null;
	}

	/**
	 * Verifies that backslash-escaped separators (',' '$' '|') in query parameter values are
	 * unescaped correctly and that ':exact' qualifiers are honored when building the and/or list.
	 */
	@Test
	public void testParseEscapedValues() throws Exception {
		StringBuilder b = new StringBuilder();
		b.append("http://localhost:");
		b.append(ourPort);
		b.append("/Patient?");
		b.append(escape("findPatientWithAndList")).append('=').append(escape("NE\\,NE,NE\\,NE")).append('&');
		b.append(escape("findPatientWithAndList")).append('=').append(escape("NE\\\\NE")).append('&');
		b.append(escape("findPatientWithAndList:exact")).append('=').append(escape("E\\$E")).append('&');
		b.append(escape("findPatientWithAndList:exact")).append('=').append(escape("E\\|E")).append('&');

		HttpGet httpGet = new HttpGet(b.toString());

		HttpResponse status = ourClient.execute(httpGet);
		String responseContent = IOUtils.toString(status.getEntity().getContent());
		IOUtils.closeQuietly(status.getEntity().getContent());
		ourLog.info(responseContent);
		assertEquals(200, status.getStatusLine().getStatusCode());

		assertEquals(4, ourLastAndList.getValuesAsQueryTokens().size());
		assertEquals(2, ourLastAndList.getValuesAsQueryTokens().get(0).getValuesAsQueryTokens().size());
		assertFalse(ourLastAndList.getValuesAsQueryTokens().get(0).getValuesAsQueryTokens().get(0).isExact());
		assertEquals("NE,NE", ourLastAndList.getValuesAsQueryTokens().get(0).getValuesAsQueryTokens().get(0).getValue());
		assertEquals("NE,NE", ourLastAndList.getValuesAsQueryTokens().get(0).getValuesAsQueryTokens().get(1).getValue());
		assertEquals("NE\\NE", ourLastAndList.getValuesAsQueryTokens().get(1).getValuesAsQueryTokens().get(0).getValue());
		assertTrue(ourLastAndList.getValuesAsQueryTokens().get(2).getValuesAsQueryTokens().get(0).isExact());
		assertEquals("E$E", ourLastAndList.getValuesAsQueryTokens().get(2).getValuesAsQueryTokens().get(0).getValue());
		assertEquals("E|E", ourLastAndList.getValuesAsQueryTokens().get(3).getValuesAsQueryTokens().get(0).getValue());
	}

	/** Absolute references on the server's own base should be encoded as relative references. */
	@Test
	public void testEncodeConvertsReferencesToRelative() throws Exception {
		HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient?_query=searchWithRef");
		HttpResponse status = ourClient.execute(httpGet);
		String responseContent = IOUtils.toString(status.getEntity().getContent());
		IOUtils.closeQuietly(status.getEntity().getContent());
		ourLog.info(responseContent);
		assertEquals(200, status.getStatusLine().getStatusCode());
		Patient patient = (Patient) ourCtx.newXmlParser().parseBundle(responseContent).getEntries().get(0).getResource();
		String ref = patient.getManagingOrganization().getReference().getValue();
		assertEquals("Organization/555", ref);
	}

	/** An empty optional parameter value (?_id=) should be treated as absent, not as "". */
	@Test
	public void testOmitEmptyOptionalParam() throws Exception {
		HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient?_id=");
		HttpResponse status = ourClient.execute(httpGet);
		String responseContent = IOUtils.toString(status.getEntity().getContent());
		IOUtils.closeQuietly(status.getEntity().getContent());
		assertEquals(200, status.getStatusLine().getStatusCode());
		Bundle bundle = ourCtx.newXmlParser().parseBundle(responseContent);
		assertEquals(1, bundle.getEntries().size());

		Patient p = bundle.getResources(Patient.class).get(0);
		assertEquals(null, p.getNameFirstRep().getFamilyFirstRep().getValue());
	}

	/** Provider-supplied search/alternate links should be preserved (alternate made absolute). */
	@Test
	public void testReturnLinks() throws Exception {
		HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient?_query=findWithLinks");
		CloseableHttpResponse status = ourClient.execute(httpGet);
		String responseContent = IOUtils.toString(status.getEntity().getContent());
		IOUtils.closeQuietly(status.getEntity().getContent());
		assertEquals(200, status.getStatusLine().getStatusCode());
		Bundle bundle = ourCtx.newXmlParser().parseBundle(responseContent);
		assertEquals(10, bundle.getEntries().size());

		Patient p = bundle.getResources(Patient.class).get(0);
		assertEquals("AAANamed", p.getIdentifierFirstRep().getValue().getValue());
		assertEquals("http://foo/Patient?_id=1", bundle.getEntries().get(0).getLinkSearch().getValue());
		assertEquals("http://localhost:" + ourPort + "/Patient/99881", bundle.getEntries().get(0).getLinkAlternate().getValue());
		assertEquals("http://foo/Patient?_id=1", ResourceMetadataKeyEnum.LINK_SEARCH.get(p));
		assertEquals("http://localhost:" + ourPort + "/Patient/99881", ResourceMetadataKeyEnum.LINK_ALTERNATE.get(p));
	}

	/** No _include parameters: the provider should receive an empty (non-null) include set. */
	@Test
	public void testSearchIncludesParametersNone() throws Exception {
		HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient?_query=searchIncludes");
		CloseableHttpResponse status = ourClient.execute(httpGet);
		IOUtils.toString(status.getEntity().getContent());
		IOUtils.closeQuietly(status.getEntity().getContent());
		assertEquals(200, status.getStatusLine().getStatusCode());

		assertThat(ourLastIncludes, empty());
	}

	/** _include and _include:recurse values should arrive with the recurse flag set accordingly. */
	@Test
	public void testSearchIncludesParametersIncludes() throws Exception {
		HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient?_query=searchIncludes&_include=foo&_include:recurse=bar");
		CloseableHttpResponse status = ourClient.execute(httpGet);
		IOUtils.toString(status.getEntity().getContent());
		IOUtils.closeQuietly(status.getEntity().getContent());
		assertEquals(200, status.getStatusLine().getStatusCode());

		assertEquals(2, ourLastIncludes.size());
		assertThat(ourLastIncludes, containsInAnyOrder(new Include("foo", false), new Include("bar", true)));
	}

	/** Same as above but for a provider that declares its include parameter as a List. */
	@Test
	public void testSearchIncludesParametersIncludesList() throws Exception {
		HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient?_query=searchIncludesList&_include=foo&_include:recurse=bar");
		CloseableHttpResponse status = ourClient.execute(httpGet);
		IOUtils.toString(status.getEntity().getContent());
		IOUtils.closeQuietly(status.getEntity().getContent());
		assertEquals(200, status.getStatusLine().getStatusCode());

		assertEquals(2, ourLastIncludes.size());
		assertThat(ourLastIncludes, containsInAnyOrder(new Include("foo", false), new Include("bar", true)));
	}

	/**
	 * #149
	 * With a hardcoded address strategy, generated links (base/self/alternate/paging) must use the
	 * configured base while provider-supplied search links remain untouched; also pages through to
	 * the second result page via the returned "next" link.
	 */
	@Test
	public void testReturnLinksWithAddressStrategy() throws Exception {
		ourServlet.setServerAddressStrategy(new HardcodedServerAddressStrategy("https://blah.com/base"));

		HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient?_query=findWithLinks");
		CloseableHttpResponse status = ourClient.execute(httpGet);
		String responseContent = IOUtils.toString(status.getEntity().getContent());
		IOUtils.closeQuietly(status.getEntity().getContent());
		assertEquals(200, status.getStatusLine().getStatusCode());
		Bundle bundle = ourCtx.newXmlParser().parseBundle(responseContent);
		ourLog.info(responseContent);

		assertEquals(10, bundle.getEntries().size());
		assertEquals("https://blah.com/base", bundle.getLinkBase().getValue());
		assertEquals("https://blah.com/base/Patient?_query=findWithLinks", bundle.getLinkSelf().getValue());

		Patient p = bundle.getResources(Patient.class).get(0);
		assertEquals("AAANamed", p.getIdentifierFirstRep().getValue().getValue());
		assertEquals("http://foo/Patient?_id=1", bundle.getEntries().get(0).getLinkSearch().getValue());
		assertEquals("https://blah.com/base/Patient/99881", bundle.getEntries().get(0).getLinkAlternate().getValue());
		assertEquals("http://foo/Patient?_id=1", ResourceMetadataKeyEnum.LINK_SEARCH.get(p));
		assertEquals("https://blah.com/base/Patient/99881", ResourceMetadataKeyEnum.LINK_ALTERNATE.get(p));

		String linkNext = bundle.getLinkNext().getValue();
		ourLog.info(linkNext);
		assertThat(linkNext, startsWith("https://blah.com/base?_getpages="));

		/*
		 * Load the second page
		 */
		String urlPart = linkNext.substring(linkNext.indexOf('?'));
		String link = "http://localhost:" + ourPort + urlPart;
		httpGet = new HttpGet(link);
		status = ourClient.execute(httpGet);
		responseContent = IOUtils.toString(status.getEntity().getContent());
		IOUtils.closeQuietly(status.getEntity().getContent());
		assertEquals(200, status.getStatusLine().getStatusCode());
		bundle = ourCtx.newXmlParser().parseBundle(responseContent);
		ourLog.info(responseContent);

		assertEquals(10, bundle.getEntries().size());
		assertEquals("https://blah.com/base", bundle.getLinkBase().getValue());
		assertEquals(linkNext, bundle.getLinkSelf().getValue());

		p = bundle.getResources(Patient.class).get(0);
		assertEquals("AAANamed", p.getIdentifierFirstRep().getValue().getValue());
		assertEquals("http://foo/Patient?_id=11", bundle.getEntries().get(0).getLinkSearch().getValue());
		assertEquals("https://blah.com/base/Patient/998811", bundle.getEntries().get(0).getLinkAlternate().getValue());
		assertEquals("http://foo/Patient?_id=11", ResourceMetadataKeyEnum.LINK_SEARCH.get(p));
		assertEquals("https://blah.com/base/Patient/998811", ResourceMetadataKeyEnum.LINK_ALTERNATE.get(p));
	}

	/**
	 * Try loading the page as a POST just to make sure we get the right error
	 */
	@Test
	public void testGetPagesWithPost() throws Exception {

		HttpPost httpPost = new HttpPost("http://localhost:" + ourPort);
		List<? extends NameValuePair> parameters = Collections.singletonList(new BasicNameValuePair("_getpages", "AAA"));
		httpPost.setEntity(new UrlEncodedFormEntity(parameters));

		CloseableHttpResponse status = ourClient.execute(httpPost);
		String responseContent = IOUtils.toString(status.getEntity().getContent());
		IOUtils.closeQuietly(status.getEntity().getContent());
		ourLog.info(responseContent);
		assertEquals(400, status.getStatusLine().getStatusCode());
		// assertThat(responseContent, containsString("Requests for _getpages must use HTTP GET"));
	}

	/** Plain search by _id over raw HTTP. */
	@Test
	public void testSearchById() throws Exception {
		HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient?_id=aaa");
		HttpResponse status = ourClient.execute(httpGet);
		String responseContent = IOUtils.toString(status.getEntity().getContent());
		IOUtils.closeQuietly(status.getEntity().getContent());
		assertEquals(200, status.getStatusLine().getStatusCode());
		Bundle bundle = ourCtx.newXmlParser().parseBundle(responseContent);
		assertEquals(1, bundle.getEntries().size());

		Patient p = bundle.getResources(Patient.class).get(0);
		assertEquals("idaaa", p.getNameFirstRep().getFamilyAsSingleString());
	}

	/** Same _id search exercised through the fluent generic client API. */
	@Test
	public void testSearchByIdUsingClient() throws Exception {
		IGenericClient client = ourCtx.newRestfulGenericClient("http://localhost:" + ourPort);
		Bundle bundle = client.search().forResource("Patient").where(BaseResource.RES_ID.matches().value("aaa")).execute();
		assertEquals(1, bundle.getEntries().size());

		Patient p = bundle.getResources(Patient.class).get(0);
assertEquals("idaaa", p.getNameFirstRep().getFamilyAsSingleString()); } @Test public void testSearchWithOrList() throws Exception { HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient?findPatientWithOrList=aaa,bbb"); HttpResponse status = ourClient.execute(httpGet); String responseContent = IOUtils.toString(status.getEntity().getContent()); IOUtils.closeQuietly(status.getEntity().getContent()); assertEquals(200, status.getStatusLine().getStatusCode()); Bundle bundle = ourCtx.newXmlParser().parseBundle(responseContent); assertEquals(1, bundle.getEntries().size()); Patient p = bundle.getResources(Patient.class).get(0); assertEquals("aaa", p.getIdentifier().get(0).getValue().getValue()); assertEquals("bbb", p.getIdentifier().get(1).getValue().getValue()); } @Test public void testSearchWithTokenParameter() throws Exception { String token = UrlEscapers.urlFragmentEscaper().asFunction().apply("http://www.dmix.gov/vista/2957|301"); HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient?tokenParam=" + token); HttpResponse status = ourClient.execute(httpGet); String responseContent = IOUtils.toString(status.getEntity().getContent()); IOUtils.closeQuietly(status.getEntity().getContent()); assertEquals(200, status.getStatusLine().getStatusCode()); Bundle bundle = ourCtx.newXmlParser().parseBundle(responseContent); assertEquals(1, bundle.getEntries().size()); Patient p = bundle.getResources(Patient.class).get(0); assertEquals("http://www.dmix.gov/vista/2957", p.getNameFirstRep().getFamilyAsSingleString()); assertEquals("301", p.getNameFirstRep().getGivenAsSingleString()); } @Test public void testSearchByPost() throws Exception { HttpPost filePost = new HttpPost("http://localhost:" + ourPort + "/Patient/_search"); // add parameters to the post method List<NameValuePair> parameters = new ArrayList<NameValuePair>(); parameters.add(new BasicNameValuePair("_id", "aaa")); UrlEncodedFormEntity sendentity = new UrlEncodedFormEntity(parameters, 
"UTF-8"); filePost.setEntity(sendentity); HttpResponse status = ourClient.execute(filePost); String responseContent = IOUtils.toString(status.getEntity().getContent()); IOUtils.closeQuietly(status.getEntity().getContent()); assertEquals(200, status.getStatusLine().getStatusCode()); Bundle bundle = ourCtx.newXmlParser().parseBundle(responseContent); assertEquals(1, bundle.getEntries().size()); Patient p = bundle.getResources(Patient.class).get(0); assertEquals("idaaa", p.getNameFirstRep().getFamilyAsSingleString()); } /** * See #164 */ @Test public void testSearchByPostWithParamsInBodyAndUrl() throws Exception { HttpPost filePost = new HttpPost("http://localhost:" + ourPort + "/Patient/_search?name=Central"); // add parameters to the post method List<NameValuePair> parameters = new ArrayList<NameValuePair>(); parameters.add(new BasicNameValuePair("_id", "aaa")); UrlEncodedFormEntity sendentity = new UrlEncodedFormEntity(parameters, "UTF-8"); filePost.setEntity(sendentity); HttpResponse status = ourClient.execute(filePost); String responseContent = IOUtils.toString(status.getEntity().getContent()); IOUtils.closeQuietly(status.getEntity().getContent()); ourLog.info(responseContent); assertEquals(200, status.getStatusLine().getStatusCode()); Bundle bundle = ourCtx.newXmlParser().parseBundle(responseContent); assertEquals(1, bundle.getEntries().size()); Patient p = bundle.getResources(Patient.class).get(0); assertEquals("idaaa", p.getName().get(0).getFamilyAsSingleString()); assertEquals("nameCentral", p.getName().get(1).getFamilyAsSingleString()); } /** * See #164 */ @Test public void testSearchByPostWithInvalidPostUrl() throws Exception { HttpPost filePost = new HttpPost("http://localhost:" + ourPort + "/Patient?name=Central"); // should end with // _search // add parameters to the post method List<NameValuePair> parameters = new ArrayList<NameValuePair>(); parameters.add(new BasicNameValuePair("_id", "aaa")); UrlEncodedFormEntity sendentity = new 
UrlEncodedFormEntity(parameters, "UTF-8"); filePost.setEntity(sendentity); HttpResponse status = ourClient.execute(filePost); String responseContent = IOUtils.toString(status.getEntity().getContent()); IOUtils.closeQuietly(status.getEntity().getContent()); ourLog.info(responseContent); assertEquals(400, status.getStatusLine().getStatusCode()); assertThat(responseContent, containsString("<details value=\"Incorrect Content-Type header value of &quot;application/x-www-form-urlencoded; charset=UTF-8&quot; was provided in the request. A FHIR Content-Type is required for &quot;CREATE&quot; operation\"/>")); } /** * See #164 */ @Test public void testSearchByPostWithMissingContentType() throws Exception { HttpPost filePost = new HttpPost("http://localhost:" + ourPort + "/Patient?name=Central"); // should end with // _search HttpEntity sendentity = new ByteArrayEntity(new byte[] { 1, 2, 3, 4 }); filePost.setEntity(sendentity); HttpResponse status = ourClient.execute(filePost); String responseContent = IOUtils.toString(status.getEntity().getContent()); IOUtils.closeQuietly(status.getEntity().getContent()); ourLog.info(responseContent); assertEquals(400, status.getStatusLine().getStatusCode()); assertThat(responseContent, containsString("<details value=\"No Content-Type header was provided in the request. 
This is required for &quot;CREATE&quot; operation\"/>")); } @Test public void testSearchCompartment() throws Exception { HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/123/fooCompartment"); HttpResponse status = ourClient.execute(httpGet); String responseContent = IOUtils.toString(status.getEntity().getContent()); ourLog.info(responseContent); IOUtils.closeQuietly(status.getEntity().getContent()); assertEquals(200, status.getStatusLine().getStatusCode()); Bundle bundle = ourCtx.newXmlParser().parseBundle(responseContent); assertEquals(1, bundle.getEntries().size()); Patient p = bundle.getResources(Patient.class).get(0); assertEquals("fooCompartment", p.getIdentifierFirstRep().getValue().getValue()); assertThat(bundle.getEntries().get(0).getResource().getId().getValue(), containsString("Patient/123")); } @Test public void testSearchGetWithUnderscoreSearch() throws Exception { HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Observation/_search?subject%3APatient=100&name=3141-9%2C8302-2%2C8287-5%2C39156-5"); HttpResponse status = ourClient.execute(httpGet); String responseContent = IOUtils.toString(status.getEntity().getContent()); IOUtils.closeQuietly(status.getEntity().getContent()); assertEquals(200, status.getStatusLine().getStatusCode()); Bundle bundle = ourCtx.newXmlParser().parseBundle(responseContent); assertEquals(1, bundle.getEntries().size()); Observation p = bundle.getResources(Observation.class).get(0); assertEquals("Patient/100", p.getSubject().getReference().toString()); assertEquals(4, p.getName().getCoding().size()); assertEquals("3141-9", p.getName().getCoding().get(0).getCode().getValue()); assertEquals("8302-2", p.getName().getCoding().get(1).getCode().getValue()); } @Test public void testSpecificallyNamedQueryGetsPrecedence() throws Exception { HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient?AAA=123"); HttpResponse status = ourClient.execute(httpGet); String responseContent = 
IOUtils.toString(status.getEntity().getContent()); IOUtils.closeQuietly(status.getEntity().getContent()); assertEquals(200, status.getStatusLine().getStatusCode()); Bundle bundle = ourCtx.newXmlParser().parseBundle(responseContent); assertEquals(1, bundle.getEntries().size()); Patient p = bundle.getResources(Patient.class).get(0); assertEquals("AAA", p.getIdentifierFirstRep().getValue().getValue()); // Now the named query httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient?_query=findPatientByAAA&AAA=123"); status = ourClient.execute(httpGet); responseContent = IOUtils.toString(status.getEntity().getContent()); IOUtils.closeQuietly(status.getEntity().getContent()); assertEquals(200, status.getStatusLine().getStatusCode()); bundle = ourCtx.newXmlParser().parseBundle(responseContent); assertEquals(1, bundle.getEntries().size()); p = bundle.getResources(Patient.class).get(0); assertEquals("AAANamed", p.getIdentifierFirstRep().getValue().getValue()); } @AfterClass public static void afterClass() throws Exception { ourServer.stop(); } @BeforeClass public static void beforeClass() throws Exception { ourPort = PortUtil.findFreePort(); ourServer = new Server(ourPort); DummyPatientResourceProvider patientProvider = new DummyPatientResourceProvider(); ServletHandler proxyHandler = new ServletHandler(); ourServlet = new RestfulServer(); ourServlet.getFhirContext().setNarrativeGenerator(new DefaultThymeleafNarrativeGenerator()); ourServlet.setPagingProvider(new FifoMemoryPagingProvider(10).setDefaultPageSize(10)); ourServlet.setResourceProviders(patientProvider, new DummyObservationResourceProvider()); ServletHolder servletHolder = new ServletHolder(ourServlet); proxyHandler.addServletWithMapping(servletHolder, "/*"); ourServer.setHandler(proxyHandler); ourServer.start(); PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS); HttpClientBuilder builder = HttpClientBuilder.create(); 
builder.setConnectionManager(connectionManager); ourClient = builder.build(); ourDefaultAddressStrategy = ourServlet.getServerAddressStrategy(); } public static class DummyObservationResourceProvider implements IResourceProvider { @Override public Class<? extends IResource> getResourceType() { return Observation.class; } @Search public Observation search(@RequiredParam(name = "subject") ReferenceParam theSubject, @RequiredParam(name = "name") TokenOrListParam theName) { Observation o = new Observation(); o.setId("1"); o.getSubject().setReference(theSubject.getResourceType() + "/" + theSubject.getIdPart()); for (BaseCodingDt next : theName.getListAsCodings()) { o.getName().getCoding().add(new CodingDt(next)); } return o; } } public static class DummyPatientResourceProvider implements IResourceProvider { /** * Only needed for #164 */ @Create public MethodOutcome create(@ResourceParam Patient thePatient) { throw new IllegalArgumentException(); } @Search(compartmentName = "fooCompartment") public List<Patient> compartment(@IdParam IdDt theId) { ArrayList<Patient> retVal = new ArrayList<Patient>(); Patient patient = new Patient(); patient.setId(theId); patient.addIdentifier("system", "fooCompartment"); retVal.add(patient); return retVal; } @Search(queryName = "searchWithRef") public Patient searchWithRef() { Patient patient = new Patient(); patient.setId("Patient/1/_history/1"); patient.getManagingOrganization().setReference("http://localhost:" + ourPort + "/Organization/555/_history/666"); return patient; } @Search public List<Patient> findPatient(@RequiredParam(name = "_id") StringParam theParam, @OptionalParam(name = "name") StringParam theName) { ArrayList<Patient> retVal = new ArrayList<Patient>(); Patient patient = new Patient(); patient.setId("1"); patient.addIdentifier("system", "identifier123"); if (theParam != null) { patient.addName().addFamily("id" + theParam.getValue()); if (theName != null) { patient.addName().addFamily("name" + theName.getValue()); } } 
retVal.add(patient); return retVal; } @Search public List<Patient> findPatientByAAA01(@RequiredParam(name = "AAA") StringParam theParam) { ArrayList<Patient> retVal = new ArrayList<Patient>(); Patient patient = new Patient(); patient.setId("1"); patient.addIdentifier("system", "AAA"); retVal.add(patient); return retVal; } @Search(queryName = "findPatientByAAA") public List<Patient> findPatientByAAA02Named(@OptionalParam(name = "AAA") StringParam theParam) { ArrayList<Patient> retVal = new ArrayList<Patient>(); Patient patient = new Patient(); patient.setId("1"); patient.addIdentifier("system", "AAANamed"); retVal.add(patient); return retVal; } @Search() public List<Patient> findPatientWithOrList(@RequiredParam(name = "findPatientWithOrList") StringOrListParam theParam) { ArrayList<Patient> retVal = new ArrayList<Patient>(); Patient patient = new Patient(); patient.setId("1"); for (StringParam next : theParam.getValuesAsQueryTokens()) { patient.addIdentifier("system", next.getValue()); } retVal.add(patient); return retVal; } @Search() public List<Patient> findPatientWithAndList(@RequiredParam(name = "findPatientWithAndList") StringAndListParam theParam) { ourLastAndList = theParam; ArrayList<Patient> retVal = new ArrayList<Patient>(); return retVal; } @Search() public List<Patient> findPatientWithToken(@RequiredParam(name = "tokenParam") TokenParam theParam) { ArrayList<Patient> retVal = new ArrayList<Patient>(); Patient patient = new Patient(); patient.setId("1"); patient.addName().addFamily(theParam.getSystem()).addGiven(theParam.getValue()); retVal.add(patient); return retVal; } @Search(queryName = "findWithLinks") public List<Patient> findWithLinks() { ArrayList<Patient> retVal = new ArrayList<Patient>(); for (int i = 1; i <= 20; i++) { Patient patient = new Patient(); patient.setId("" + i); patient.addIdentifier("system", "AAANamed"); ResourceMetadataKeyEnum.LINK_SEARCH.put(patient, ("http://foo/Patient?_id=" + i)); 
ResourceMetadataKeyEnum.LINK_ALTERNATE.put(patient, ("Patient/9988" + i)); retVal.add(patient); } return retVal; } @Search(queryName = "searchIncludes") public List<Patient> searchIncludes(@IncludeParam Set<Include> theIncludes) { ourLastIncludes = theIncludes; ArrayList<Patient> retVal = new ArrayList<Patient>(); return retVal; } @Search(queryName = "searchIncludesList") public List<Patient> searchIncludesList(@IncludeParam List<Include> theIncludes) { if (theIncludes != null) { ourLastIncludes = new HashSet<Include>(theIncludes); } ArrayList<Patient> retVal = new ArrayList<Patient>(); return retVal; } @Override public Class<? extends IResource> getResourceType() { return Patient.class; } } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.db.compaction;

import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.TimeUnit;

import org.junit.BeforeClass;
import org.junit.Test;

import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;

import org.apache.cassandra.SchemaLoader;
import org.apache.cassandra.Util;
import org.apache.cassandra.config.KSMetaData;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.DecoratedKey;
import org.apache.cassandra.db.Keyspace;
import org.apache.cassandra.db.Mutation;
import org.apache.cassandra.db.RowUpdateBuilder;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.apache.cassandra.locator.SimpleStrategy;
import org.apache.cassandra.utils.Pair;

import static org.apache.cassandra.db.compaction.DateTieredCompactionStrategy.getBuckets;
import static org.apache.cassandra.db.compaction.DateTieredCompactionStrategy.newestBucket;
import static org.apache.cassandra.db.compaction.DateTieredCompactionStrategy.trimToThreshold;
import static org.apache.cassandra.db.compaction.DateTieredCompactionStrategy.filterOldSSTables;
import static org.apache.cassandra.db.compaction.DateTieredCompactionStrategy.validateOptions;
import static org.junit.Assert.*;

/**
 * Unit tests for {@link DateTieredCompactionStrategy}: option validation,
 * timestamp-resolution handling, time-window bucketing, bucket selection and
 * trimming, age-based filtering, and TTL-expired sstable dropping.
 */
public class DateTieredCompactionStrategyTest extends SchemaLoader
{
    public static final String KEYSPACE1 = "DateTieredCompactionStrategyTest";
    private static final String CF_STANDARD1 = "Standard1";

    /** Creates the test keyspace/table (RF=1) before any test runs. */
    @BeforeClass
    public static void defineSchema() throws ConfigurationException
    {
        SchemaLoader.prepareServer();
        SchemaLoader.createKeyspace(KEYSPACE1,
                                    SimpleStrategy.class,
                                    KSMetaData.optsWithRF(1),
                                    SchemaLoader.standardCFMD(KEYSPACE1, CF_STANDARD1));
    }

    /**
     * validateOptions must accept sane values, reject zero/negative base time
     * and negative max sstable age, and report unknown keys back to the caller.
     */
    @Test
    public void testOptionsValidation() throws ConfigurationException
    {
        Map<String, String> options = new HashMap<>();
        options.put(DateTieredCompactionStrategyOptions.BASE_TIME_KEY, "30");
        options.put(DateTieredCompactionStrategyOptions.MAX_SSTABLE_AGE_KEY, "1825");
        Map<String, String> unvalidated = validateOptions(options);
        assertTrue(unvalidated.isEmpty());

        try
        {
            options.put(DateTieredCompactionStrategyOptions.BASE_TIME_KEY, "0");
            validateOptions(options);
            fail(String.format("%s == 0 should be rejected", DateTieredCompactionStrategyOptions.BASE_TIME_KEY));
        }
        catch (ConfigurationException e) {}

        try
        {
            options.put(DateTieredCompactionStrategyOptions.BASE_TIME_KEY, "-1337");
            validateOptions(options);
            fail(String.format("Negative %s should be rejected", DateTieredCompactionStrategyOptions.BASE_TIME_KEY));
        }
        catch (ConfigurationException e)
        {
            // restore a valid base time so the following checks only test max age
            options.put(DateTieredCompactionStrategyOptions.BASE_TIME_KEY, "1");
        }

        try
        {
            options.put(DateTieredCompactionStrategyOptions.MAX_SSTABLE_AGE_KEY, "-1337");
            validateOptions(options);
            fail(String.format("Negative %s should be rejected", DateTieredCompactionStrategyOptions.MAX_SSTABLE_AGE_KEY));
        }
        catch (ConfigurationException e)
        {
            options.put(DateTieredCompactionStrategyOptions.MAX_SSTABLE_AGE_KEY, "0");
        }

        options.put("bad_option", "1.0");
        unvalidated = validateOptions(options);
        assertTrue(unvalidated.containsKey("bad_option"));
    }

    /**
     * maxSSTableAge (configured in days, possibly fractional) must be converted
     * into the configured timestamp resolution: SECONDS, MILLISECONDS,
     * MICROSECONDS, and HOURS are each checked, including a 0.5-day value.
     */
    @Test
    public void testTimeConversions()
    {
        Map<String, String> options = new HashMap<>();
        options.put(DateTieredCompactionStrategyOptions.BASE_TIME_KEY, "30");
        options.put(DateTieredCompactionStrategyOptions.TIMESTAMP_RESOLUTION_KEY, "SECONDS");

        DateTieredCompactionStrategyOptions opts = new DateTieredCompactionStrategyOptions(options);
        assertEquals(opts.maxSSTableAge, TimeUnit.SECONDS.convert(365, TimeUnit.DAYS));

        options.put(DateTieredCompactionStrategyOptions.TIMESTAMP_RESOLUTION_KEY, "MILLISECONDS");
        opts = new DateTieredCompactionStrategyOptions(options);
        assertEquals(opts.maxSSTableAge, TimeUnit.MILLISECONDS.convert(365, TimeUnit.DAYS));

        options.put(DateTieredCompactionStrategyOptions.TIMESTAMP_RESOLUTION_KEY, "MICROSECONDS");
        options.put(DateTieredCompactionStrategyOptions.MAX_SSTABLE_AGE_KEY, "10");
        opts = new DateTieredCompactionStrategyOptions(options);
        assertEquals(opts.maxSSTableAge, TimeUnit.MICROSECONDS.convert(10, TimeUnit.DAYS));

        // fractional days must round-trip: half a day in microseconds
        options.put(DateTieredCompactionStrategyOptions.MAX_SSTABLE_AGE_KEY, "0.5");
        opts = new DateTieredCompactionStrategyOptions(options);
        assertEquals(opts.maxSSTableAge, TimeUnit.MICROSECONDS.convert(1, TimeUnit.DAYS) / 2);

        options.put(DateTieredCompactionStrategyOptions.TIMESTAMP_RESOLUTION_KEY, "HOURS");
        options.put(DateTieredCompactionStrategyOptions.MAX_SSTABLE_AGE_KEY, "0.5");
        opts = new DateTieredCompactionStrategyOptions(options);
        assertEquals(opts.maxSSTableAge, 12);
    }

    /**
     * getBuckets must group (name, timestamp) pairs into time windows; pairs
     * labelled identically are expected to land in the same bucket, and entries
     * newer than "now" ("too new") must not be bucketed.
     */
    @Test
    public void testGetBuckets()
    {
        List<Pair<String, Long>> pairs = Lists.newArrayList(
                Pair.create("a", 199L),
                Pair.create("b", 299L),
                Pair.create("a", 1L),
                Pair.create("b", 201L)
        );
        List<List<String>> buckets = getBuckets(pairs, 100L, 2, 200L);
        assertEquals(2, buckets.size());

        for (List<String> bucket : buckets)
        {
            assertEquals(2, bucket.size());
            assertEquals(bucket.get(0), bucket.get(1));
        }

        pairs = Lists.newArrayList(
                Pair.create("a", 2000L),
                Pair.create("b", 3600L),
                Pair.create("a", 200L),
                Pair.create("c", 3950L),
                Pair.create("too new", 4125L),
                Pair.create("b", 3899L),
                Pair.create("c", 3900L)
        );
        buckets = getBuckets(pairs, 100L, 3, 4050L);
        // targets (divPosition, size): (40, 100), (39, 100), (12, 300), (3, 900), (0, 2700)
        // in other words: 0 - 2699, 2700 - 3599, 3600 - 3899, 3900 - 3999, 4000 - 4099
        assertEquals(3, buckets.size());

        for (List<String> bucket : buckets)
        {
            assertEquals(2, bucket.size());
            assertEquals(bucket.get(0), bucket.get(1));
        }

        // Test base 1.
        pairs = Lists.newArrayList(
                Pair.create("a", 200L),
                Pair.create("a", 299L),
                Pair.create("b", 2000L),
                Pair.create("b", 2014L),
                Pair.create("c", 3610L),
                Pair.create("c", 3690L),
                Pair.create("d", 3898L),
                Pair.create("d", 3899L),
                Pair.create("e", 3900L),
                Pair.create("e", 3950L),
                Pair.create("too new", 4125L)
        );
        buckets = getBuckets(pairs, 100L, 1, 4050L);

        assertEquals(5, buckets.size());

        for (List<String> bucket : buckets)
        {
            assertEquals(2, bucket.size());
            assertEquals(bucket.get(0), bucket.get(1));
        }
    }

    /**
     * newestBucket must reject a bucket below the min threshold and accept one
     * meeting it; trimToThreshold must drop the oldest sstables when a bucket
     * exceeds the max threshold.
     */
    @Test
    public void testPrepBucket()
    {
        Keyspace keyspace = Keyspace.open(KEYSPACE1);
        ColumnFamilyStore cfs = keyspace.getColumnFamilyStore(CF_STANDARD1);
        cfs.truncateBlocking();
        cfs.disableAutoCompaction();

        ByteBuffer value = ByteBuffer.wrap(new byte[100]);

        // create 3 sstables, one row each, with write timestamps 0, 1, 2
        int numSSTables = 3;
        for (int r = 0; r < numSSTables; r++)
        {
            DecoratedKey key = Util.dk(String.valueOf(r));
            new RowUpdateBuilder(cfs.metadata, r, key.getKey())
                .clustering("column")
                .add("val", value).build().applyUnsafe();
            cfs.forceBlockingFlush();
        }
        cfs.forceBlockingFlush();

        List<SSTableReader> sstrs = new ArrayList<>(cfs.getSSTables());

        // incoming bucket of 2 sstables vs min threshold 9 -> rejected
        List<SSTableReader> newBucket = newestBucket(Collections.singletonList(sstrs.subList(0, 2)), 4, 32, 9, 10);
        assertTrue("incoming bucket should not be accepted when it has below the min threshold SSTables", newBucket.isEmpty());

        // same bucket vs min threshold 10 (== now) -> accepted
        newBucket = newestBucket(Collections.singletonList(sstrs.subList(0, 2)), 4, 32, 10, 10);
        assertFalse("non-incoming bucket should be accepted when it has at least 2 SSTables", newBucket.isEmpty());

        assertEquals("an sstable with a single value should have equal min/max timestamps", sstrs.get(0).getMinTimestamp(), sstrs.get(0).getMaxTimestamp());
        assertEquals("an sstable with a single value should have equal min/max timestamps", sstrs.get(1).getMinTimestamp(), sstrs.get(1).getMaxTimestamp());
        assertEquals("an sstable with a single value should have equal min/max timestamps", sstrs.get(2).getMinTimestamp(), sstrs.get(2).getMaxTimestamp());

        // if we have more than the max threshold, the oldest should be dropped
        Collections.sort(sstrs, Collections.reverseOrder(new Comparator<SSTableReader>()
        {
            public int compare(SSTableReader o1, SSTableReader o2)
            {
                return Long.compare(o1.getMinTimestamp(), o2.getMinTimestamp());
            }
        }));

        List<SSTableReader> bucket = trimToThreshold(sstrs, 2);
        assertEquals("one bucket should have been dropped", 2, bucket.size());
        for (SSTableReader sstr : bucket)
            assertFalse("the oldest sstable should be dropped", sstr.getMinTimestamp() == 0);
    }

    /**
     * filterOldSSTables: maxSSTableAge == 0 disables filtering; otherwise sstables
     * whose max timestamp is older than (now - maxSSTableAge) are removed.
     */
    @Test
    public void testFilterOldSSTables()
    {
        Keyspace keyspace = Keyspace.open(KEYSPACE1);
        ColumnFamilyStore cfs = keyspace.getColumnFamilyStore(CF_STANDARD1);
        cfs.truncateBlocking();
        cfs.disableAutoCompaction();

        ByteBuffer value = ByteBuffer.wrap(new byte[100]);

        // create 3 sstables with write timestamps 0, 1, 2
        int numSSTables = 3;
        for (int r = 0; r < numSSTables; r++)
        {
            DecoratedKey key = Util.dk(String.valueOf(r));
            new RowUpdateBuilder(cfs.metadata, r, key.getKey())
                .clustering("column")
                .add("val", value).build().applyUnsafe();
            cfs.forceBlockingFlush();
        }
        cfs.forceBlockingFlush();

        Iterable<SSTableReader> filtered;
        List<SSTableReader> sstrs = new ArrayList<>(cfs.getSSTables());

        filtered = filterOldSSTables(sstrs, 0, 2);
        assertEquals("when maxSSTableAge is zero, no sstables should be filtered", sstrs.size(), Iterables.size(filtered));

        filtered = filterOldSSTables(sstrs, 1, 2);
        assertEquals("only the newest 2 sstables should remain", 2, Iterables.size(filtered));

        filtered = filterOldSSTables(sstrs, 1, 3);
        assertEquals("only the newest sstable should remain", 1, Iterables.size(filtered));

        filtered = filterOldSSTables(sstrs, 1, 4);
        assertEquals("no sstables should remain when all are too old", 0, Iterables.size(filtered));
    }

    /**
     * A fully TTL-expired sstable must be offered as a single-sstable background
     * compaction once its TTL (here ~1 second via a fractional-day max age) has
     * elapsed, and never before.
     */
    @Test
    public void testDropExpiredSSTables() throws InterruptedException
    {
        Keyspace keyspace = Keyspace.open(KEYSPACE1);
        ColumnFamilyStore cfs = keyspace.getColumnFamilyStore(CF_STANDARD1);
        cfs.truncateBlocking();
        cfs.disableAutoCompaction();

        ByteBuffer value = ByteBuffer.wrap(new byte[100]);

        // create 2 sstables: the first with a 1-second TTL, the second without
        DecoratedKey key = Util.dk("expired");
        new RowUpdateBuilder(cfs.metadata, System.currentTimeMillis(), 1, key.getKey())
            .clustering("column")
            .add("val", value).build().applyUnsafe();

        cfs.forceBlockingFlush();
        SSTableReader expiredSSTable = cfs.getSSTables().iterator().next();
        Thread.sleep(10);

        key = Util.dk("nonexpired");
        new RowUpdateBuilder(cfs.metadata, System.currentTimeMillis(), key.getKey())
            .clustering("column")
            .add("val", value).build().applyUnsafe();

        cfs.forceBlockingFlush();
        assertEquals(cfs.getSSTables().size(), 2);

        Map<String, String> options = new HashMap<>();

        options.put(DateTieredCompactionStrategyOptions.BASE_TIME_KEY, "30");
        options.put(DateTieredCompactionStrategyOptions.TIMESTAMP_RESOLUTION_KEY, "MILLISECONDS");
        // 1/86400 days == 1 second of max sstable age
        options.put(DateTieredCompactionStrategyOptions.MAX_SSTABLE_AGE_KEY, Double.toString((1d / (24 * 60 * 60))));
        DateTieredCompactionStrategy dtcs = new DateTieredCompactionStrategy(cfs, options);
        for (SSTableReader sstable : cfs.getSSTables())
            dtcs.addSSTable(sstable);
        dtcs.startup();
        assertNull(dtcs.getNextBackgroundTask((int) (System.currentTimeMillis() / 1000)));
        Thread.sleep(2000);
        AbstractCompactionTask t = dtcs.getNextBackgroundTask((int) (System.currentTimeMillis() / 1000));
        assertNotNull(t);
        assertEquals(1, Iterables.size(t.transaction.originals()));
        SSTableReader sstable = t.transaction.originals().iterator().next();
        assertEquals(sstable, expiredSSTable);
        t.transaction.abort();
    }
}
package com.arjuna.webservices11.wsba.sei;

import com.arjuna.services.framework.task.Task;
import com.arjuna.services.framework.task.TaskManager;
import com.arjuna.webservices11.wsarj.ArjunaContext;
import com.arjuna.webservices11.wsba.processors.CoordinatorCompletionParticipantProcessor;
import com.arjuna.webservices11.SoapFault11;
import org.jboss.ws.api.addressing.MAP;
import com.arjuna.webservices11.wsaddr.AddressingHelper;
import com.arjuna.webservices.SoapFault;
import org.oasis_open.docs.ws_tx.wsba._2006._06.BusinessAgreementWithCoordinatorCompletionParticipantPortType;
import org.oasis_open.docs.ws_tx.wsba._2006._06.NotificationType;
import org.oasis_open.docs.ws_tx.wsba._2006._06.StatusType;
import org.xmlsoap.schemas.soap.envelope.Fault;

import javax.annotation.Resource;
import javax.jws.*;
import javax.jws.soap.SOAPBinding;
import javax.xml.ws.Action;
import javax.xml.ws.WebServiceContext;
import javax.xml.ws.handler.MessageContext;
import javax.xml.ws.soap.Addressing;

/**
 * JAX-WS service endpoint implementation for the WS-BusinessActivity
 * "coordinator completion" participant port.
 *
 * Every operation follows the same pattern: extract the WS-Addressing
 * properties and the Arjuna transaction context from the inbound message
 * context (which is only valid on the container's request thread), then
 * queue the actual processing on a {@link TaskManager} worker thread via the
 * singleton {@link CoordinatorCompletionParticipantProcessor}, so the
 * one-way SOAP request returns immediately.
 *
 * This class was generated by the JAX-WS RI.
 * JAX-WS RI 2.1.1-b03-
 * Generated source version: 2.0
 */
@WebService(name = "BusinessAgreementWithCoordinatorCompletionParticipantPortType",
        targetNamespace = "http://docs.oasis-open.org/ws-tx/wsba/2006/06",
        //wsdlLocation = "/WEB-INF/wsdl/wsba-coordinator-completion-participant-binding.wsdl",
        serviceName = "BusinessAgreementWithCoordinatorCompletionParticipantService",
        portName = "BusinessAgreementWithCoordinatorCompletionParticipantPortType"
)
@SOAPBinding(parameterStyle = SOAPBinding.ParameterStyle.BARE)
@HandlerChain(file="/ws-t_handlers.xml")
@Addressing(required=true)
public class BusinessAgreementWithCoordinatorCompletionParticipantPortTypeImpl
        implements BusinessAgreementWithCoordinatorCompletionParticipantPortType
{
    // Injected by the container; the message context it exposes is
    // request-scoped, so it must be read before handing off to a task thread.
    @Resource
    private WebServiceContext webServiceCtx;

    /**
     * Queues asynchronous processing of a Complete notification.
     *
     * @param parameters the Complete notification payload
     */
    @WebMethod(operationName = "CompleteOperation", action = "http://docs.oasis-open.org/ws-tx/wsba/2006/06/Complete")
    @Oneway
    @Action(input="http://docs.oasis-open.org/ws-tx/wsba/2006/06/Complete")
    public void completeOperation(
        @WebParam(name = "Complete", targetNamespace = "http://docs.oasis-open.org/ws-tx/wsba/2006/06", partName = "parameters")
        NotificationType parameters)
    {
        MessageContext ctx = webServiceCtx.getMessageContext();
        // Capture everything the task needs while the request thread is live.
        final NotificationType complete = parameters;
        final MAP inboundMap = AddressingHelper.inboundMap(ctx);
        final ArjunaContext arjunaContext = ArjunaContext.getCurrentContext(ctx);

        TaskManager.getManager().queueTask(new Task() {
            public void executeTask() {
                CoordinatorCompletionParticipantProcessor.getProcessor().complete(complete, inboundMap, arjunaContext) ;
            }
        }) ;
    }

    /**
     * Queues asynchronous processing of a Close notification.
     *
     * @param parameters the Close notification payload
     */
    @WebMethod(operationName = "CloseOperation", action = "http://docs.oasis-open.org/ws-tx/wsba/2006/06/Close")
    @Oneway
    @Action(input="http://docs.oasis-open.org/ws-tx/wsba/2006/06/Close")
    public void closeOperation(
        @WebParam(name = "Close", targetNamespace = "http://docs.oasis-open.org/ws-tx/wsba/2006/06", partName = "parameters")
        NotificationType parameters)
    {
        MessageContext ctx = webServiceCtx.getMessageContext();
        final NotificationType close = parameters;
        final MAP inboundMap = AddressingHelper.inboundMap(ctx);
        final ArjunaContext arjunaContext = ArjunaContext.getCurrentContext(ctx);

        TaskManager.getManager().queueTask(new Task() {
            public void executeTask() {
                CoordinatorCompletionParticipantProcessor.getProcessor().close(close, inboundMap, arjunaContext) ;
            }
        }) ;
    }

    /**
     * Queues asynchronous processing of a Cancel notification.
     *
     * @param parameters the Cancel notification payload
     */
    @WebMethod(operationName = "CancelOperation", action = "http://docs.oasis-open.org/ws-tx/wsba/2006/06/Cancel")
    @Oneway
    @Action(input="http://docs.oasis-open.org/ws-tx/wsba/2006/06/Cancel")
    public void cancelOperation(
        @WebParam(name = "Cancel", targetNamespace = "http://docs.oasis-open.org/ws-tx/wsba/2006/06", partName = "parameters")
        NotificationType parameters)
    {
        MessageContext ctx = webServiceCtx.getMessageContext();
        final NotificationType cancel = parameters;
        final MAP inboundMap = AddressingHelper.inboundMap(ctx);
        final ArjunaContext arjunaContext = ArjunaContext.getCurrentContext(ctx);

        TaskManager.getManager().queueTask(new Task() {
            public void executeTask() {
                CoordinatorCompletionParticipantProcessor.getProcessor().cancel(cancel, inboundMap, arjunaContext) ;
            }
        }) ;
    }

    /**
     * Queues asynchronous processing of a Compensate notification.
     *
     * @param parameters the Compensate notification payload
     */
    @WebMethod(operationName = "CompensateOperation", action = "http://docs.oasis-open.org/ws-tx/wsba/2006/06/Compensate")
    @Oneway
    @Action(input="http://docs.oasis-open.org/ws-tx/wsba/2006/06/Compensate")
    public void compensateOperation(
        @WebParam(name = "Compensate", targetNamespace = "http://docs.oasis-open.org/ws-tx/wsba/2006/06", partName = "parameters")
        NotificationType parameters)
    {
        MessageContext ctx = webServiceCtx.getMessageContext();
        final NotificationType compensate = parameters;
        final MAP inboundMap = AddressingHelper.inboundMap(ctx);
        final ArjunaContext arjunaContext = ArjunaContext.getCurrentContext(ctx);

        TaskManager.getManager().queueTask(new Task() {
            public void executeTask() {
                CoordinatorCompletionParticipantProcessor.getProcessor().compensate(compensate, inboundMap, arjunaContext) ;
            }
        }) ;
    }

    /**
     * Queues asynchronous processing of a Failed notification.
     *
     * @param parameters the Failed notification payload
     */
    @WebMethod(operationName = "FailedOperation", action = "http://docs.oasis-open.org/ws-tx/wsba/2006/06/Failed")
    @Oneway
    @Action(input="http://docs.oasis-open.org/ws-tx/wsba/2006/06/Failed")
    public void failedOperation(
        @WebParam(name = "Failed", targetNamespace = "http://docs.oasis-open.org/ws-tx/wsba/2006/06", partName = "parameters")
        NotificationType parameters)
    {
        MessageContext ctx = webServiceCtx.getMessageContext();
        final NotificationType failed = parameters;
        final MAP inboundMap = AddressingHelper.inboundMap(ctx);
        final ArjunaContext arjunaContext = ArjunaContext.getCurrentContext(ctx);

        TaskManager.getManager().queueTask(new Task() {
            public void executeTask() {
                CoordinatorCompletionParticipantProcessor.getProcessor().failed(failed, inboundMap, arjunaContext) ;
            }
        }) ;
    }

    /**
     * Queues asynchronous processing of an Exited notification.
     *
     * @param parameters the Exited notification payload
     */
    @WebMethod(operationName = "ExitedOperation", action = "http://docs.oasis-open.org/ws-tx/wsba/2006/06/Exited")
    @Oneway
    @Action(input="http://docs.oasis-open.org/ws-tx/wsba/2006/06/Exited")
    public void exitedOperation(
        @WebParam(name = "Exited", targetNamespace = "http://docs.oasis-open.org/ws-tx/wsba/2006/06", partName = "parameters")
        NotificationType parameters)
    {
        MessageContext ctx = webServiceCtx.getMessageContext();
        final NotificationType exited = parameters;
        final MAP inboundMap = AddressingHelper.inboundMap(ctx);
        final ArjunaContext arjunaContext = ArjunaContext.getCurrentContext(ctx);

        TaskManager.getManager().queueTask(new Task() {
            public void executeTask() {
                CoordinatorCompletionParticipantProcessor.getProcessor().exited(exited, inboundMap, arjunaContext) ;
            }
        }) ;
    }

    /**
     * Queues asynchronous processing of a NotCompleted notification.
     *
     * @param parameters the NotCompleted notification payload
     */
    @WebMethod(operationName = "NotCompleted", action = "http://docs.oasis-open.org/ws-tx/wsba/2006/06/NotCompleted")
    @Oneway
    @Action(input="http://docs.oasis-open.org/ws-tx/wsba/2006/06/NotCompleted")
    public void notCompleted(
        @WebParam(name = "NotCompleted", targetNamespace = "http://docs.oasis-open.org/ws-tx/wsba/2006/06", partName = "parameters")
        NotificationType parameters)
    {
        MessageContext ctx = webServiceCtx.getMessageContext();
        final NotificationType notCompleted = parameters;
        final MAP inboundMap = AddressingHelper.inboundMap(ctx);
        final ArjunaContext arjunaContext = ArjunaContext.getCurrentContext(ctx);

        TaskManager.getManager().queueTask(new Task() {
            public void executeTask() {
                CoordinatorCompletionParticipantProcessor.getProcessor().notCompleted(notCompleted, inboundMap, arjunaContext) ;
            }
        }) ;
    }

    /**
     * Queues asynchronous processing of a GetStatus request.
     *
     * @param parameters the GetStatus notification payload
     */
    @WebMethod(operationName = "GetStatusOperation", action = "http://docs.oasis-open.org/ws-tx/wsba/2006/06/GetStatus")
    @Oneway
    @Action(input="http://docs.oasis-open.org/ws-tx/wsba/2006/06/GetStatus")
    public void getStatusOperation(
        @WebParam(name = "GetStatus", targetNamespace = "http://docs.oasis-open.org/ws-tx/wsba/2006/06", partName = "parameters")
        NotificationType parameters)
    {
        MessageContext ctx = webServiceCtx.getMessageContext();
        final NotificationType getStatus = parameters;
        final MAP inboundMap = AddressingHelper.inboundMap(ctx);
        final ArjunaContext arjunaContext = ArjunaContext.getCurrentContext(ctx);

        TaskManager.getManager().queueTask(new Task() {
            public void executeTask() {
                CoordinatorCompletionParticipantProcessor.getProcessor().getStatus(getStatus, inboundMap, arjunaContext) ;
            }
        }) ;
    }

    /**
     * Queues asynchronous processing of a Status notification.
     *
     * @param parameters the Status payload (carries the reported state)
     */
    @WebMethod(operationName = "StatusOperation", action = "http://docs.oasis-open.org/ws-tx/wsba/2006/06/Status")
    @Oneway
    @Action(input="http://docs.oasis-open.org/ws-tx/wsba/2006/06/Status")
    public void statusOperation(
        @WebParam(name = "Status", targetNamespace = "http://docs.oasis-open.org/ws-tx/wsba/2006/06", partName = "parameters")
        StatusType parameters)
    {
        MessageContext ctx = webServiceCtx.getMessageContext();
        final StatusType status = parameters;
        final MAP inboundMap = AddressingHelper.inboundMap(ctx);
        final ArjunaContext arjunaContext = ArjunaContext.getCurrentContext(ctx);

        TaskManager.getManager().queueTask(new Task() {
            public void executeTask() {
                CoordinatorCompletionParticipantProcessor.getProcessor().status(status, inboundMap, arjunaContext) ;
            }
        }) ;
    }

    /**
     * Queues asynchronous processing of an inbound SOAP fault.
     * Note the action lives in the wscoor (WS-Coordination) namespace,
     * unlike the wsba operations above.
     *
     * @param fault the raw SOAP 1.1 fault element
     */
    @WebMethod(operationName = "fault", action = "http://docs.oasis-open.org/ws-tx/wscoor/2006/06/fault")
    @Oneway
    @Action(input="http://docs.oasis-open.org/ws-tx/wscoor/2006/06/fault")
    public void soapFault(
        @WebParam(name = "Fault", targetNamespace = "http://schemas.xmlsoap.org/soap/envelope/", partName = "parameters")
        Fault fault)
    {
        MessageContext ctx = webServiceCtx.getMessageContext();
        final MAP inboundMap = AddressingHelper.inboundMap(ctx);
        final ArjunaContext arjunaContext = ArjunaContext.getCurrentContext(ctx);
        // Convert the wire-level fault into the internal SoapFault model
        // before leaving the request thread.
        final SoapFault soapFault = SoapFault11.fromFault(fault);

        TaskManager.getManager().queueTask(new Task() {
            public void executeTask() {
                CoordinatorCompletionParticipantProcessor.getProcessor().soapFault(soapFault, inboundMap, arjunaContext);
                ;
            }
        }) ;
    }
}
package subtypes;

import java.nio.file.Path;
import java.util.Iterator;

import org.junit.Assert;
import org.junit.Test;

import common.CommonTest;
import de.ust.skill.common.java.api.SkillFile.Mode;
import subtypes.api.SkillFile;

/**
 * Two short tests that check type order implementation.
 *
 * The tests work over a small type hierarchy (A with subtypes B and C, and D
 * below B) and verify that instances are stored in insertion ("write") order
 * per block but can be traversed in type order, and that self-references
 * survive serialization round trips.
 *
 * @author Timm Felden
 */
@SuppressWarnings("static-method")
public class ReadWriteOrder extends CommonTest {

    // Opens a test-resource file for reading with append capability.
    static SkillFile read(String s) throws Exception {
        return SkillFile.open("src/test/resources/" + s, Mode.Read, Mode.Append);
    }

    /**
     * Builds a file in three append blocks and checks write order, type order
     * and self-references after reopening it.
     */
    @Test
    public void makeSubtypesWAA() throws Exception {
        Path file = tmpFile("make.subtype.waa");
        {
            final SkillFile sf = SkillFile.open(file, Mode.Create, Mode.Append);
            // Factory lambdas; every created object references itself through
            // each field it declares, so identity can be checked after a round trip.
            java.util.function.Supplier<A> addA = () -> {
                A a = sf.As().make();
                a.setA(a);
                return a;
            };
            java.util.function.Supplier<B> addB = () -> {
                B a = sf.Bs().make();
                a.setA(a);
                a.setB(a);
                return a;
            };
            java.util.function.Supplier<C> addC = () -> {
                C a = sf.Cs().make();
                a.setA(a);
                a.setC(a);
                return a;
            };
            java.util.function.Supplier<D> addD = () -> {
                D a = sf.Ds().make();
                a.setA(a);
                a.setB(a);
                a.setD(a);
                return a;
            };

            // Block 1: c a b a b b
            addC.get();
            addA.get();
            addB.get();
            addA.get();
            addB.get();
            addB.get();
            sf.flush();

            // Block 2: b d b d
            addB.get();
            addD.get();
            addB.get();
            addD.get();
            sf.flush();

            // Block 3: a c d
            addA.get();
            addC.get();
            addD.get();
            sf.close();
        }

        // write order: IDs follow per-block base-pool layout, not insertion order
        {
            SkillFile sf = SkillFile.open(file, Mode.Read);
            final String types = "aabbbcbbddadc";
            for (int i = 0; i < types.length(); i++) {
                A obj = sf.As().getByID(i + 1);
                Assert.assertEquals(obj.getClass().getSimpleName().toLowerCase().charAt(0), types.charAt(i));
            }
        }

        // type order: iteration groups instances by (sub)type
        {
            SkillFile sf = SkillFile.open(file, Mode.Read);
            final String types = "aaabbbbbdddcc";
            Iterator<A> as = sf.As().typeOrderIterator();
            for (int i = 0; i < types.length(); i++) {
                A obj = as.next();
                Assert.assertEquals(obj.getClass().getSimpleName().toLowerCase().charAt(0), types.charAt(i));
            }
        }

        // self references must have been preserved across the round trip
        {
            SkillFile sf = SkillFile.open(file, Mode.Read);
            for (A a : sf.As())
                Assert.assertEquals(a, a.getA());
            for (B a : sf.Bs())
                Assert.assertEquals(a, a.getB());
            for (C a : sf.Cs())
                Assert.assertEquals(a, a.getC());
            for (D a : sf.Ds())
                Assert.assertEquals(a, a.getD());
        }
    }

    /**
     * Reads a pre-built resource file and checks type sequence and
     * self-referencing fields.
     */
    @Test
    public void subtypesRead() throws Exception {
        SkillFile state = read("localBasePoolOffset.sf");
        String types = "aabbbcbbddacd";

        // check types
        String actualTypes = state.As().stream().map(a -> a.getClass().getSimpleName().toLowerCase()).reduce("",
                String::concat);
        Assert.assertEquals("type order missmatch", types, actualTypes);

        // check fields (all fields are self-references)
        for (A a : state.As())
            Assert.assertEquals(a.a, a);
        for (B b : state.Bs())
            Assert.assertEquals(b.b, b);
        for (C c : state.Cs())
            Assert.assertEquals(c.c, c);
        for (D d : state.Ds())
            Assert.assertEquals(d.d, d);
    }

    /**
     * Creates a file with the same block layout as localBasePoolOffset.sf;
     * passes if flushing each block does not throw.
     */
    @Test
    public void subtypesCreate() throws Exception {
        Path path = tmpFile("lbpo.create");
        SkillFile sf = SkillFile.open(path, Mode.Create, Mode.Append);
        // one string per append block; characters encode the type to create
        String[] blocks = new String[] { "aabbbc", "bbdd", "acd" };
        for (String b : blocks) {
            b.chars().forEach(c -> {
                switch (c) {
                case 'a': {
                    A i = sf.As().make();
                    i.a = i;
                    break;
                }
                case 'b': {
                    B i = sf.Bs().make();
                    i.a = i;
                    i.b = i;
                    break;
                }
                case 'c': {
                    C i = sf.Cs().make();
                    i.a = i;
                    i.c = i;
                    break;
                }
                case 'd': {
                    D i = sf.Ds().make();
                    i.a = i;
                    i.b = i;
                    i.d = i;
                    break;
                }
                }
            });
            sf.flush();
        }
    }

    /**
     * Writes a read state to a new path and verifies that IDs, self-references
     * and type order are identical before and after the write.
     */
    @Test
    public void subtypesWrite() throws Exception {
        Path path = tmpFile("lbpo.write");
        SkillFile state = read("localBasePoolOffset.sf");

        // check self references; skill IDs are 1-based and dense
        long index = 1L;
        for (A instance : state.As()) {
            Assert.assertEquals("index missmatch", instance.a.getSkillID(), index++);
            Assert.assertEquals("self reference corrupted", instance.a, instance);
        }

        state.changePath(path);
        state.flush();

        // check self references again (write might not have restored them)
        index = 1L;
        for (A instance : state.As()) {
            Assert.assertEquals("index missmatch", instance.a.getSkillID(), index++);
            Assert.assertEquals("self reference corrupted", instance.a, instance);
        }

        SkillFile state2 = SkillFile.open(path);

        // check type of deserialized instances
        Iterator<A> is1 = state.As().typeOrderIterator();
        Iterator<A> is2 = state2.As().typeOrderIterator();
        while (is1.hasNext() || is2.hasNext()) {
            assert is1.hasNext() && is2.hasNext() : "same size";
            Assert.assertEquals("check type of deserialized instances", is1.next().getClass(), is2.next().getClass());
        }
    }
}
package pl.temomuko.autostoprace.data.local.photo; import android.annotation.TargetApi; import android.content.ActivityNotFoundException; import android.content.Context; import android.content.Intent; import android.content.pm.PackageManager; import android.content.pm.ResolveInfo; import android.media.ExifInterface; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.os.Environment; import android.provider.MediaStore; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.v4.content.FileProvider; import com.bumptech.glide.load.resource.bitmap.ImageHeaderParser; import com.theartofdev.edmodo.cropper.CropImage; import com.theartofdev.edmodo.cropper.CropImageView; import java.io.File; import java.io.IOException; import java.text.SimpleDateFormat; import java.util.Date; import java.util.List; import javax.inject.Inject; import onactivityresult.ActivityResult; import onactivityresult.OnActivityResult; import pl.temomuko.autostoprace.BuildConfig; import pl.temomuko.autostoprace.ui.base.BaseActivity; public class PhotoShadowActivity extends BaseActivity { private static final int INITIAL_CROP_WINDOW_PADDING_RATIO = 0; private static final String BUNDLE_CAMERA_PHOTO_FILE = "bundle_camera_photo_file"; private static final String BUNDLE_CAMERA_PHOTO_URI = "bundle_camera_photo_uri"; private static final String BUNDLE_MAX_WIDTH_HEIGHT_IN_PX = "max_width_height_in_px"; private static final String REQUEST_TYPE_EXTRA = "request_type_extra"; private static final String ASPECT_RATIO_X_EXTRA = "aspect_ratio_x_extra"; private static final String ASPECT_RATIO_Y_EXTRA = "aspect_ratio_y_extra"; private static final String MAX_HEIGHT_WIDTH_IN_PX_EXTRA = "max_height_width_extra"; private static final int REQUEST_CODE_GALLERY = 0; private static final int REQUEST_CODE_CAMERA = 1; private static final int REQUEST_CODE_CROP_IMAGE = 2; private static final int NOT_SET = -1; private static final String 
GALLERY_INTENT_TYPE = "image/*"; private File cameraPhotoFile; private Uri cameraPhotoUri; private int maxWidthHeightInPx; @Inject ImageController mImageController; public static void startActivity(Context context, ImageSourceType imageSourceType, int aspectRatioX, int aspectRatioY, int maxHeightWidthInPx) { Intent intent = new Intent(context, PhotoShadowActivity.class); intent.putExtra(REQUEST_TYPE_EXTRA, imageSourceType); intent.putExtra(ASPECT_RATIO_X_EXTRA, aspectRatioX); intent.putExtra(ASPECT_RATIO_Y_EXTRA, aspectRatioY); intent.putExtra(MAX_HEIGHT_WIDTH_IN_PX_EXTRA, maxHeightWidthInPx); intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); context.startActivity(intent); } public static void startActivity(Context context, ImageSourceType imageSourceType, int maxHeightWidthInPx) { Intent intent = new Intent(context, PhotoShadowActivity.class); intent.putExtra(REQUEST_TYPE_EXTRA, imageSourceType); intent.putExtra(MAX_HEIGHT_WIDTH_IN_PX_EXTRA, maxHeightWidthInPx); intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); context.startActivity(intent); } @Override protected void onNewIntent(Intent intent) { handleIntent(intent); } @Override public void onCreate(@Nullable Bundle savedInstanceState) { super.onCreate(savedInstanceState); getActivityComponent().inject(this); if (savedInstanceState == null) { handleIntent(getIntent()); } else { cameraPhotoFile = (File) savedInstanceState.getSerializable(BUNDLE_CAMERA_PHOTO_FILE); cameraPhotoUri = savedInstanceState.getParcelable(BUNDLE_CAMERA_PHOTO_URI); maxWidthHeightInPx = savedInstanceState.getInt(BUNDLE_MAX_WIDTH_HEIGHT_IN_PX); } } @Override protected void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); outState.putSerializable(BUNDLE_CAMERA_PHOTO_FILE, cameraPhotoFile); outState.putParcelable(BUNDLE_CAMERA_PHOTO_URI, cameraPhotoUri); outState.putInt(BUNDLE_MAX_WIDTH_HEIGHT_IN_PX, maxWidthHeightInPx); } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { 
super.onActivityResult(requestCode, resultCode, data); ActivityResult.onResult(requestCode, resultCode, data).into(this); } @OnActivityResult(requestCode = REQUEST_CODE_GALLERY) void onActivityResultGalleryRequestCode(int resultCode, Intent intent) { if (RESULT_OK == resultCode) { cropImageFromGallery(intent); } else { finishWithError(new PickPhotoFromGalleryCanceledException()); } } @OnActivityResult(requestCode = REQUEST_CODE_CAMERA) void onActivityResultCameraRequestCode(int resultCode, Intent intent) { if (RESULT_OK == resultCode) { revokeUriReadWritePermissionForKitkat(cameraPhotoUri); cropImageFromCamera(); } else { finishWithError(new TakePhotoCanceledException()); } } @OnActivityResult(requestCode = REQUEST_CODE_CROP_IMAGE) void onActivityResultCropImageRequestCode(int resultCode, Intent intent) { if (RESULT_OK == resultCode) { Uri cropUri = CropImage.getActivityResult(intent).getUri(); if (cameraPhotoUri != null && cameraPhotoFile != null) { cameraPhotoFile.delete(); } finishWithSuccess(cropUri); } else { finishWithError(new PhotoCroppingCanceledException()); } } private void cropImageFromGallery(Intent intent) { try { Uri destination = FileProvider.getUriForFile(this, BuildConfig.APPLICATION_ID, createCropImageFile()); Uri galleryPhoto = intent.getData(); Intent cropIntent = getCropIntent(galleryPhoto, destination); startActivityForResult(cropIntent, REQUEST_CODE_CROP_IMAGE); } catch (IOException e) { finishWithError(e); } } private void cropImageFromCamera() { try { Uri destination = FileProvider.getUriForFile(this, BuildConfig.APPLICATION_ID, createCropImageFile()); Intent cropIntent = getCropIntent(cameraPhotoUri, destination); startActivityForResult(cropIntent, REQUEST_CODE_CROP_IMAGE); } catch (IOException e) { finishWithError(e); } catch (RuntimeException re) { // workaround for Nexus 5X, where the photo URI is not available for a little time if (cameraPhotoUri == null) { takeAPhoto(); } else { finishWithError(re); } } } @NonNull private Intent 
getCropIntent(Uri source, Uri destination) throws IOException, RuntimeException { int rotation = getExifRotationFromUri(source); CropImage.ActivityBuilder cropActivityBuilder = CropImage.activity(source) .setInitialRotation(rotation) .setOutputUri(destination) .setInitialCropWindowPaddingRatio(INITIAL_CROP_WINDOW_PADDING_RATIO) .setRequestedSize(maxWidthHeightInPx, maxWidthHeightInPx, CropImageView.RequestSizeOptions.RESIZE_INSIDE); return cropActivityBuilder.getIntent(this); } private void finishWithError(Throwable throwable) { mImageController.passError(throwable); finish(); } private void finishWithSuccess(Uri uri) { mImageController.passResult(uri); finish(); } private void handleIntent(Intent intent) { maxWidthHeightInPx = intent.getIntExtra(MAX_HEIGHT_WIDTH_IN_PX_EXTRA, NOT_SET); ImageSourceType imageSourceType = (ImageSourceType) intent.getExtras().get(REQUEST_TYPE_EXTRA); if (imageSourceType == null) { throw new IllegalArgumentException("Requested source type cannot be null"); } switch (imageSourceType) { case GALLERY: requestPhotoFromGallery(); break; case CAMERA: takeAPhoto(); break; default: throw new IllegalArgumentException("Unhandled photo source"); } } private void requestPhotoFromGallery() { Intent intent = new Intent(); intent.setType(GALLERY_INTENT_TYPE); intent.setAction( Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT ? 
Intent.ACTION_OPEN_DOCUMENT : Intent.ACTION_GET_CONTENT ); startActivityForResult(intent, REQUEST_CODE_GALLERY); } private void takeAPhoto() { try { cameraPhotoFile = createCameraImageFile(); cameraPhotoUri = FileProvider.getUriForFile(this, BuildConfig.APPLICATION_ID, cameraPhotoFile); Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE); takePictureIntent.putExtra(MediaStore.EXTRA_OUTPUT, cameraPhotoUri); grantUriReadWritePermissionForKitkat(takePictureIntent, cameraPhotoUri); startActivityForResult(takePictureIntent, REQUEST_CODE_CAMERA); } catch (IOException | ActivityNotFoundException e) { finishWithError(e); } } private File createCameraImageFile() throws IOException { String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date()); String imageFileName = "JPEG_" + timeStamp + "_"; File storageDir = getExternalFilesDir(Environment.DIRECTORY_PICTURES); return File.createTempFile(imageFileName, ".jpg", storageDir); } private File createCropImageFile() throws IOException { String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date()); String imageFileName = "JPEG_" + "CROP_" + timeStamp + "_"; File storageDir = getExternalFilesDir(Environment.DIRECTORY_PICTURES); return File.createTempFile(imageFileName, ".jpg", storageDir); } /** * This method needs to be called on kitkat when passing uri to prevent crash */ @TargetApi(Build.VERSION_CODES.KITKAT) private void grantUriReadWritePermissionForKitkat(Intent intent, Uri uri) { if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) { List<ResolveInfo> resolvedIntentActivities = getPackageManager().queryIntentActivities(intent, PackageManager.MATCH_DEFAULT_ONLY); for (ResolveInfo resolvedIntentInfo : resolvedIntentActivities) { String packageName = resolvedIntentInfo.activityInfo.packageName; grantUriPermission(packageName, uri, Intent.FLAG_GRANT_WRITE_URI_PERMISSION | Intent.FLAG_GRANT_READ_URI_PERMISSION); } } } /** * This method needs to be called on kitkat after 
onActivityResult */ @TargetApi(Build.VERSION_CODES.KITKAT) private void revokeUriReadWritePermissionForKitkat(Uri uri) { if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) { revokeUriPermission(uri, Intent.FLAG_GRANT_WRITE_URI_PERMISSION | Intent.FLAG_GRANT_READ_URI_PERMISSION); } } /** * Returns rotation saved in exif photo data, some devices handle photo rotation this way e.g. most Samsung devices. * ImageHeaderParser used here is a part of Glide library. * * @throws java.io.FileNotFoundException if the provided URI could not be opened. * @throws IOException if ImageHeaderParserFail. */ private int getExifRotationFromUri(Uri uri) throws IOException, RuntimeException { return exifToDegrees(new ImageHeaderParser(getContentResolver().openInputStream(uri)).getOrientation()); } private int exifToDegrees(int exifOrientation) { if (exifOrientation == ExifInterface.ORIENTATION_ROTATE_90) { return 90; } else if (exifOrientation == ExifInterface.ORIENTATION_ROTATE_180) { return 180; } else if (exifOrientation == ExifInterface.ORIENTATION_ROTATE_270) { return 270; } return 0; } public static class ImageActionCanceledException extends Exception { } public static class PickPhotoFromGalleryCanceledException extends ImageActionCanceledException { } public static class TakePhotoCanceledException extends ImageActionCanceledException { } public static class PhotoCroppingCanceledException extends ImageActionCanceledException { } }
/*
 * Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
 */
package com.intellij.usages.impl;

import com.intellij.icons.AllIcons;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CustomShortcutSet;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.project.Project;
import com.intellij.usageView.UsageViewBundle;
import com.intellij.usages.UsageView;
import com.intellij.usages.UsageViewSettings;
import com.intellij.usages.impl.rules.*;
import com.intellij.usages.rules.UsageGroupingRule;
import com.intellij.usages.rules.UsageGroupingRuleProvider;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import java.awt.event.InputEvent;
import java.awt.event.KeyEvent;
import java.util.ArrayList;
import java.util.List;

/**
 * Default provider of the usage-view grouping rules (by usage type, scope,
 * module, package/directory, file structure) and of the toolbar toggle
 * actions that switch each rule on or off.  Subclasses can disable whole
 * rule families by overriding the {@code supports*Rule()} hooks.
 *
 * @author max
 */
public class UsageGroupingRuleProviderImpl implements UsageGroupingRuleProvider {
  // Extension hooks: subclasses return false to suppress a rule family.
  protected boolean supportsNonCodeRule() {
    return true;
  }

  protected boolean supportsModuleRule() {
    return true;
  }

  protected boolean supportsScopesRule() {
    return true;
  }

  @NotNull
  @Override
  public UsageGroupingRule[] getActiveRules(@NotNull Project project) {
    // Delegates to the overload using the application-level settings.
    return getActiveRules(project, UsageViewSettings.getInstance());
  }

  /**
   * Builds the ordered list of grouping rules enabled by the given settings.
   * The order of the rules here defines the nesting order of groups in the
   * usage tree (outermost first).
   */
  @Override
  @NotNull
  public UsageGroupingRule[] getActiveRules(@NotNull Project project, @NotNull UsageViewSettings usageViewSettings) {
    List<UsageGroupingRule> rules = new ArrayList<>();
    if (supportsNonCodeRule()) {
      rules.add(new NonCodeUsageGroupingRule(project));
    }
    if (supportsScopesRule() && usageViewSettings.isGroupByScope()) {
      rules.add(new UsageScopeGroupingRule());
    }
    if (usageViewSettings.isGroupByUsageType()) {
      rules.add(new UsageTypeGroupingRule());
    }
    if (supportsModuleRule() && usageViewSettings.isGroupByModule()) {
      rules.add(new ModuleGroupingRule(project, usageViewSettings.isFlattenModules()));
    }
    if (usageViewSettings.isGroupByPackage()) {
      rules.add(DirectoryGroupingRule.getInstance(project));
    }
    if (usageViewSettings.isGroupByFileStructure()) {
      // Language plugins contribute structure-aware rules via this EP.
      FileStructureGroupRuleProvider[] providers = Extensions.getExtensions(FileStructureGroupRuleProvider.EP_NAME);
      for (FileStructureGroupRuleProvider ruleProvider : providers) {
        ContainerUtil.addIfNotNull(rules, ruleProvider.getUsageGroupingRule(project, usageViewSettings));
      }
    }
    else {
      // Without file-structure grouping, usages are at least grouped per file.
      rules.add(new FileGroupingRule(project));
    }
    return rules.toArray(UsageGroupingRule.EMPTY_ARRAY);
  }

  /**
   * Creates the toolbar toggle actions for the view, registering a keyboard
   * shortcut for each.  Action order differs depending on whether usage-type
   * filtering is available in this view's presentation.
   */
  @Override
  @NotNull
  public AnAction[] createGroupingActions(UsageView view) {
    UsageViewImpl impl = (UsageViewImpl)view;
    JComponent component = impl.getComponent();

    GroupByModuleTypeAction groupByModuleTypeAction = supportsModuleRule() ? new GroupByModuleTypeAction(impl) : null;
    if (groupByModuleTypeAction != null) {
      groupByModuleTypeAction.registerCustomShortcutSet(
        new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_D, InputEvent.CTRL_DOWN_MASK)), component, impl);
    }

    GroupByFileStructureAction groupByFileStructureAction = createGroupByFileStructureAction(impl);

    GroupByScopeAction groupByScopeAction = supportsScopesRule() ? new GroupByScopeAction(impl) : null;

    GroupByPackageAction groupByPackageAction = new GroupByPackageAction(impl);
    groupByPackageAction.registerCustomShortcutSet(
      new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_P, InputEvent.CTRL_DOWN_MASK)), component, impl);

    ArrayList<AnAction> result = ContainerUtil.newArrayList();

    if (view.getPresentation().isUsageTypeFilteringAvailable()) {
      GroupByUsageTypeAction groupByUsageTypeAction = new GroupByUsageTypeAction(impl);
      groupByUsageTypeAction.registerCustomShortcutSet(
        new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_T, InputEvent.CTRL_DOWN_MASK)), component, impl);

      ContainerUtil.addIfNotNull(result, groupByUsageTypeAction);
      ContainerUtil.addIfNotNull(result, groupByScopeAction);
      ContainerUtil.addIfNotNull(result, groupByModuleTypeAction);
      if (supportsModuleRule()) {
        result.add(new FlattenModulesAction(impl));
      }
      ContainerUtil.addIfNotNull(result, groupByPackageAction);
      ContainerUtil.addIfNotNull(result, groupByFileStructureAction);
    }
    else {
      ContainerUtil.addIfNotNull(result, groupByScopeAction);
      ContainerUtil.addIfNotNull(result, groupByModuleTypeAction);
      ContainerUtil.addIfNotNull(result, groupByPackageAction);
      ContainerUtil.addIfNotNull(result, groupByFileStructureAction);
    }
    return result.toArray(AnAction.EMPTY_ARRAY);
  }

  /**
   * Creates the "group by file structure" toggle with its Ctrl+M shortcut;
   * shared with subclass providers.
   */
  public static GroupByFileStructureAction createGroupByFileStructureAction(UsageViewImpl impl) {
    final JComponent component = impl.getComponent();
    final GroupByFileStructureAction groupByFileStructureAction = new GroupByFileStructureAction(impl);
    groupByFileStructureAction.registerCustomShortcutSet(
      new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_M, InputEvent.CTRL_DOWN_MASK)), component, impl);
    return groupByFileStructureAction;
  }

  // Each inner action simply mirrors one boolean flag in UsageViewSettings.

  private static class GroupByUsageTypeAction extends RuleAction {
    private GroupByUsageTypeAction(UsageViewImpl view) {
      super(view, UsageViewBundle.message("action.group.by.usage.type"), AllIcons.General.Filter); //TODO: special icon
    }

    @Override
    protected boolean getOptionValue() {
      return myView.getUsageViewSettings().isGroupByUsageType();
    }

    @Override
    protected void setOptionValue(boolean value) {
      myView.getUsageViewSettings().setGroupByUsageType(value);
    }
  }

  private static class GroupByScopeAction extends RuleAction {
    private GroupByScopeAction(UsageViewImpl view) {
      super(view, "Group by test/production", AllIcons.Actions.GroupByTestProduction);
    }

    @Override
    protected boolean getOptionValue() {
      return myView.getUsageViewSettings().isGroupByScope();
    }

    @Override
    protected void setOptionValue(boolean value) {
      myView.getUsageViewSettings().setGroupByScope(value);
    }
  }

  private static class GroupByModuleTypeAction extends RuleAction {
    private GroupByModuleTypeAction(UsageViewImpl view) {
      super(view, UsageViewBundle.message("action.group.by.module"), AllIcons.Actions.GroupByModule);
    }

    @Override
    protected boolean getOptionValue() {
      return myView.getUsageViewSettings().isGroupByModule();
    }

    @Override
    protected void setOptionValue(boolean value) {
      myView.getUsageViewSettings().setGroupByModule(value);
    }
  }

  private static class FlattenModulesAction extends RuleAction {
    private FlattenModulesAction(UsageViewImpl view) {
      super(view, UsageViewBundle.message("action.flatten.modules"), AllIcons.ObjectBrowser.FlattenModules);
    }

    @Override
    protected boolean getOptionValue() {
      return myView.getUsageViewSettings().isFlattenModules();
    }

    @Override
    protected void setOptionValue(boolean value) {
      myView.getUsageViewSettings().setFlattenModules(value);
    }

    @Override
    public void update(@NotNull AnActionEvent e) {
      super.update(e);
      // Flattening only makes sense while module grouping is active.
      e.getPresentation().setEnabled(myView.getUsageViewSettings().isGroupByModule());
    }
  }

  private static class GroupByPackageAction extends RuleAction {
    private GroupByPackageAction(UsageViewImpl view) {
      super(view, DirectoryGroupingRule.getInstance(view.getProject()).getActionTitle(), AllIcons.Actions.GroupByPackage);
    }

    @Override
    protected boolean getOptionValue() {
      return myView.getUsageViewSettings().isGroupByPackage();
    }

    @Override
    protected void setOptionValue(boolean value) {
      myView.getUsageViewSettings().setGroupByPackage(value);
    }
  }

  private static class GroupByFileStructureAction extends RuleAction {
    private GroupByFileStructureAction(UsageViewImpl view) {
      super(view, UsageViewBundle.message("action.group.by.file.structure"), AllIcons.Actions.GroupByMethod);
    }

    @Override
    protected boolean getOptionValue() {
      return myView.getUsageViewSettings().isGroupByFileStructure();
    }

    @Override
    protected void setOptionValue(boolean value) {
      myView.getUsageViewSettings().setGroupByFileStructure(value);
    }
  }
}
package kr.pe.burt.android.lib.fragmentnavigationcontroller; import android.annotation.TargetApi; import android.content.Context; import android.os.Build; import android.util.AttributeSet; import android.view.ViewTreeObserver; import android.widget.FrameLayout; /** * Created by burt on 2016. 5. 26.. */ class AndroidFragmentFrameLayout extends FrameLayout { private float yFraction = 0; private float xFraction = 0; private ViewTreeObserver.OnPreDrawListener preDrawListener = null; public AndroidFragmentFrameLayout(Context context) { super(context); } public AndroidFragmentFrameLayout(Context context, AttributeSet attrs) { super(context, attrs); } public AndroidFragmentFrameLayout(Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); } @TargetApi(Build.VERSION_CODES.LOLLIPOP) public AndroidFragmentFrameLayout(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) { super(context, attrs, defStyleAttr, defStyleRes); } public void setAccordionPivotZero(float value) { setAlpha(1.0f); setScaleX(value); setPivotX(0); } public void setAccordionPivotWidth(float value) { setAlpha(1.0f); setScaleX(value); setPivotX(getWidth()); } public void setAccordionVerticalPivotZero(float value) { setAlpha(1.0f); setScaleY(value); setPivotY(0); } public void setAccordionPivotHeight(float value) { setAlpha(1.0f); setScaleY(value); setPivotY(getHeight()); } public void setCube(float fraction) { float translationX = getWidth() * fraction; setTranslationX(translationX); setRotationY(90 * fraction); setPivotX(0); setPivotY(getHeight() / 2); } public void setCubeVertical(float fraction) { float translationY = getHeight() * fraction; setTranslationY(translationY); setRotationX(-90 * fraction); setPivotY(0); setPivotX(getWidth() / 2); } public void setCubeBack(float fraction) { float translationX = getWidth() * fraction; setTranslationX(translationX); setRotationY(90 * fraction); setPivotY(getHeight() / 2); setPivotX(getWidth()); } public 
void setCubeVerticalBack(float fraction) { float translationY = getHeight() * fraction; setTranslationY(translationY); setRotationX(-90 * fraction); setPivotX(getWidth() / 2); setPivotY(getHeight()); } public void setGlide(float fraction) { float translationX = getWidth() * fraction; setTranslationX(translationX); setRotationY(90 * fraction); setPivotX(0); } public void setGlideBack(float fraction) { float translationX = getWidth() * fraction; setTranslationX(translationX); setRotationY(90 * fraction); setPivotX(0); setPivotY(getHeight() / 2); } public void setRotateDown(float fraction) { float translationX = getWidth() * fraction; setTranslationX(translationX); setRotation(20 * fraction); setPivotY(getHeight()); setPivotX(getWidth() / 2); } public void setRotateUp(float fraction) { float translationX = getWidth() * fraction; setTranslationX(translationX); setRotation(-20 * fraction); setPivotY(0); setPivotX(getWidth() / 2); } public void setRotateLeft(float fraction) { float translationY = getHeight() * fraction; setTranslationY(translationY); setRotation(20 * fraction); setPivotX(0); setPivotY(getHeight() / 2); } public void setRotateRight(float fraction) { float translationY = getHeight() * fraction; setTranslationY(translationY); setRotation(-20 * fraction); setPivotX(getWidth()); setPivotY(getHeight() / 2); } public void setYFraction(float fraction) { this.yFraction = fraction; if (getHeight() == 0) { if (preDrawListener == null) { preDrawListener = new ViewTreeObserver.OnPreDrawListener() { @Override public boolean onPreDraw() { getViewTreeObserver().removeOnPreDrawListener( preDrawListener); setYFraction(yFraction); return true; } }; getViewTreeObserver().addOnPreDrawListener(preDrawListener); } return; } float translationY = getHeight() * fraction; setTranslationY(translationY); } public void setXFraction(float fraction) { this.xFraction = fraction; if (getWidth() == 0) { if (preDrawListener == null) { preDrawListener = new 
ViewTreeObserver.OnPreDrawListener() { @Override public boolean onPreDraw() { getViewTreeObserver().removeOnPreDrawListener( preDrawListener); setXFraction(xFraction); return true; } }; getViewTreeObserver().addOnPreDrawListener(preDrawListener); } return; } float translationX = getWidth() * fraction; setTranslationX(translationX); } public void setTableHorizontalPivotZero(float fraction) { setRotationY(90 * fraction); setPivotX(0); setPivotY(getHeight() / 2); } public void setTableHorizontalPivotWidth(float fraction) { setRotationY(-90 * fraction); setPivotX(getWidth()); setPivotY(getHeight() / 2); } public void setTableVerticalPivotZero(float fraction) { setRotationX(-90 * fraction); setPivotX(getWidth() / 2); setPivotY(0); } public void setTableVerticalPivotHeight(float fraction) { setRotationX(90 * fraction); setPivotX(getWidth() / 2); setPivotY(getHeight()); } public void setZoomFromCornerPivotHG(float fraction) { setScaleX(fraction); setScaleY(fraction); setPivotX(getWidth()); setPivotY(getHeight()); } public void setZoomFromCornerPivotZero(float fraction) { setScaleX(fraction); setScaleY(fraction); setPivotX(0); setPivotY(0); } public void setZoomFromCornerPivotWidth(float fraction) { setScaleX(fraction); setScaleY(fraction); setPivotX(getWidth()); setPivotY(0); } public void setZoomFromCornerPivotHeight(float fraction) { setScaleX(fraction); setScaleY(fraction); setPivotX(0); setPivotY(getHeight()); } public void setZoomSlideHorizontal(float fraction) { setTranslationX(getWidth() * fraction); setPivotX(getWidth() / 2); setPivotY(getHeight() / 2); } public void setZoomSlideVertical(float fraction) { setTranslationY(getHeight() * fraction); setPivotX(getWidth() / 2); setPivotY(getHeight() / 2); } }
/*
 * Copyright 2014 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License, version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package io.netty.handler.codec.http2;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.CompositeByteBuf;
import io.netty.buffer.EmptyByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPromise;
import io.netty.channel.DefaultChannelPromise;
import io.netty.util.AsciiString;
import io.netty.util.concurrent.EventExecutor;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.util.LinkedList;
import java.util.List;
import static io.netty.buffer.Unpooled.EMPTY_BUFFER;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_MAX_HEADER_SIZE;
import static io.netty.handler.codec.http2.Http2TestUtil.randomString;
import static io.netty.util.CharsetUtil.UTF_8;
import static java.lang.Math.min;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyShort;
import static org.mockito.Matchers.eq;
import static org.mockito.Matchers.isA;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Tests encoding/decoding each HTTP2 frame type.
 *
 * Round-trip strategy: each test writes a frame through a real
 * {@link DefaultHttp2FrameWriter} into a mocked {@link ChannelHandlerContext},
 * then {@link #readFrames()} collects every buffer handed to {@code ctx.write(...)}
 * and feeds the concatenation to a real {@link DefaultHttp2FrameReader}, verifying
 * (via the mocked {@link Http2FrameListener}) that the decoded frame matches what
 * was written.
 */
public class Http2FrameRoundtripTest {
    // Payload pattern repeated to fill data frames of arbitrary size.
    private static final byte[] MESSAGE = "hello world".getBytes(UTF_8);
    // Maximum legal stream identifier (31-bit).
    private static final int STREAM_ID = 0x7FFFFFFF;
    // Maximum legal window-update increment (31-bit).
    private static final int WINDOW_UPDATE = 0x7FFFFFFF;
    // Maximum 32-bit error code, kept in a long because Java ints are signed.
    private static final long ERROR_CODE = 0xFFFFFFFFL;

    @Mock
    private Http2FrameListener listener;
    @Mock
    private ChannelHandlerContext ctx;
    @Mock
    private EventExecutor executor;
    @Mock
    private Channel channel;
    @Mock
    private ByteBufAllocator alloc;

    private Http2FrameWriter writer;
    private Http2FrameReader reader;
    // Buffers captured during a test; released and ref-count-checked in teardown().
    private List<ByteBuf> needReleasing = new LinkedList<ByteBuf>();

    /**
     * Wires the mocked context so the real writer can run against it:
     * the allocator hands out unpooled heap buffers and each call to
     * {@code ctx.newPromise()} yields a fresh promise bound to the mock channel.
     */
    @Before
    public void setup() throws Exception {
        MockitoAnnotations.initMocks(this);

        when(ctx.alloc()).thenReturn(alloc);
        when(ctx.executor()).thenReturn(executor);
        when(ctx.channel()).thenReturn(channel);
        doAnswer(new Answer<ByteBuf>() {
            @Override
            public ByteBuf answer(InvocationOnMock in) throws Throwable {
                return Unpooled.buffer();
            }
        }).when(alloc).buffer();
        doAnswer(new Answer<ByteBuf>() {
            @Override
            public ByteBuf answer(InvocationOnMock in) throws Throwable {
                return Unpooled.buffer((Integer) in.getArguments()[0]);
            }
        }).when(alloc).buffer(anyInt());
        doAnswer(new Answer<ChannelPromise>() {
            @Override
            public ChannelPromise answer(InvocationOnMock invocation) throws Throwable {
                return new DefaultChannelPromise(channel);
            }
        }).when(ctx).newPromise();

        writer = new DefaultHttp2FrameWriter();
        reader = new DefaultHttp2FrameReader(false);
    }

    /**
     * Releases every tracked buffer, then asserts the ref counts dropped to the
     * expected final value — guarding against leaks introduced by the codec.
     */
    @After
    public void teardown() {
        try {
            // Release all of the buffers.
            for (ByteBuf buf : needReleasing) {
                buf.release();
            }
            // Now verify that all of the reference counts are zero.
            for (ByteBuf buf : needReleasing) {
                int expectedFinalRefCount = 0;
                if (buf.isReadOnly() || buf instanceof EmptyByteBuf) {
                    // Special case for when we're writing slices of the padding buffer.
                    expectedFinalRefCount = 1;
                }
                assertEquals(expectedFinalRefCount, buf.refCnt());
            }
        } finally {
            needReleasing.clear();
        }
    }

    // ---- DATA frames ----

    @Test
    public void emptyDataShouldMatch() throws Exception {
        final ByteBuf data = EMPTY_BUFFER;
        writer.writeData(ctx, STREAM_ID, data.slice(), 0, false, ctx.newPromise());
        readFrames();
        verify(listener).onDataRead(eq(ctx), eq(STREAM_ID), eq(data), eq(0), eq(false));
    }

    @Test
    public void dataShouldMatch() throws Exception {
        final ByteBuf data = data(10);
        writer.writeData(ctx, STREAM_ID, data.slice(), 0, false, ctx.newPromise());
        readFrames();
        verify(listener).onDataRead(eq(ctx), eq(STREAM_ID), eq(data), eq(0), eq(false));
    }

    @Test
    public void dataWithPaddingShouldMatch() throws Exception {
        final ByteBuf data = data(10);
        writer.writeData(ctx, STREAM_ID, data.slice(), 0xFF, true, ctx.newPromise());
        readFrames();
        verify(listener).onDataRead(eq(ctx), eq(STREAM_ID), eq(data), eq(0xFF), eq(true));
    }

    /**
     * A 1 MiB payload forces the writer to split the data across multiple
     * DATA frames; the reassembled chunks and the total padding must still
     * equal the original input.
     */
    @Test
    public void largeDataFrameShouldMatch() throws Exception {
        // Create a large message to force chunking.
        final ByteBuf originalData = data(1024 * 1024);
        final int originalPadding = 100;
        final boolean endOfStream = true;

        writer.writeData(ctx, STREAM_ID, originalData.slice(), originalPadding,
                endOfStream, ctx.newPromise());
        readFrames();

        // Verify that at least one frame was sent with eos=false and exactly one with eos=true.
        verify(listener, atLeastOnce()).onDataRead(eq(ctx), eq(STREAM_ID), any(ByteBuf.class),
                anyInt(), eq(false));
        verify(listener).onDataRead(eq(ctx), eq(STREAM_ID), any(ByteBuf.class),
                anyInt(), eq(true));

        // Capture the read data and padding.
        ArgumentCaptor<ByteBuf> dataCaptor = ArgumentCaptor.forClass(ByteBuf.class);
        ArgumentCaptor<Integer> paddingCaptor = ArgumentCaptor.forClass(Integer.class);
        verify(listener, atLeastOnce()).onDataRead(eq(ctx), eq(STREAM_ID), dataCaptor.capture(),
                paddingCaptor.capture(), anyBoolean());

        // Make sure the data matches the original.
        for (ByteBuf chunk : dataCaptor.getAllValues()) {
            ByteBuf originalChunk = originalData.readSlice(chunk.readableBytes());
            assertEquals(originalChunk, chunk);
        }
        assertFalse(originalData.isReadable());

        // Make sure the padding matches the original.
        int totalReadPadding = 0;
        for (int framePadding : paddingCaptor.getAllValues()) {
            totalReadPadding += framePadding;
        }
        assertEquals(originalPadding, totalReadPadding);
    }

    // ---- HEADERS frames ----

    @Test
    public void emptyHeadersShouldMatch() throws Exception {
        final Http2Headers headers = EmptyHttp2Headers.INSTANCE;
        writer.writeHeaders(ctx, STREAM_ID, headers, 0, true, ctx.newPromise());
        readFrames();
        verify(listener).onHeadersRead(eq(ctx), eq(STREAM_ID), eq(headers), eq(0), eq(true));
    }

    @Test
    public void emptyHeadersWithPaddingShouldMatch() throws Exception {
        final Http2Headers headers = EmptyHttp2Headers.INSTANCE;
        writer.writeHeaders(ctx, STREAM_ID, headers, 0xFF, true, ctx.newPromise());
        readFrames();
        verify(listener).onHeadersRead(eq(ctx), eq(STREAM_ID), eq(headers), eq(0xFF), eq(true));
    }

    @Test
    public void binaryHeadersWithoutPriorityShouldMatch() throws Exception {
        final Http2Headers headers = binaryHeaders();
        writer.writeHeaders(ctx, STREAM_ID, headers, 0, true, ctx.newPromise());
        readFrames();
        verify(listener).onHeadersRead(eq(ctx), eq(STREAM_ID), eq(headers), eq(0), eq(true));
    }

    @Test
    public void headersFrameWithoutPriorityShouldMatch() throws Exception {
        final Http2Headers headers = headers();
        writer.writeHeaders(ctx, STREAM_ID, headers, 0, true, ctx.newPromise());
        readFrames();
        verify(listener).onHeadersRead(eq(ctx), eq(STREAM_ID), eq(headers), eq(0), eq(true));
    }

    @Test
    public void headersFrameWithPriorityShouldMatch() throws Exception {
        final Http2Headers headers = headers();
        writer.writeHeaders(ctx, STREAM_ID, headers, 4, (short) 255, true, 0, true,
                ctx.newPromise());
        readFrames();
        verify(listener).onHeadersRead(eq(ctx), eq(STREAM_ID), eq(headers), eq(4), eq((short) 255),
                eq(true), eq(0), eq(true));
    }

    @Test
    public void headersWithPaddingWithoutPriorityShouldMatch() throws Exception {
        final Http2Headers headers = headers();
        writer.writeHeaders(ctx, STREAM_ID, headers, 0xFF, true, ctx.newPromise());
        readFrames();
        verify(listener).onHeadersRead(eq(ctx), eq(STREAM_ID), eq(headers), eq(0xFF), eq(true));
    }

    @Test
    public void headersWithPaddingWithPriorityShouldMatch() throws Exception {
        final Http2Headers headers = headers();
        writer.writeHeaders(ctx, STREAM_ID, headers, 2, (short) 3, true, 0xFF, true,
                ctx.newPromise());
        readFrames();
        verify(listener).onHeadersRead(eq(ctx), eq(STREAM_ID), eq(headers), eq(2), eq((short) 3),
                eq(true), eq(0xFF), eq(true));
    }

    /** Large headers force HEADERS + CONTINUATION frames on the wire. */
    @Test
    public void continuedHeadersShouldMatch() throws Exception {
        final Http2Headers headers = largeHeaders();
        writer.writeHeaders(ctx, STREAM_ID, headers, 2, (short) 3, true, 0, true,
                ctx.newPromise());
        readFrames();
        verify(listener)
                .onHeadersRead(eq(ctx), eq(STREAM_ID), eq(headers), eq(2), eq((short) 3), eq(true),
                        eq(0), eq(true));
    }

    @Test
    public void continuedHeadersWithPaddingShouldMatch() throws Exception {
        final Http2Headers headers = largeHeaders();
        writer.writeHeaders(ctx, STREAM_ID, headers, 2, (short) 3, true, 0xFF, true,
                ctx.newPromise());
        readFrames();
        verify(listener).onHeadersRead(eq(ctx), eq(STREAM_ID), eq(headers), eq(2), eq((short) 3),
                eq(true), eq(0xFF), eq(true));
    }

    /** Oversized header blocks must be rejected by the reader, not delivered. */
    @Test
    public void headersThatAreTooBigShouldFail() throws Exception {
        final Http2Headers headers = headersOfSize(DEFAULT_MAX_HEADER_SIZE + 1);
        writer.writeHeaders(ctx, STREAM_ID, headers, 2, (short) 3, true, 0xFF, true,
                ctx.newPromise());
        try {
            readFrames();
            fail();
        } catch (Http2Exception e) {
            verify(listener, never()).onHeadersRead(any(ChannelHandlerContext.class), anyInt(),
                    any(Http2Headers.class), anyInt(), anyShort(), anyBoolean(), anyInt(),
                    anyBoolean());
        }
    }

    // ---- PUSH_PROMISE frames ----

    @Test
    public void emptyPushPromiseShouldMatch() throws Exception {
        final Http2Headers headers = EmptyHttp2Headers.INSTANCE;
        writer.writePushPromise(ctx, STREAM_ID, 2, headers, 0, ctx.newPromise());
        readFrames();
        verify(listener).onPushPromiseRead(eq(ctx), eq(STREAM_ID), eq(2), eq(headers), eq(0));
    }

    @Test
    public void pushPromiseFrameShouldMatch() throws Exception {
        final Http2Headers headers = headers();
        writer.writePushPromise(ctx, STREAM_ID, 1, headers, 5, ctx.newPromise());
        readFrames();
        verify(listener).onPushPromiseRead(eq(ctx), eq(STREAM_ID), eq(1), eq(headers), eq(5));
    }

    @Test
    public void pushPromiseWithPaddingShouldMatch() throws Exception {
        final Http2Headers headers = headers();
        writer.writePushPromise(ctx, STREAM_ID, 2, headers, 0xFF, ctx.newPromise());
        readFrames();
        verify(listener).onPushPromiseRead(eq(ctx), eq(STREAM_ID), eq(2), eq(headers), eq(0xFF));
    }

    @Test
    public void continuedPushPromiseShouldMatch() throws Exception {
        final Http2Headers headers = largeHeaders();
        writer.writePushPromise(ctx, STREAM_ID, 2, headers, 0, ctx.newPromise());
        readFrames();
        verify(listener).onPushPromiseRead(eq(ctx), eq(STREAM_ID), eq(2), eq(headers), eq(0));
    }

    @Test
    public void continuedPushPromiseWithPaddingShouldMatch() throws Exception {
        final Http2Headers headers = largeHeaders();
        writer.writePushPromise(ctx, STREAM_ID, 2, headers, 0xFF, ctx.newPromise());
        readFrames();
        verify(listener).onPushPromiseRead(eq(ctx), eq(STREAM_ID), eq(2), eq(headers), eq(0xFF));
    }

    // ---- Other frame types ----

    @Test
    public void goAwayFrameShouldMatch() throws Exception {
        final String text = "test";
        final ByteBuf data = buf(text.getBytes());

        writer.writeGoAway(ctx, STREAM_ID, ERROR_CODE, data.slice(), ctx.newPromise());
        readFrames();

        ArgumentCaptor<ByteBuf> captor = ArgumentCaptor.forClass(ByteBuf.class);
        verify(listener).onGoAwayRead(eq(ctx), eq(STREAM_ID), eq(ERROR_CODE), captor.capture());
        assertEquals(data, captor.getValue());
    }

    @Test
    public void pingFrameShouldMatch() throws Exception {
        final ByteBuf data = buf("01234567".getBytes(UTF_8));

        writer.writePing(ctx, false, data.slice(), ctx.newPromise());
        readFrames();

        ArgumentCaptor<ByteBuf> captor = ArgumentCaptor.forClass(ByteBuf.class);
        verify(listener).onPingRead(eq(ctx), captor.capture());
        assertEquals(data, captor.getValue());
    }

    @Test
    public void pingAckFrameShouldMatch() throws Exception {
        final ByteBuf data = buf("01234567".getBytes(UTF_8));

        writer.writePing(ctx, true, data.slice(), ctx.newPromise());
        readFrames();

        ArgumentCaptor<ByteBuf> captor = ArgumentCaptor.forClass(ByteBuf.class);
        verify(listener).onPingAckRead(eq(ctx), captor.capture());
        assertEquals(data, captor.getValue());
    }

    @Test
    public void priorityFrameShouldMatch() throws Exception {
        writer.writePriority(ctx, STREAM_ID, 1, (short) 1, true, ctx.newPromise());
        readFrames();
        verify(listener).onPriorityRead(eq(ctx), eq(STREAM_ID), eq(1), eq((short) 1), eq(true));
    }

    @Test
    public void rstStreamFrameShouldMatch() throws Exception {
        writer.writeRstStream(ctx, STREAM_ID, ERROR_CODE, ctx.newPromise());
        readFrames();
        verify(listener).onRstStreamRead(eq(ctx), eq(STREAM_ID), eq(ERROR_CODE));
    }

    @Test
    public void emptySettingsFrameShouldMatch() throws Exception {
        final Http2Settings settings = new Http2Settings();
        writer.writeSettings(ctx, settings, ctx.newPromise());
        readFrames();
        verify(listener).onSettingsRead(eq(ctx), eq(settings));
    }

    @Test
    public void settingsShouldStripShouldMatch() throws Exception {
        final Http2Settings settings = new Http2Settings();
        settings.pushEnabled(true);
        settings.headerTableSize(4096);
        settings.initialWindowSize(123);
        settings.maxConcurrentStreams(456);

        writer.writeSettings(ctx, settings, ctx.newPromise());
        readFrames();
        verify(listener).onSettingsRead(eq(ctx), eq(settings));
    }

    @Test
    public void settingsAckShouldMatch() throws Exception {
        writer.writeSettingsAck(ctx, ctx.newPromise());
        readFrames();
        verify(listener).onSettingsAckRead(eq(ctx));
    }

    @Test
    public void windowUpdateFrameShouldMatch() throws Exception {
        writer.writeWindowUpdate(ctx, STREAM_ID, WINDOW_UPDATE, ctx.newPromise());
        readFrames();
        verify(listener).onWindowUpdateRead(eq(ctx), eq(STREAM_ID), eq(WINDOW_UPDATE));
    }

    // ---- Helpers ----

    /** Feeds everything the writer produced back through the reader. */
    private void readFrames() throws Http2Exception {
        // Now read all of the written frames.
        ByteBuf write = captureWrites();
        reader.readFrame(ctx, write, listener);
    }

    /** Builds a buffer of {@code size} bytes by repeating {@link #MESSAGE}. */
    private ByteBuf data(int size) {
        byte[] data = new byte[size];
        for (int ix = 0; ix < data.length;) {
            int length = min(MESSAGE.length, data.length - ix);
            System.arraycopy(MESSAGE, 0, data, ix, length);
            ix += length;
        }
        return buf(data);
    }

    private ByteBuf buf(byte[] bytes) {
        return Unpooled.wrappedBuffer(bytes);
    }

    /** Registers a buffer to be released and leak-checked in {@link #teardown()}. */
    private <T extends ByteBuf> T releaseLater(T buf) {
        needReleasing.add(buf);
        return buf;
    }

    /**
     * Collects every buffer written to the mocked context into a single
     * composite buffer, retaining each component so teardown() can verify
     * final reference counts.
     */
    private ByteBuf captureWrites() {
        ArgumentCaptor<ByteBuf> captor = ArgumentCaptor.forClass(ByteBuf.class);
        verify(ctx, atLeastOnce()).write(captor.capture(), isA(ChannelPromise.class));
        CompositeByteBuf composite = releaseLater(Unpooled.compositeBuffer());
        for (ByteBuf buf : captor.getAllValues()) {
            buf = releaseLater(buf.retain());
            composite.addComponent(true, buf);
        }
        return composite;
    }

    /** Small, realistic request header set (pseudo-headers + one random pair). */
    private static Http2Headers headers() {
        return new DefaultHttp2Headers(false).method(AsciiString.of("GET")).scheme(AsciiString.of("https"))
                .authority(AsciiString.of("example.org")).path(AsciiString.of("/some/path/resource2"))
                .add(randomString(), randomString());
    }

    /** Header set big enough to require CONTINUATION frames. */
    private static Http2Headers largeHeaders() {
        DefaultHttp2Headers headers = new DefaultHttp2Headers(false);
        for (int i = 0; i < 100; ++i) {
            String key = "this-is-a-test-header-key-" + i;
            String value = "this-is-a-test-header-value-" + i;
            headers.add(AsciiString.of(key), AsciiString.of(value));
        }
        return headers;
    }

    /** Builds headers whose encoded size is at least {@code minSize} bytes. */
    private Http2Headers headersOfSize(final int minSize) {
        final AsciiString singleByte = new AsciiString(new byte[]{0}, false);
        DefaultHttp2Headers headers = new DefaultHttp2Headers(false);
        for (int size = 0; size < minSize; size += 2) {
            headers.add(singleByte, singleByte);
        }
        return headers;
    }

    /** Headers with random (possibly non-ASCII) names/values. */
    private static Http2Headers binaryHeaders() {
        DefaultHttp2Headers headers = new DefaultHttp2Headers(false);
        for (int ix = 0; ix < 10; ++ix) {
            headers.add(randomString(), randomString());
        }
        return headers;
    }
}
package graphql.schema.idl;

import graphql.GraphQLError;
import graphql.Internal;
import graphql.language.AstPrinter;
import graphql.language.FieldDefinition;
import graphql.language.ImplementingTypeDefinition;
import graphql.language.InputValueDefinition;
import graphql.language.InterfaceTypeDefinition;
import graphql.language.InterfaceTypeExtensionDefinition;
import graphql.language.NonNullType;
import graphql.language.ObjectTypeDefinition;
import graphql.language.ObjectTypeExtensionDefinition;
import graphql.language.Type;
import graphql.language.TypeName;
import graphql.schema.idl.errors.InterfaceFieldArgumentNotOptionalError;
import graphql.schema.idl.errors.InterfaceFieldArgumentRedefinitionError;
import graphql.schema.idl.errors.InterfaceFieldRedefinitionError;
import graphql.schema.idl.errors.InterfaceImplementedMoreThanOnceError;
import graphql.schema.idl.errors.InterfaceImplementingItselfError;
import graphql.schema.idl.errors.InterfaceWithCircularImplementationHierarchyError;
import graphql.schema.idl.errors.MissingInterfaceFieldArgumentsError;
import graphql.schema.idl.errors.MissingInterfaceFieldError;
import graphql.schema.idl.errors.MissingTransitiveInterfaceError;
import graphql.util.FpKit;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.BinaryOperator;
import java.util.function.Function;
import java.util.stream.Stream;

import static java.util.Collections.emptyList;
import static java.util.stream.Collectors.toMap;
import static java.util.stream.Collectors.toSet;

/**
 * A support class to help break up the large SchemaTypeChecker class. This handles
 * the checking of {@link graphql.language.ImplementingTypeDefinition}s.
 */
@Internal
class ImplementingTypesChecker {

    // Maps each concrete AST definition class to the human-readable noun used in
    // error messages ("object", "interface extension", ...).
    private static final Map<Class<? extends ImplementingTypeDefinition>, String> TYPE_OF_MAP = new HashMap<>();

    static {
        TYPE_OF_MAP.put(ObjectTypeDefinition.class, "object");
        TYPE_OF_MAP.put(ObjectTypeExtensionDefinition.class, "object extension");
        TYPE_OF_MAP.put(InterfaceTypeDefinition.class, "interface");
        TYPE_OF_MAP.put(InterfaceTypeExtensionDefinition.class, "interface extension");
    }

    /*
     * "Implementing types" (i.e.: types that might implement interfaces) have the potential to be invalid if incorrectly defined.
     *
     * The same interface might not be implemented more than once by a type and its extensions
     * The implementing type must implement all the transitive interfaces
     * An interface implementation must not result in a circular reference (i.e.: an interface implementing itself)
     * All fields declared by an interface have to be correctly declared by its implementing type, including the proper field arguments
     */
    // Entry point: validates every interface and object type in the registry,
    // appending any violations to the supplied error list.
    void checkImplementingTypes(List<GraphQLError> errors, TypeDefinitionRegistry typeRegistry) {
        List<InterfaceTypeDefinition> interfaces = typeRegistry.getTypes(InterfaceTypeDefinition.class);
        List<ObjectTypeDefinition> objects = typeRegistry.getTypes(ObjectTypeDefinition.class);

        Stream.<ImplementingTypeDefinition<?>>concat(interfaces.stream(), objects.stream())
                .forEach(type -> checkImplementingType(errors, typeRegistry, type));
    }

    // Runs the three checks for a single implementing type. The once-implemented
    // interfaces discovered by the first check feed the other two.
    private void checkImplementingType(
            List<GraphQLError> errors,
            TypeDefinitionRegistry typeRegistry,
            ImplementingTypeDefinition type) {
        Map<InterfaceTypeDefinition, ImplementingTypeDefinition> implementedInterfaces =
                checkInterfacesNotImplementedMoreThanOnce(errors, type, typeRegistry);

        checkInterfaceIsImplemented(errors, typeRegistry, type, implementedInterfaces);

        checkAncestorImplementation(errors, typeRegistry, type, implementedInterfaces);
    }

    // Reports interfaces implemented more than once across the type and its
    // extensions, and returns only those implemented exactly once (mapping each
    // interface to the definition/extension that declared it).
    private Map<InterfaceTypeDefinition, ImplementingTypeDefinition> checkInterfacesNotImplementedMoreThanOnce(
            List<GraphQLError> errors,
            ImplementingTypeDefinition type,
            TypeDefinitionRegistry typeRegistry
    ) {
        Map<InterfaceTypeDefinition, List<ImplementingTypeDefinition>> implementedInterfaces =
                getLogicallyImplementedInterfaces(type, typeRegistry);

        Map<InterfaceTypeDefinition, ImplementingTypeDefinition> interfacesImplementedOnce = implementedInterfaces.entrySet()
                .stream()
                .filter(entry -> entry.getValue().size() == 1)
                .collect(toMap(
                        Map.Entry::getKey,
                        entry -> entry.getValue().get(0)
                ));

        implementedInterfaces.entrySet().stream()
                .filter(entry -> !interfacesImplementedOnce.containsKey(entry.getKey()))
                .forEach(entry -> {
                    entry.getValue().forEach(offendingType -> {
                        errors.add(new InterfaceImplementedMoreThanOnceError(TYPE_OF_MAP.get(offendingType.getClass()), offendingType, entry.getKey()));
                    });
                });

        return interfacesImplementedOnce;
    }

    // Checks the implementation hierarchy: a type may not implement itself, and
    // every interface implemented (transitively) by a declared interface must
    // also be declared directly on this type.
    private void checkAncestorImplementation(
            List<GraphQLError> errors,
            TypeDefinitionRegistry typeRegistry,
            ImplementingTypeDefinition type,
            Map<InterfaceTypeDefinition, ImplementingTypeDefinition> implementedInterfaces) {
        if (implementedInterfaces.containsKey(type)) {
            errors.add(new InterfaceImplementingItselfError(TYPE_OF_MAP.get(type.getClass()), type));
            return;
        }

        implementedInterfaces.forEach((implementedInterface, implementingType) -> {
            Set<InterfaceTypeDefinition> transitiveInterfaces = getLogicallyImplementedInterfaces(implementedInterface, typeRegistry).keySet();

            transitiveInterfaces.forEach(transitiveInterface -> {
                if (transitiveInterface.equals(type)) {
                    errors.add(new InterfaceWithCircularImplementationHierarchyError(TYPE_OF_MAP.get(type.getClass()), type, implementedInterface));
                } else if (!implementedInterfaces.containsKey(transitiveInterface)) {
                    errors.add(new MissingTransitiveInterfaceError(TYPE_OF_MAP.get(implementingType.getClass()), implementingType, implementedInterface, transitiveInterface));
                }
            });
        });
    }

    // Verifies that every field declared by each implemented interface exists on
    // the type (including fields contributed by extensions), is a valid subtype,
    // and carries compatible arguments.
    private void checkInterfaceIsImplemented(
            List<GraphQLError> errors,
            TypeDefinitionRegistry typeRegistry,
            ImplementingTypeDefinition type,
            Map<InterfaceTypeDefinition, ImplementingTypeDefinition> implementedInterfaces
    ) {
        Set<FieldDefinition> fieldDefinitions = getLogicallyDeclaredFields(type, typeRegistry);

        Map<String, FieldDefinition> typeFields = fieldDefinitions.stream()
                .collect(toMap(FieldDefinition::getName, Function.identity(), mergeFirstValue()));

        implementedInterfaces.forEach((implementedInterface, implementingType) -> {
            implementedInterface.getFieldDefinitions().forEach(interfaceFieldDef -> {
                FieldDefinition typeFieldDef = typeFields.get(interfaceFieldDef.getName());
                if (typeFieldDef == null) {
                    errors.add(new MissingInterfaceFieldError(TYPE_OF_MAP.get(implementingType.getClass()), implementingType, implementedInterface, interfaceFieldDef));
                } else {
                    // Field type must be a (covariant) subtype of the interface's field type.
                    if (!typeRegistry.isSubTypeOf(typeFieldDef.getType(), interfaceFieldDef.getType())) {
                        String interfaceFieldType = AstPrinter.printAst(interfaceFieldDef.getType());
                        String objectFieldType = AstPrinter.printAst(typeFieldDef.getType());
                        errors.add(new InterfaceFieldRedefinitionError(TYPE_OF_MAP.get(implementingType.getClass()), implementingType, implementedInterface, typeFieldDef, objectFieldType, interfaceFieldType));
                    }

                    // look at arguments
                    List<InputValueDefinition> objectArgs = typeFieldDef.getInputValueDefinitions();
                    List<InputValueDefinition> interfaceArgs = interfaceFieldDef.getInputValueDefinitions();
                    if (objectArgs.size() < interfaceArgs.size()) {
                        errors.add(new MissingInterfaceFieldArgumentsError(TYPE_OF_MAP.get(implementingType.getClass()), implementingType, implementedInterface, typeFieldDef));
                    } else {
                        checkArgumentConsistency(TYPE_OF_MAP.get(implementingType.getClass()), implementingType, implementedInterface, typeFieldDef, interfaceFieldDef, errors);
                    }
                }
            });
        });
    }

    // Compares field arguments between an interface field and the implementing
    // field: every interface argument must exist with an identical printed
    // definition, and any extra argument on the object may not be non-null
    // (since the interface's callers would never supply it).
    private void checkArgumentConsistency(
            String typeOfType,
            ImplementingTypeDefinition objectTypeDef,
            InterfaceTypeDefinition interfaceTypeDef,
            FieldDefinition objectFieldDef,
            FieldDefinition interfaceFieldDef,
            List<GraphQLError> errors
    ) {
        Map<String, InputValueDefinition> objectArgs = FpKit.getByName(objectFieldDef.getInputValueDefinitions(), InputValueDefinition::getName);
        Map<String, InputValueDefinition> interfaceArgs = FpKit.getByName(interfaceFieldDef.getInputValueDefinitions(), InputValueDefinition::getName);
        for (Map.Entry<String, InputValueDefinition> interfaceEntries : interfaceArgs.entrySet()) {
            InputValueDefinition interfaceArg = interfaceEntries.getValue();
            InputValueDefinition objectArg = objectArgs.get(interfaceEntries.getKey());
            if (objectArg == null) {
                errors.add(new MissingInterfaceFieldArgumentsError(typeOfType, objectTypeDef, interfaceTypeDef, objectFieldDef));
            } else {
                // Compare the printed AST forms; any difference (type, default, directives) is a redefinition.
                String interfaceArgStr = AstPrinter.printAstCompact(interfaceArg);
                String objectArgStr = AstPrinter.printAstCompact(objectArg);
                if (!interfaceArgStr.equals(objectArgStr)) {
                    errors.add(new InterfaceFieldArgumentRedefinitionError(typeOfType, objectTypeDef, interfaceTypeDef, objectFieldDef, objectArgStr, interfaceArgStr));
                }
            }
        }

        if (objectArgs.size() > interfaceArgs.size()) {
            for (Map.Entry<String, InputValueDefinition> objetEntries : objectArgs.entrySet()) {
                InputValueDefinition objectArg = objetEntries.getValue();
                InputValueDefinition interfaceArg = interfaceArgs.get(objetEntries.getKey());

                if (interfaceArg == null) {
                    // there is no interface counterpart previously checked above
                    if (objectArg.getType() instanceof NonNullType) {
                        String objectArgStr = AstPrinter.printAst(objectArg);
                        errors.add(new InterfaceFieldArgumentNotOptionalError(typeOfType, objectTypeDef, interfaceTypeDef, objectFieldDef, objectArgStr));
                    }
                }
            }
        }
    }

    // Gathers every interface declared via "implements" by the type OR any of
    // its extensions, mapping each interface to the list of definitions that
    // declared it (size > 1 means duplicate implementation).
    private Map<InterfaceTypeDefinition, List<ImplementingTypeDefinition>> getLogicallyImplementedInterfaces(
            ImplementingTypeDefinition type,
            TypeDefinitionRegistry typeRegistry
    ) {

        Stream<ImplementingTypeDefinition> extensions = Stream.concat(
                typeRegistry.interfaceTypeExtensions().getOrDefault(type.getName(), emptyList()).stream(),
                typeRegistry.objectTypeExtensions().getOrDefault(type.getName(), emptyList()).stream()
        );

        return Stream.concat(Stream.of(type), extensions)
                .collect(HashMap::new, (map, implementingType) -> {
                    List<Type> implementedInterfaces = implementingType.getImplements();

                    toInterfaceTypeDefinitions(typeRegistry, implementedInterfaces).forEach(implemented -> {
                        List<ImplementingTypeDefinition> implementingTypes = map.getOrDefault(implemented, new ArrayList<>());
                        implementingTypes.add(implementingType);
                        map.put(implemented, implementingTypes);
                    });
                }, HashMap::putAll);
    }

    // Collects the field definitions of the type plus those added by all of its
    // extensions — the "logical" field set used for interface conformance.
    private Set<FieldDefinition> getLogicallyDeclaredFields(
            ImplementingTypeDefinition type,
            TypeDefinitionRegistry typeRegistry
    ) {

        Stream<ImplementingTypeDefinition> extensions = Stream.concat(
                typeRegistry.interfaceTypeExtensions().getOrDefault(type.getName(), emptyList()).stream(),
                typeRegistry.objectTypeExtensions().getOrDefault(type.getName(), emptyList()).stream()
        );

        return Stream.concat(Stream.of(type), extensions)
                .flatMap(implementingType -> {
                    List<FieldDefinition> fieldDefinitions = implementingType.getFieldDefinitions();
                    return fieldDefinitions.stream();
                })
                .collect(toSet());
    }

    // toMap merge function: on duplicate keys keep the first value seen.
    private <T> BinaryOperator<T> mergeFirstValue() {
        return (v1, v2) -> v1;
    }

    // Resolves a Type reference (possibly wrapped in list/non-null) to an
    // InterfaceTypeDefinition, if the registry knows one by that name.
    private Optional<InterfaceTypeDefinition> toInterfaceTypeDefinition(Type type, TypeDefinitionRegistry typeRegistry) {
        TypeInfo typeInfo = TypeInfo.typeInfo(type);
        TypeName unwrapped = typeInfo.getTypeName();

        return typeRegistry.getType(unwrapped, InterfaceTypeDefinition.class);
    }

    // Bulk form of toInterfaceTypeDefinition: unresolvable references are
    // silently dropped (other checks report unknown types).
    private Set<InterfaceTypeDefinition> toInterfaceTypeDefinitions(TypeDefinitionRegistry typeRegistry, Collection<Type> implementsTypes) {
        return implementsTypes.stream()
                .map(t -> toInterfaceTypeDefinition(t, typeRegistry))
                .filter(Optional::isPresent)
                .map(Optional::get)
                .collect(toSet());
    }
}
package com.twistedplane.sealnote.data; import android.os.Parcel; import android.os.Parcelable; import android.util.Log; import com.twistedplane.sealnote.SealnoteApplication; import com.twistedplane.sealnote.utils.EasyDate; import java.text.ParseException; import java.util.HashSet; import java.util.Set; /** * Note contains all the data and helper functions related to a particular * note. Acts as a map between an entry in storage database and Java. */ public class Note implements Parcelable{ public static final String TAG = "Note"; public static enum Folder { FOLDER_NONE, /* No folder selected */ FOLDER_LIVE, /* Notes that are alive that are note deleted and archived */ FOLDER_ARCHIVE, /* Archived and undeleted notes */ FOLDER_TRASH, /* Deleted notes */ FOLDER_TAG, /* A tag is currently selected */ } public static enum FolderAction { NOTE_ARCHIVE, /* Move note to Archive folder */ NOTE_UNARCHIVE, /* Move note from Archive to its previous folder */ NOTE_DELETE, /* Move note to Trash, or delete permanently if already in Trash */ NOTE_RESTORE /* Move note from Trash to its original folder */ } public static enum Type { TYPE_GENERIC, TYPE_LOGIN, TYPE_CARD, } private int mId; /* Unique note id */ private int mPosition; /* Position of note */ private String mNoteTitle; /* Note title */ private NoteContent mNote; /* Note content */ private EasyDate mEditedDate; /* Last write date */ private int mColor; /* Note color code, 0-7 */ private boolean mArchived; /* Is note archived */ private boolean mDeleted; /* Is note in Trash folder */ private Type mType; /* Type of note eg. 
Credit Card, Password, Text */ private Set<String> mTags; /* Tags attached to this note */ public Note() { this.mId = -1; this.mColor = 0; this.mArchived = false; this.mDeleted = false; this.mType = Type.TYPE_GENERIC; } public Note(int id, int position, String title, String content, Type type) { this.mId = id; this.mPosition = position; this.mNote = NoteContent.fromString(type, content); this.mNoteTitle = title; this.mColor = -1; this.mArchived = false; this.mDeleted = false; this.mType = Type.TYPE_GENERIC; } /** * Constructor to recreate object for Parcel */ public Note(Parcel inParcel) { readFromParcel(inParcel); } /** * Helper method called by constructor to read from parcel */ private void readFromParcel(Parcel inParcel) { mId = inParcel.readInt(); mPosition = inParcel.readInt(); mNoteTitle = inParcel.readString(); mType = Type.valueOf(inParcel.readString()); mNote = NoteContent.fromString(mType, inParcel.readString()); try { mEditedDate = EasyDate.fromIsoString(inParcel.readString()); } catch (ParseException e) { Log.e(TAG, "Error parsing date retrieved from database!"); } mColor = inParcel.readInt(); mArchived = inParcel.readInt() > 0; mDeleted = inParcel.readInt() > 0; mTags = convertToTagSet(inParcel.readString()); } @Override public void writeToParcel(Parcel outParcel, int flags) { outParcel.writeInt(mId); outParcel.writeInt(mPosition); outParcel.writeString(mNoteTitle); outParcel.writeString(mType.name()); outParcel.writeString(mNote.toString()); outParcel.writeString(mEditedDate.toString()); outParcel.writeInt(mColor); outParcel.writeInt(mArchived ?1 :0); outParcel.writeInt(mDeleted ?1 :0); outParcel.writeString(convertTagSetToString(mTags)); } /** * Converts given space separated tag string to a Set collection */ public static Set<String> convertToTagSet(String tagString) { String tags[] = tagString.split(" "); HashSet<String> tagSet = new HashSet<String>(); for (String tag : tags) { String trimmed = tag.trim(); if (trimmed.equals("")) { continue; } 
tagSet.add(trimmed); } return tagSet; } /** * Convert given tag set to space separated tag string */ public static String convertTagSetToString(Set<String> tagSet) { StringBuilder builder = new StringBuilder(); for (String tag : tagSet) { builder.append(tag); builder.append(" "); } return builder.toString(); } public int getId() { return this.mId; } public void setId(int id) { this.mId = id; } public int getPosition() { return this.mPosition; } public void setPosition(int position) { this.mPosition = position; } public String getTitle() { return this.mNoteTitle; } public void setTitle(String title) { this.mNoteTitle = title; } public NoteContent getNote() { return this.mNote; } public void setNote(NoteContent content) { this.mNote = content; } public EasyDate getEditedDate() { return this.mEditedDate; } public void setEditedDate(EasyDate date) { this.mEditedDate = date; } public int getColor() { return this.mColor; } public void setColor(int color) { this.mColor = color; } public void setIsArchived(boolean archived) { this.mArchived = archived; } public boolean getIsArchived() { return this.mArchived; } public void setIsDeleted(boolean deleted) { this.mDeleted = deleted; } public boolean getIsDeleted() { return this.mDeleted; } public boolean getIsLive() { return !(this.mDeleted || mArchived); } public Type getType() { return mType; } public void setType(Type type) { mType = type; } /** * NOTE: This may return null, if loadGetTags() has not been * called earlier, as tags are not loaded from database with note. * They have to be explicitly loaded */ public Set<String> getTags() { return mTags; } public void setTags(Set<String> tagSet) { mTags = tagSet; } /** * Note by default doesn't load tags. Call this method to load * tags from database and return them. 
Use getTags() later */ public Set<String> loadGetTags() { final DatabaseHandler handler = SealnoteApplication.getDatabase(); mTags = handler.getNoteTags(mId); return mTags; } @Override public int describeContents() { return 0; } public static final Parcelable.Creator CREATOR = new Parcelable.Creator() { public Note createFromParcel(Parcel in) { return new Note(in); } public Note[] newArray(int size) { return new Note[size]; } }; }
/* * Copyright 2015 NAVER Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.navercorp.pinpoint.plugin.thrift.interceptor.server.async; import static com.navercorp.pinpoint.plugin.thrift.ThriftScope.THRIFT_SERVER_SCOPE; import com.navercorp.pinpoint.bootstrap.interceptor.scope.InterceptorScope; import org.apache.thrift.TBaseAsyncProcessor; import org.apache.thrift.protocol.TProtocol; import org.apache.thrift.server.AbstractNonblockingServer.AsyncFrameBuffer; import com.navercorp.pinpoint.bootstrap.context.MethodDescriptor; import com.navercorp.pinpoint.bootstrap.context.SpanEventRecorder; import com.navercorp.pinpoint.bootstrap.context.SpanRecorder; import com.navercorp.pinpoint.bootstrap.context.Trace; import com.navercorp.pinpoint.bootstrap.context.TraceContext; import com.navercorp.pinpoint.bootstrap.interceptor.AroundInterceptor; import com.navercorp.pinpoint.bootstrap.interceptor.annotation.Scope; import com.navercorp.pinpoint.bootstrap.interceptor.annotation.Name; import com.navercorp.pinpoint.bootstrap.interceptor.scope.ExecutionPolicy; import com.navercorp.pinpoint.bootstrap.interceptor.scope.InterceptorScopeInvocation; import com.navercorp.pinpoint.bootstrap.logging.PLogger; import com.navercorp.pinpoint.bootstrap.logging.PLoggerFactory; import com.navercorp.pinpoint.plugin.thrift.ThriftClientCallContext; import com.navercorp.pinpoint.plugin.thrift.ThriftConstants; import com.navercorp.pinpoint.plugin.thrift.ThriftUtils; import 
com.navercorp.pinpoint.plugin.thrift.field.accessor.AsyncMarkerFlagFieldAccessor; import com.navercorp.pinpoint.plugin.thrift.field.accessor.ServerMarkerFlagFieldAccessor; /** * Entry/exit point for tracing asynchronous processors for Thrift services. * <p> * Because trace objects cannot be created until the message is read, this interceptor works in tandem with other interceptors in the tracing pipeline. The * actual processing of input messages is not off-loaded to <tt>AsyncProcessFunction</tt> (unlike synchronous processors where <tt>ProcessFunction</tt> does * most of the work). * <ol> * <li> * <p> * {@link com.navercorp.pinpoint.plugin.thrift.interceptor.tprotocol.server.TProtocolReadMessageBeginInterceptor TProtocolReadMessageBeginInterceptor} retrieves * the method name called by the client.</li> * </p> * * <li> * <p> * {@link com.navercorp.pinpoint.plugin.thrift.interceptor.tprotocol.server.TProtocolReadFieldBeginInterceptor TProtocolReadFieldBeginInterceptor}, * {@link com.navercorp.pinpoint.plugin.thrift.interceptor.tprotocol.server.TProtocolReadTTypeInterceptor TProtocolReadTTypeInterceptor} reads the header fields * and injects the parent trace object (if any).</li></p> * * <li> * <p> * {@link com.navercorp.pinpoint.plugin.thrift.interceptor.tprotocol.server.TProtocolReadMessageEndInterceptor TProtocolReadMessageEndInterceptor} creates the * actual root trace object.</li></p> </ol> * <p> * <b><tt>TBaseAsyncProcessorProcessInterceptor</tt></b> -> <tt>TProtocolReadMessageBeginInterceptor</tt> -> <tt>TProtocolReadFieldBeginInterceptor</tt> <-> * <tt>TProtocolReadTTypeInterceptor</tt> -> <tt>TProtocolReadMessageEndInterceptor</tt> * <p> * Based on Thrift 0.9.1+ * * @author HyunGil Jeong * * @see com.navercorp.pinpoint.plugin.thrift.interceptor.tprotocol.server.TProtocolReadMessageBeginInterceptor TProtocolReadMessageBeginInterceptor * @see com.navercorp.pinpoint.plugin.thrift.interceptor.tprotocol.server.TProtocolReadFieldBeginInterceptor 
TProtocolReadFieldBeginInterceptor * @see com.navercorp.pinpoint.plugin.thrift.interceptor.tprotocol.server.TProtocolReadTTypeInterceptor TProtocolReadTTypeInterceptor * @see com.navercorp.pinpoint.plugin.thrift.interceptor.tprotocol.server.TProtocolReadMessageEndInterceptor TProtocolReadMessageEndInterceptor */ @Scope(value = THRIFT_SERVER_SCOPE, executionPolicy = ExecutionPolicy.BOUNDARY) public class TBaseAsyncProcessorProcessInterceptor implements AroundInterceptor { private final PLogger logger = PLoggerFactory.getLogger(this.getClass()); private final boolean isDebug = logger.isDebugEnabled(); private final TraceContext traceContext; private final MethodDescriptor descriptor; private final InterceptorScope scope; public TBaseAsyncProcessorProcessInterceptor(TraceContext traceContext, MethodDescriptor descriptor, @Name(THRIFT_SERVER_SCOPE) InterceptorScope scope) { this.traceContext = traceContext; this.descriptor = descriptor; this.scope = scope; } @Override public void before(Object target, Object[] args) { if (isDebug) { logger.beforeInterceptor(target, args); } // process(final AsyncFrameBuffer fb) if (args.length != 1) { return; } // Set server markers if (args[0] instanceof AsyncFrameBuffer) { AsyncFrameBuffer frameBuffer = (AsyncFrameBuffer)args[0]; attachMarkersToInputProtocol(frameBuffer.getInputProtocol(), true); } } @Override public void after(Object target, Object[] args, Object result, Throwable throwable) { if (isDebug) { logger.afterInterceptor(target, args, result, throwable); } // Unset server markers if (args[0] instanceof AsyncFrameBuffer) { AsyncFrameBuffer frameBuffer = (AsyncFrameBuffer)args[0]; attachMarkersToInputProtocol(frameBuffer.getInputProtocol(), false); } final Trace trace = this.traceContext.currentRawTraceObject(); if (trace == null) { return; } this.traceContext.removeTraceObject(); if (trace.canSampled()) { try { processTraceObject(trace, target, args, throwable); } catch (Throwable t) { logger.warn("Error processing trace 
object. Cause:{}", t.getMessage(), t); } finally { trace.close(); } } } private boolean validateInputProtocol(Object iprot) { if (iprot instanceof TProtocol) { if (!(iprot instanceof ServerMarkerFlagFieldAccessor)) { if (isDebug) { logger.debug("Invalid target object. Need field accessor({}).", ServerMarkerFlagFieldAccessor.class.getName()); } return false; } if (!(iprot instanceof AsyncMarkerFlagFieldAccessor)) { if (isDebug) { logger.debug("Invalid target object. Need field accessor({}).", AsyncMarkerFlagFieldAccessor.class.getName()); } return false; } return true; } return false; } private void attachMarkersToInputProtocol(TProtocol iprot, boolean flag) { if (validateInputProtocol(iprot)) { ((ServerMarkerFlagFieldAccessor)iprot)._$PINPOINT$_setServerMarkerFlag(flag); ((AsyncMarkerFlagFieldAccessor)iprot)._$PINPOINT$_setAsyncMarkerFlag(flag); } } private void processTraceObject(final Trace trace, Object target, Object[] args, Throwable throwable) { // end spanEvent try { // TODO Might need a way to collect and record method arguments // trace.recordAttribute(...); SpanEventRecorder recorder = trace.currentSpanEventRecorder(); recorder.recordException(throwable); recorder.recordApi(this.descriptor); } catch (Throwable t) { logger.warn("Error processing trace object. 
Cause:{}", t.getMessage(), t); } finally { trace.traceBlockEnd(); } // end root span SpanRecorder recorder = trace.getSpanRecorder(); String methodUri = getMethodUri(target); recorder.recordRpcName(methodUri); } private String getMethodUri(Object target) { String methodUri = ThriftConstants.UNKNOWN_METHOD_URI; InterceptorScopeInvocation currentTransaction = this.scope.getCurrentInvocation(); Object attachment = currentTransaction.getAttachment(); if (attachment instanceof ThriftClientCallContext && target instanceof TBaseAsyncProcessor) { ThriftClientCallContext clientCallContext = (ThriftClientCallContext)attachment; String methodName = clientCallContext.getMethodName(); methodUri = ThriftUtils.getAsyncProcessorNameAsUri((TBaseAsyncProcessor<?>)target); StringBuilder sb = new StringBuilder(methodUri); if (!methodUri.endsWith("/")) { sb.append("/"); } sb.append(methodName); methodUri = sb.toString(); } return methodUri; } }
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.debugger.ui.tree.render;

import com.intellij.debugger.DebuggerBundle;
import com.intellij.debugger.DebuggerContext;
import com.intellij.debugger.engine.DebugProcessImpl;
import com.intellij.debugger.engine.DebuggerUtils;
import com.intellij.debugger.engine.evaluation.EvaluateException;
import com.intellij.debugger.engine.evaluation.EvaluationContext;
import com.intellij.debugger.impl.DebuggerUtilsEx;
import com.intellij.debugger.ui.tree.DebuggerTreeNode;
import com.intellij.debugger.ui.tree.NodeDescriptor;
import com.intellij.debugger.ui.tree.ValueDescriptor;
import com.intellij.openapi.util.JDOMExternalizerUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.CommonClassNames;
import com.intellij.psi.PsiElement;
import com.intellij.ui.classFilter.ClassFilter;
import com.intellij.xdebugger.impl.ui.XDebuggerUIConstants;
import com.sun.jdi.*;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;

import static com.intellij.psi.CommonClassNames.JAVA_LANG_STRING;

/**
 * Node renderer that labels a debuggee value with the result of calling its
 * {@code toString()} in the debuggee VM. Evaluation is asynchronous (via
 * {@code BatchEvaluator}); a "collecting data" placeholder is shown until the
 * result arrives. Optionally restricted to classes matching user-configured
 * filters, and can be made on-demand only.
 */
public class ToStringRenderer extends NodeRendererImpl implements OnDemandRenderer {
  public static final @NonNls String UNIQUE_ID = "ToStringRenderer";

  // When true, only values whose type matches myClassFilters are rendered eagerly.
  private boolean USE_CLASS_FILTERS = false;
  // When true, toString() is evaluated only when the user explicitly asks for it.
  private boolean ON_DEMAND;
  private ClassFilter[] myClassFilters = ClassFilter.EMPTY_ARRAY;

  public ToStringRenderer() {
    // "unnamed" is a placeholder; getName() below reports the real fixed name.
    super("unnamed", true);
  }

  @Override
  public String getUniqueId() {
    return UNIQUE_ID;
  }

  @Override
  public String getName() {
    return "toString";
  }

  @Override
  public void setName(String name) {
    // prohibit change
  }

  /**
   * Deep-clones this renderer: the filter array and each filter are copied so
   * the clone can be edited independently.
   */
  @Override
  public ToStringRenderer clone() {
    final ToStringRenderer cloned = (ToStringRenderer)super.clone();
    final ClassFilter[] classFilters = (myClassFilters.length > 0)? new ClassFilter[myClassFilters.length] : ClassFilter.EMPTY_ARRAY;
    for (int idx = 0; idx < classFilters.length; idx++) {
      classFilters[idx] = myClassFilters[idx].clone();
    }
    cloned.myClassFilters = classFilters;
    return cloned;
  }

  /**
   * Schedules an asynchronous toString() evaluation for the descriptor's value
   * and returns a placeholder label immediately; the real label (or an error)
   * is pushed to {@code labelListener} when evaluation completes.
   */
  @Override
  public String calcLabel(final ValueDescriptor valueDescriptor, EvaluationContext evaluationContext, final DescriptorLabelListener labelListener)
    throws EvaluateException {
    if (!isShowValue(valueDescriptor, evaluationContext)) {
      return ""; // on-demand and not requested yet: show nothing
    }
    final Value value = valueDescriptor.getValue();

    BatchEvaluator.getBatchEvaluator(evaluationContext.getDebugProcess()).invoke(new ToStringCommand(evaluationContext, value) {
      @Override
      public void evaluationResult(String message) {
        valueDescriptor.setValueLabel(
          StringUtil.notNullize(message)
        );
        labelListener.labelChanged();
      }

      @Override
      public void evaluationError(String message) {
        // Append the value's runtime type to the error when available.
        final String msg = value != null? message + " " + DebuggerBundle.message("evaluation.error.cannot.evaluate.tostring", value.type().name()) : message;
        valueDescriptor.setValueLabelFailed(new EvaluateException(msg, null));
        labelListener.labelChanged();
      }
    });
    // Shown until one of the callbacks above replaces it.
    return XDebuggerUIConstants.COLLECTING_DATA_MESSAGE;
  }

  @NotNull
  @Override
  public String getLinkText() {
    return DebuggerBundle.message("message.node.toString");
  }

  public boolean isUseClassFilters() {
    return USE_CLASS_FILTERS;
  }

  public void setUseClassFilters(boolean value) {
    USE_CLASS_FILTERS = value;
  }

  /**
   * On-demand when explicitly configured, or when class filters are enabled
   * and the value's type does not match any filter.
   */
  @Override
  public boolean isOnDemand(EvaluationContext evaluationContext, ValueDescriptor valueDescriptor) {
    if (ON_DEMAND || (USE_CLASS_FILTERS && !isFiltered(valueDescriptor.getType()))) {
      return true;
    }
    return OnDemandRenderer.super.isOnDemand(evaluationContext, valueDescriptor);
  }

  /**
   * Applicable to reference types (except String) that actually override
   * {@code Object.toString()}.
   */
  @Override
  public boolean isApplicable(Type type) {
    if (!(type instanceof ReferenceType)) {
      return false;
    }

    if (JAVA_LANG_STRING.equals(type.name())) {
      return false; // do not render 'String' objects for performance reasons
    }

    return overridesToString(type);
  }

  // True when the type declares (or inherits from a non-Object class) a
  // concrete toString() implementation.
  @SuppressWarnings({"HardCodedStringLiteral"})
  private static boolean overridesToString(Type type) {
    if (type instanceof ClassType) {
      Method toStringMethod = ((ClassType)type).concreteMethodByName("toString", "()Ljava/lang/String;");
      return toStringMethod != null && !CommonClassNames.JAVA_LANG_OBJECT.equals(toStringMethod.declaringType().name());
    }
    return false;
  }

  // Children/expandability are not toString()'s concern: delegate to the
  // default renderer for the value's type.
  @Override
  public void buildChildren(Value value, ChildrenBuilder builder, EvaluationContext evaluationContext) {
    DebugProcessImpl.getDefaultRenderer(value).buildChildren(value, builder, evaluationContext);
  }

  @Override
  public PsiElement getChildValueExpression(DebuggerTreeNode node, DebuggerContext context) throws EvaluateException {
    return DebugProcessImpl.getDefaultRenderer(((ValueDescriptor)node.getParent().getDescriptor()).getType())
      .getChildValueExpression(node, context);
  }

  @Override
  public boolean isExpandable(Value value, EvaluationContext evaluationContext, NodeDescriptor parentDescriptor) {
    return DebugProcessImpl.getDefaultRenderer(value).isExpandable(value, evaluationContext, parentDescriptor);
  }

  /**
   * Restores settings from JDOM; missing fields parse as false / empty,
   * matching the write-only-when-true policy in writeExternal().
   */
  @Override
  @SuppressWarnings({"HardCodedStringLiteral"})
  public void readExternal(Element element) {
    super.readExternal(element);

    ON_DEMAND = Boolean.parseBoolean(JDOMExternalizerUtil.readField(element, "ON_DEMAND"));
    USE_CLASS_FILTERS = Boolean.parseBoolean(JDOMExternalizerUtil.readField(element, "USE_CLASS_FILTERS"));
    myClassFilters = DebuggerUtilsEx.readFilters(element.getChildren("filter"));
  }

  /** Persists settings to JDOM; boolean flags are written only when true. */
  @Override
  @SuppressWarnings({"HardCodedStringLiteral"})
  public void writeExternal(Element element) {
    super.writeExternal(element);

    if (ON_DEMAND) {
      JDOMExternalizerUtil.writeField(element, "ON_DEMAND", "true");
    }
    if (USE_CLASS_FILTERS) {
      JDOMExternalizerUtil.writeField(element, "USE_CLASS_FILTERS", "true");
    }
    DebuggerUtilsEx.writeFilters(element, "filter", myClassFilters);
  }

  public ClassFilter[] getClassFilters() {
    return myClassFilters;
  }

  public void setClassFilters(ClassFilter[] classFilters) {
    myClassFilters = classFilters != null ? classFilters : ClassFilter.EMPTY_ARRAY;
  }

  /**
   * Whether the type matches any enabled class filter, either by JDI
   * instance-of check or by name pattern.
   */
  private boolean isFiltered(Type t) {
    if (t instanceof ReferenceType) {
      for (ClassFilter classFilter : myClassFilters) {
        if (classFilter.isEnabled() && DebuggerUtils.instanceOf(t, classFilter.getPattern())) {
          return true;
        }
      }
    }
    // Fall back to a name-based match against the same filters.
    return DebuggerUtilsEx.isFiltered(t.name(), myClassFilters);
  }

  public boolean isOnDemand() {
    return ON_DEMAND;
  }

  public void setOnDemand(boolean value) {
    ON_DEMAND = value;
  }

  // Evaluating toString() in the debuggee VM is expensive: report overhead.
  @Override
  public boolean hasOverhead() {
    return true;
  }
}
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.keycloak.models.jpa; import org.keycloak.models.ClientModel; import org.keycloak.models.CredentialValidationOutput; import org.keycloak.models.FederatedIdentityModel; import org.keycloak.models.GroupModel; import org.keycloak.models.KeycloakSession; import org.keycloak.models.ProtocolMapperModel; import org.keycloak.models.RealmModel; import org.keycloak.models.RequiredActionProviderModel; import org.keycloak.models.RoleContainerModel; import org.keycloak.models.RoleModel; import org.keycloak.models.UserCredentialModel; import org.keycloak.models.UserFederationProviderModel; import org.keycloak.models.UserModel; import org.keycloak.models.UserProvider; import org.keycloak.models.jpa.entities.FederatedIdentityEntity; import org.keycloak.models.jpa.entities.UserAttributeEntity; import org.keycloak.models.jpa.entities.UserEntity; import org.keycloak.models.utils.CredentialValidation; import org.keycloak.models.utils.KeycloakModelUtils; import javax.persistence.EntityManager; import javax.persistence.TypedQuery; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; /** * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a> * @version $Revision: 1 $ */ public class JpaUserProvider implements UserProvider { private static final String 
EMAIL = "email"; private static final String USERNAME = "username"; private static final String FIRST_NAME = "firstName"; private static final String LAST_NAME = "lastName"; private final KeycloakSession session; protected EntityManager em; public JpaUserProvider(KeycloakSession session, EntityManager em) { this.session = session; this.em = em; } @Override public UserModel addUser(RealmModel realm, String id, String username, boolean addDefaultRoles, boolean addDefaultRequiredActions) { if (id == null) { id = KeycloakModelUtils.generateId(); } UserEntity entity = new UserEntity(); entity.setId(id); entity.setCreatedTimestamp(System.currentTimeMillis()); entity.setUsername(username.toLowerCase()); entity.setRealmId(realm.getId()); em.persist(entity); em.flush(); UserAdapter userModel = new UserAdapter(session, realm, em, entity); if (addDefaultRoles) { for (String r : realm.getDefaultRoles()) { userModel.grantRoleImpl(realm.getRole(r)); // No need to check if user has role as it's new user } for (ClientModel application : realm.getClients()) { for (String r : application.getDefaultRoles()) { userModel.grantRoleImpl(application.getRole(r)); // No need to check if user has role as it's new user } } for (GroupModel g : realm.getDefaultGroups()) { userModel.joinGroupImpl(g); // No need to check if user has group as it's new user } } if (addDefaultRequiredActions){ for (RequiredActionProviderModel r : realm.getRequiredActionProviders()) { if (r.isEnabled() && r.isDefaultAction()) { userModel.addRequiredAction(r.getAlias()); } } } return userModel; } @Override public UserModel addUser(RealmModel realm, String username) { return addUser(realm, KeycloakModelUtils.generateId(), username.toLowerCase(), true, true); } @Override public boolean removeUser(RealmModel realm, UserModel user) { UserEntity userEntity = em.find(UserEntity.class, user.getId()); if (userEntity == null) return false; removeUser(userEntity); session.getKeycloakSessionFactory().publish(new 
UserModel.UserRemovedEvent() { @Override public UserModel getUser() { return user; } @Override public KeycloakSession getKeycloakSession() { return session; } }); return true; } private void removeUser(UserEntity user) { String id = user.getId(); em.createNamedQuery("deleteUserRoleMappingsByUser").setParameter("user", user).executeUpdate(); em.createNamedQuery("deleteUserGroupMembershipsByUser").setParameter("user", user).executeUpdate(); em.createNamedQuery("deleteFederatedIdentityByUser").setParameter("user", user).executeUpdate(); em.createNamedQuery("deleteUserConsentRolesByUser").setParameter("user", user).executeUpdate(); em.createNamedQuery("deleteUserConsentProtMappersByUser").setParameter("user", user).executeUpdate(); em.createNamedQuery("deleteUserConsentsByUser").setParameter("user", user).executeUpdate(); em.flush(); // not sure why i have to do a clear() here. I was getting some messed up errors that Hibernate couldn't // un-delete the UserEntity. em.clear(); user = em.find(UserEntity.class, id); if (user != null) { em.remove(user); } em.flush(); } @Override public void addFederatedIdentity(RealmModel realm, UserModel user, FederatedIdentityModel identity) { FederatedIdentityEntity entity = new FederatedIdentityEntity(); entity.setRealmId(realm.getId()); entity.setIdentityProvider(identity.getIdentityProvider()); entity.setUserId(identity.getUserId()); entity.setUserName(identity.getUserName().toLowerCase()); entity.setToken(identity.getToken()); UserEntity userEntity = em.getReference(UserEntity.class, user.getId()); entity.setUser(userEntity); em.persist(entity); em.flush(); } @Override public void updateFederatedIdentity(RealmModel realm, UserModel federatedUser, FederatedIdentityModel federatedIdentityModel) { FederatedIdentityEntity federatedIdentity = findFederatedIdentity(federatedUser, federatedIdentityModel.getIdentityProvider()); federatedIdentity.setToken(federatedIdentityModel.getToken()); em.persist(federatedIdentity); em.flush(); } 
@Override public boolean removeFederatedIdentity(RealmModel realm, UserModel user, String identityProvider) { FederatedIdentityEntity entity = findFederatedIdentity(user, identityProvider); if (entity != null) { em.remove(entity); em.flush(); return true; } else { return false; } } @Override public void grantToAllUsers(RealmModel realm, RoleModel role) { int num = em.createNamedQuery("grantRoleToAllUsers") .setParameter("realmId", realm.getId()) .setParameter("roleId", role.getId()) .executeUpdate(); } @Override public void preRemove(RealmModel realm) { int num = em.createNamedQuery("deleteUserConsentRolesByRealm") .setParameter("realmId", realm.getId()).executeUpdate(); num = em.createNamedQuery("deleteUserConsentProtMappersByRealm") .setParameter("realmId", realm.getId()).executeUpdate(); num = em.createNamedQuery("deleteUserConsentsByRealm") .setParameter("realmId", realm.getId()).executeUpdate(); num = em.createNamedQuery("deleteUserRoleMappingsByRealm") .setParameter("realmId", realm.getId()).executeUpdate(); num = em.createNamedQuery("deleteUserRequiredActionsByRealm") .setParameter("realmId", realm.getId()).executeUpdate(); num = em.createNamedQuery("deleteFederatedIdentityByRealm") .setParameter("realmId", realm.getId()).executeUpdate(); num = em.createNamedQuery("deleteCredentialsByRealm") .setParameter("realmId", realm.getId()).executeUpdate(); num = em.createNamedQuery("deleteUserAttributesByRealm") .setParameter("realmId", realm.getId()).executeUpdate(); num = em.createNamedQuery("deleteUserGroupMembershipByRealm") .setParameter("realmId", realm.getId()).executeUpdate(); num = em.createNamedQuery("deleteUsersByRealm") .setParameter("realmId", realm.getId()).executeUpdate(); } @Override public void preRemove(RealmModel realm, UserFederationProviderModel link) { int num = em.createNamedQuery("deleteUserRoleMappingsByRealmAndLink") .setParameter("realmId", realm.getId()) .setParameter("link", link.getId()) .executeUpdate(); num = 
em.createNamedQuery("deleteUserRequiredActionsByRealmAndLink") .setParameter("realmId", realm.getId()) .setParameter("link", link.getId()) .executeUpdate(); num = em.createNamedQuery("deleteFederatedIdentityByRealmAndLink") .setParameter("realmId", realm.getId()) .setParameter("link", link.getId()) .executeUpdate(); num = em.createNamedQuery("deleteCredentialsByRealmAndLink") .setParameter("realmId", realm.getId()) .setParameter("link", link.getId()) .executeUpdate(); num = em.createNamedQuery("deleteUserAttributesByRealmAndLink") .setParameter("realmId", realm.getId()) .setParameter("link", link.getId()) .executeUpdate(); num = em.createNamedQuery("deleteUsersByRealmAndLink") .setParameter("realmId", realm.getId()) .setParameter("link", link.getId()) .executeUpdate(); } @Override public void preRemove(RealmModel realm, RoleModel role) { em.createNamedQuery("deleteUserConsentRolesByRole").setParameter("roleId", role.getId()).executeUpdate(); em.createNamedQuery("deleteUserRoleMappingsByRole").setParameter("roleId", role.getId()).executeUpdate(); } @Override public void preRemove(RealmModel realm, ClientModel client) { em.createNamedQuery("deleteUserConsentProtMappersByClient").setParameter("clientId", client.getId()).executeUpdate(); em.createNamedQuery("deleteUserConsentRolesByClient").setParameter("clientId", client.getId()).executeUpdate(); em.createNamedQuery("deleteUserConsentsByClient").setParameter("clientId", client.getId()).executeUpdate(); } @Override public void preRemove(ProtocolMapperModel protocolMapper) { em.createNamedQuery("deleteUserConsentProtMappersByProtocolMapper") .setParameter("protocolMapperId", protocolMapper.getId()) .executeUpdate(); } @Override public List<UserModel> getGroupMembers(RealmModel realm, GroupModel group) { TypedQuery<UserEntity> query = em.createNamedQuery("groupMembership", UserEntity.class); query.setParameter("groupId", group.getId()); List<UserEntity> results = query.getResultList(); List<UserModel> users = new 
ArrayList<UserModel>(); for (UserEntity user : results) { users.add(new UserAdapter(session, realm, em, user)); } return users; } @Override public void preRemove(RealmModel realm, GroupModel group) { em.createNamedQuery("deleteUserGroupMembershipsByGroup").setParameter("groupId", group.getId()).executeUpdate(); } @Override public UserModel getUserById(String id, RealmModel realm) { TypedQuery<UserEntity> query = em.createNamedQuery("getRealmUserById", UserEntity.class); query.setParameter("id", id); query.setParameter("realmId", realm.getId()); List<UserEntity> entities = query.getResultList(); if (entities.size() == 0) return null; return new UserAdapter(session, realm, em, entities.get(0)); } @Override public UserModel getUserByUsername(String username, RealmModel realm) { TypedQuery<UserEntity> query = em.createNamedQuery("getRealmUserByUsername", UserEntity.class); query.setParameter("username", username.toLowerCase()); query.setParameter("realmId", realm.getId()); List<UserEntity> results = query.getResultList(); if (results.size() == 0) return null; return new UserAdapter(session, realm, em, results.get(0)); } @Override public UserModel getUserByEmail(String email, RealmModel realm) { TypedQuery<UserEntity> query = em.createNamedQuery("getRealmUserByEmail", UserEntity.class); query.setParameter("email", email.toLowerCase()); query.setParameter("realmId", realm.getId()); List<UserEntity> results = query.getResultList(); return results.isEmpty() ? 
null : new UserAdapter(session, realm, em, results.get(0)); } @Override public void close() { } @Override public UserModel getUserByFederatedIdentity(FederatedIdentityModel identity, RealmModel realm) { TypedQuery<UserEntity> query = em.createNamedQuery("findUserByFederatedIdentityAndRealm", UserEntity.class); query.setParameter("realmId", realm.getId()); query.setParameter("identityProvider", identity.getIdentityProvider()); query.setParameter("userId", identity.getUserId()); List<UserEntity> results = query.getResultList(); if (results.isEmpty()) { return null; } else if (results.size() > 1) { throw new IllegalStateException("More results found for identityProvider=" + identity.getIdentityProvider() + ", userId=" + identity.getUserId() + ", results=" + results); } else { UserEntity user = results.get(0); return new UserAdapter(session, realm, em, user); } } @Override public UserModel getUserByServiceAccountClient(ClientModel client) { TypedQuery<UserEntity> query = em.createNamedQuery("getRealmUserByServiceAccount", UserEntity.class); query.setParameter("realmId", client.getRealm().getId()); query.setParameter("clientInternalId", client.getId()); List<UserEntity> results = query.getResultList(); if (results.isEmpty()) { return null; } else if (results.size() > 1) { throw new IllegalStateException("More service account linked users found for client=" + client.getClientId() + ", results=" + results); } else { UserEntity user = results.get(0); return new UserAdapter(session, client.getRealm(), em, user); } } @Override public List<UserModel> getUsers(RealmModel realm, boolean includeServiceAccounts) { return getUsers(realm, -1, -1, includeServiceAccounts); } @Override public int getUsersCount(RealmModel realm) { Object count = em.createNamedQuery("getRealmUserCount") .setParameter("realmId", realm.getId()) .getSingleResult(); return ((Number)count).intValue(); } @Override public List<UserModel> getUsers(RealmModel realm, int firstResult, int maxResults, boolean 
includeServiceAccounts) { String queryName = includeServiceAccounts ? "getAllUsersByRealm" : "getAllUsersByRealmExcludeServiceAccount" ; TypedQuery<UserEntity> query = em.createNamedQuery(queryName, UserEntity.class); query.setParameter("realmId", realm.getId()); if (firstResult != -1) { query.setFirstResult(firstResult); } if (maxResults != -1) { query.setMaxResults(maxResults); } List<UserEntity> results = query.getResultList(); List<UserModel> users = new ArrayList<UserModel>(); for (UserEntity entity : results) users.add(new UserAdapter(session, realm, em, entity)); return users; } @Override public List<UserModel> getGroupMembers(RealmModel realm, GroupModel group, int firstResult, int maxResults) { TypedQuery<UserEntity> query = em.createNamedQuery("groupMembership", UserEntity.class); query.setParameter("groupId", group.getId()); if (firstResult != -1) { query.setFirstResult(firstResult); } if (maxResults != -1) { query.setMaxResults(maxResults); } List<UserEntity> results = query.getResultList(); List<UserModel> users = new ArrayList<UserModel>(); for (UserEntity user : results) { users.add(new UserAdapter(session, realm, em, user)); } return users; } @Override public List<UserModel> searchForUser(String search, RealmModel realm) { return searchForUser(search, realm, -1, -1); } @Override public List<UserModel> searchForUser(String search, RealmModel realm, int firstResult, int maxResults) { TypedQuery<UserEntity> query = em.createNamedQuery("searchForUser", UserEntity.class); query.setParameter("realmId", realm.getId()); query.setParameter("search", "%" + search.toLowerCase() + "%"); if (firstResult != -1) { query.setFirstResult(firstResult); } if (maxResults != -1) { query.setMaxResults(maxResults); } List<UserEntity> results = query.getResultList(); List<UserModel> users = new ArrayList<UserModel>(); for (UserEntity entity : results) users.add(new UserAdapter(session, realm, em, entity)); return users; } @Override public List<UserModel> 
searchForUserByAttributes(Map<String, String> attributes, RealmModel realm) { return searchForUserByAttributes(attributes, realm, -1, -1); } @Override public List<UserModel> searchForUserByAttributes(Map<String, String> attributes, RealmModel realm, int firstResult, int maxResults) { StringBuilder builder = new StringBuilder("select u from UserEntity u where u.realmId = :realmId"); for (Map.Entry<String, String> entry : attributes.entrySet()) { String attribute = null; String parameterName = null; if (entry.getKey().equals(UserModel.USERNAME)) { attribute = "lower(u.username)"; parameterName = JpaUserProvider.USERNAME; } else if (entry.getKey().equalsIgnoreCase(UserModel.FIRST_NAME)) { attribute = "lower(u.firstName)"; parameterName = JpaUserProvider.FIRST_NAME; } else if (entry.getKey().equalsIgnoreCase(UserModel.LAST_NAME)) { attribute = "lower(u.lastName)"; parameterName = JpaUserProvider.LAST_NAME; } else if (entry.getKey().equalsIgnoreCase(UserModel.EMAIL)) { attribute = "lower(u.email)"; parameterName = JpaUserProvider.EMAIL; } if (attribute == null) continue; builder.append(" and "); builder.append(attribute).append(" like :").append(parameterName); } builder.append(" order by u.username"); String q = builder.toString(); TypedQuery<UserEntity> query = em.createQuery(q, UserEntity.class); query.setParameter("realmId", realm.getId()); for (Map.Entry<String, String> entry : attributes.entrySet()) { String parameterName = null; if (entry.getKey().equals(UserModel.USERNAME)) { parameterName = JpaUserProvider.USERNAME; } else if (entry.getKey().equalsIgnoreCase(UserModel.FIRST_NAME)) { parameterName = JpaUserProvider.FIRST_NAME; } else if (entry.getKey().equalsIgnoreCase(UserModel.LAST_NAME)) { parameterName = JpaUserProvider.LAST_NAME; } else if (entry.getKey().equalsIgnoreCase(UserModel.EMAIL)) { parameterName = JpaUserProvider.EMAIL; } if (parameterName == null) continue; query.setParameter(parameterName, "%" + entry.getValue().toLowerCase() + "%"); } if 
(firstResult != -1) { query.setFirstResult(firstResult); } if (maxResults != -1) { query.setMaxResults(maxResults); } List<UserEntity> results = query.getResultList(); List<UserModel> users = new ArrayList<UserModel>(); for (UserEntity entity : results) users.add(new UserAdapter(session, realm, em, entity)); return users; } @Override public List<UserModel> searchForUserByUserAttribute(String attrName, String attrValue, RealmModel realm) { TypedQuery<UserAttributeEntity> query = em.createNamedQuery("getAttributesByNameAndValue", UserAttributeEntity.class); query.setParameter("name", attrName); query.setParameter("value", attrValue); List<UserAttributeEntity> results = query.getResultList(); List<UserModel> users = new ArrayList<UserModel>(); for (UserAttributeEntity attr : results) { UserEntity user = attr.getUser(); users.add(new UserAdapter(session, realm, em, user)); } return users; } private FederatedIdentityEntity findFederatedIdentity(UserModel user, String identityProvider) { TypedQuery<FederatedIdentityEntity> query = em.createNamedQuery("findFederatedIdentityByUserAndProvider", FederatedIdentityEntity.class); UserEntity userEntity = em.getReference(UserEntity.class, user.getId()); query.setParameter("user", userEntity); query.setParameter("identityProvider", identityProvider); List<FederatedIdentityEntity> results = query.getResultList(); return results.size() > 0 ? 
results.get(0) : null; } @Override public Set<FederatedIdentityModel> getFederatedIdentities(UserModel user, RealmModel realm) { TypedQuery<FederatedIdentityEntity> query = em.createNamedQuery("findFederatedIdentityByUser", FederatedIdentityEntity.class); UserEntity userEntity = em.getReference(UserEntity.class, user.getId()); query.setParameter("user", userEntity); List<FederatedIdentityEntity> results = query.getResultList(); Set<FederatedIdentityModel> set = new HashSet<FederatedIdentityModel>(); for (FederatedIdentityEntity entity : results) { set.add(new FederatedIdentityModel(entity.getIdentityProvider(), entity.getUserId(), entity.getUserName(), entity.getToken())); } return set; } @Override public FederatedIdentityModel getFederatedIdentity(UserModel user, String identityProvider, RealmModel realm) { FederatedIdentityEntity entity = findFederatedIdentity(user, identityProvider); return (entity != null) ? new FederatedIdentityModel(entity.getIdentityProvider(), entity.getUserId(), entity.getUserName(), entity.getToken()) : null; } @Override public boolean validCredentials(KeycloakSession session, RealmModel realm, UserModel user, List<UserCredentialModel> input) { return CredentialValidation.validCredentials(session, realm, user, input); } @Override public boolean validCredentials(KeycloakSession session, RealmModel realm, UserModel user, UserCredentialModel... input) { return CredentialValidation.validCredentials(session, realm, user, input); } @Override public CredentialValidationOutput validCredentials(KeycloakSession session, RealmModel realm, UserCredentialModel... input) { // Not supported yet return null; } }
/* * Copyright (c) 2001-2004 Ant-Contrib project. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.sf.antcontrib.property; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.lang.reflect.Field; import java.util.Enumeration; import java.util.Hashtable; import java.util.Properties; import java.util.Vector; import org.apache.tools.ant.BuildException; import org.apache.tools.ant.Project; import org.apache.tools.ant.ProjectHelper; import org.apache.tools.ant.Task; /** * Similar to Property, but this property is mutable. In fact, much of the code * in this class is copy and paste from Property. In general, the standard Ant * property should be used, but occasionally it is useful to use a mutable * property. * <p> * This used to be a nice little task that took advantage of what is probably * a flaw in the Ant Project API -- setting a "user" property programatically * causes the project to overwrite a previously set property. Now this task * has become more violent and employs a technique known as "object rape" to * directly access the Project's private property hashtable. * <p>Developed for use with Antelope, migrated to ant-contrib Oct 2003. 
 *
 * @author Dale Anson, danson@germane-software.com
 * @since Ant 1.5
 * @version $Revision: 1.6 $
 */
public class Variable extends Task {

    // attribute storage
    private String value = "";          // property value; defaults to ""
    private String name = null;         // property name; required unless 'file' is set
    private File file = null;           // optional properties file to load from
    private boolean remove = false;     // when true, unset the property instead of setting it

    /**
     * Set the name of the property. Required unless 'file' is used.
     *
     * @param name the name of the property.
     */
    public void setName( String name ) {
        this.name = name;
    }

    /**
     * Set the value of the property. Optional, defaults to "".
     *
     * @param value the value of the property.
     */
    public void setValue( String value ) {
        this.value = value;
    }

    /**
     * Set the name of a file to read properties from. Optional.
     *
     * @param file the file to read properties from.
     */
    public void setFile( File file ) {
        this.file = file;
    }

    /**
     * Determines whether the property should be removed from the project.
     * Default is false. Once removed, conditions that check for property
     * existence will find this property does not exist.
     *
     * @param b set to true to remove the property from the project.
     */
    public void setUnset( boolean b ) {
        remove = b;
    }

    /**
     * Execute this task: either remove the named property, set it from the
     * 'value' attribute, or load a batch of properties from 'file'.
     *
     * @exception BuildException if 'name' is missing when required, or the
     *            given file does not exist.
     */
    public void execute() throws BuildException {
        if ( remove ) {
            if ( name == null || name.equals( "" ) ) {
                throw new BuildException( "The 'name' attribute is required with 'unset'." );
            }
            removeProperty( name );
            return ;
        }
        if ( file == null ) {
            // check for the required name attribute
            if ( name == null || name.equals( "" ) ) {
                throw new BuildException( "The 'name' attribute is required." );
            }

            // check for the required value attribute
            if ( value == null ) {
                value = "";
            }

            // adjust the property value if necessary -- is this necessary?
            // Doesn't Ant do this automatically?
            value = getProject().replaceProperties( value );

            // set the property
            forceProperty( name, value );
        }
        else {
            if ( !file.exists() ) {
                throw new BuildException( file.getAbsolutePath() + " does not exists." );
            }
            loadFile( file );
        }
    }

    /**
     * Remove a property from the project's property table and the userProperty table.
     * Note that Ant 1.6 uses a helper for this.
     * Both Ant 1.5 (tables on Project) and Ant 1.6 (tables on the
     * "ant.PropertyHelper" reference) layouts are attempted; failures are
     * ignored since only one layout exists at runtime.
     */
    private void removeProperty( String name ) {
        Hashtable properties = null;
        // Ant 1.5 stores properties in Project
        try {
            properties = ( Hashtable ) getValue( getProject(), "properties" );
            if ( properties != null ) {
                properties.remove( name );
            }
        }
        catch ( Exception e ) {
            // ignore, could be Ant 1.6
        }
        try {
            properties = ( Hashtable ) getValue( getProject(), "userProperties" );
            if ( properties != null ) {
                properties.remove( name );
            }
        }
        catch ( Exception e ) {
            // ignore, could be Ant 1.6
        }

        // Ant 1.6 uses a PropertyHelper, can check for it by checking for a
        // reference to "ant.PropertyHelper"
        try {
            Object property_helper = getProject().getReference( "ant.PropertyHelper" );
            if ( property_helper != null ) {
                try {
                    properties = ( Hashtable ) getValue( property_helper, "properties" );
                    if ( properties != null ) {
                        properties.remove( name );
                    }
                }
                catch ( Exception e ) {
                    // ignore
                }
                try {
                    properties = ( Hashtable ) getValue( property_helper, "userProperties" );
                    if ( properties != null ) {
                        properties.remove( name );
                    }
                }
                catch ( Exception e ) {
                    // ignore
                }
            }
        }
        catch ( Exception e ) {
            // ignore, could be Ant 1.5
        }
    }

    /**
     * Overwrite the property regardless of any previous value, by writing
     * directly into the Project's private property table; falls back to
     * setUserProperty if reflection fails.
     */
    private void forceProperty( String name, String value ) {
        try {
            Hashtable properties = ( Hashtable ) getValue( getProject(), "properties" );
            if ( properties == null ) {
                getProject().setUserProperty( name, value );
            }
            else {
                properties.put( name, value );
            }
        }
        catch ( Exception e ) {
            getProject().setUserProperty( name, value );
        }
    }

    /**
     * Reflection helper: locate a (possibly private) field declared on the
     * given class or any of its superclasses.
     *
     * @param thisClass The class to reflect on.
     * @param fieldName The field to locate.
     * @return The field object.
     * @exception NoSuchFieldException if no class in the hierarchy declares the field.
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.core; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import org.apache.commons.lang.StringUtils; import org.apache.solr.cloud.CloudDescriptor; import org.apache.solr.common.SolrException; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.IOUtils; import org.apache.solr.util.PropertiesUtil; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.nio.file.Paths; import java.util.Locale; import java.util.Properties; import static com.google.common.base.Preconditions.checkNotNull; /** * A Solr core descriptor * * @since solr 1.3 */ public class CoreDescriptor { // Properties file name constants public static final String CORE_NAME = "name"; public static final String CORE_CONFIG = "config"; public static final String CORE_INSTDIR = "instanceDir"; public static final String CORE_ABS_INSTDIR = "absoluteInstDir"; public static final String CORE_DATADIR = "dataDir"; public static final String CORE_ULOGDIR = "ulogDir"; public static final String CORE_SCHEMA = "schema"; public static final 
String CORE_SHARD = "shard"; public static final String CORE_COLLECTION = "collection"; public static final String CORE_ROLES = "roles"; public static final String CORE_PROPERTIES = "properties"; public static final String CORE_LOADONSTARTUP = "loadOnStartup"; public static final String CORE_TRANSIENT = "transient"; public static final String CORE_NODE_NAME = "coreNodeName"; public static final String CORE_CONFIGSET = "configSet"; public static final String CORE_CONFIGSET_PROPERTIES = "configSetProperties"; public static final String SOLR_CORE_PROP_PREFIX = "solr.core."; public static final String DEFAULT_EXTERNAL_PROPERTIES_FILE = "conf" + File.separator + "solrcore.properties"; /** * Get the standard properties in persistable form * @return the standard core properties in persistable form */ public Properties getPersistableStandardProperties() { return originalCoreProperties; } /** * Get user-defined core properties in persistable form * @return user-defined core properties in persistable form */ public Properties getPersistableUserProperties() { return originalExtraProperties; } private static ImmutableMap<String, String> defaultProperties = new ImmutableMap.Builder<String, String>() .put(CORE_CONFIG, "solrconfig.xml") .put(CORE_SCHEMA, "schema.xml") .put(CORE_CONFIGSET_PROPERTIES, "configsetprops.json") .put(CORE_DATADIR, "data" + File.separator) .put(CORE_TRANSIENT, "false") .put(CORE_LOADONSTARTUP, "true") .build(); private static ImmutableList<String> requiredProperties = ImmutableList.of( CORE_NAME, CORE_INSTDIR, CORE_ABS_INSTDIR ); public static ImmutableList<String> standardPropNames = ImmutableList.of( CORE_NAME, CORE_CONFIG, CORE_INSTDIR, CORE_DATADIR, CORE_ULOGDIR, CORE_SCHEMA, CORE_PROPERTIES, CORE_CONFIGSET_PROPERTIES, CORE_LOADONSTARTUP, CORE_TRANSIENT, CORE_CONFIGSET, // cloud props CORE_SHARD, CORE_COLLECTION, CORE_ROLES, CORE_NODE_NAME, CloudDescriptor.NUM_SHARDS ); private final CoreContainer coreContainer; private final CloudDescriptor 
cloudDesc; /** The original standard core properties, before substitution */ protected final Properties originalCoreProperties = new Properties(); /** The original extra core properties, before substitution */ protected final Properties originalExtraProperties = new Properties(); /** The properties for this core, as available through getProperty() */ protected final Properties coreProperties = new Properties(); /** The properties for this core, substitutable by resource loaders */ protected final Properties substitutableProperties = new Properties(); /** * Create a new CoreDescriptor. * @param container the CoreDescriptor's container * @param name the CoreDescriptor's name * @param instanceDir a String containing the instanceDir * @param coreProps a Properties object of the properties for this core */ public CoreDescriptor(CoreContainer container, String name, String instanceDir, Properties coreProps) { this(container, name, instanceDir, coreProps, null); } public CoreDescriptor(CoreContainer container, String name, String instanceDir, String... properties) { this(container, name, instanceDir, toProperties(properties)); } private static Properties toProperties(String... properties) { Properties props = new Properties(); assert properties.length % 2 == 0; for (int i = 0; i < properties.length; i += 2) { props.setProperty(properties[i], properties[i+1]); } return props; } /** * Create a new CoreDescriptor. 
* @param container the CoreDescriptor's container * @param name the CoreDescriptor's name * @param instanceDir a String containing the instanceDir * @param coreProps a Properties object of the properties for this core * @param params additional params */ public CoreDescriptor(CoreContainer container, String name, String instanceDir, Properties coreProps, SolrParams params) { this.coreContainer = container; originalCoreProperties.setProperty(CORE_NAME, name); originalCoreProperties.setProperty(CORE_INSTDIR, instanceDir); Properties containerProperties = container.getContainerProperties(); name = PropertiesUtil.substituteProperty(checkPropertyIsNotEmpty(name, CORE_NAME), containerProperties); instanceDir = PropertiesUtil.substituteProperty(checkPropertyIsNotEmpty(instanceDir, CORE_INSTDIR), containerProperties); coreProperties.putAll(defaultProperties); coreProperties.put(CORE_NAME, name); coreProperties.put(CORE_INSTDIR, instanceDir); coreProperties.put(CORE_ABS_INSTDIR, convertToAbsolute(instanceDir, container.getCoreRootDirectory())); for (String propname : coreProps.stringPropertyNames()) { String propvalue = coreProps.getProperty(propname); if (isUserDefinedProperty(propname)) originalExtraProperties.put(propname, propvalue); else originalCoreProperties.put(propname, propvalue); if (!requiredProperties.contains(propname)) // Required props are already dealt with coreProperties.setProperty(propname, PropertiesUtil.substituteProperty(propvalue, containerProperties)); } loadExtraProperties(); buildSubstitutableProperties(); // TODO maybe make this a CloudCoreDescriptor subclass? if (container.isZooKeeperAware()) { cloudDesc = new CloudDescriptor(name, coreProperties, this); if (params != null) { cloudDesc.setParams(params); } } else { cloudDesc = null; } SolrCore.log.info("Created CoreDescriptor: " + coreProperties); } /** * Load properties specified in an external properties file. 
* * The file to load can be specified in a {@code properties} property on * the original Properties object used to create this CoreDescriptor. If * this has not been set, then we look for {@code conf/solrcore.properties} * underneath the instance dir. * * File paths are taken as read from the core's instance directory * if they are not absolute. */ protected void loadExtraProperties() { String filename = coreProperties.getProperty(CORE_PROPERTIES, DEFAULT_EXTERNAL_PROPERTIES_FILE); File propertiesFile = resolvePaths(filename); if (propertiesFile.exists()) { FileInputStream in = null; try { in = new FileInputStream(propertiesFile); Properties externalProps = new Properties(); externalProps.load(new InputStreamReader(in, StandardCharsets.UTF_8)); coreProperties.putAll(externalProps); } catch (IOException e) { String message = String.format(Locale.ROOT, "Could not load properties from %s: %s:", propertiesFile.getAbsoluteFile(), e.toString()); throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, message); } finally { IOUtils.closeQuietly(in); } } } /** * Create the properties object used by resource loaders, etc, for property * substitution. The default solr properties are prefixed with 'solr.core.', so, * e.g., 'name' becomes 'solr.core.name' */ protected void buildSubstitutableProperties() { for (String propName : coreProperties.stringPropertyNames()) { String propValue = coreProperties.getProperty(propName); if (!isUserDefinedProperty(propName)) propName = SOLR_CORE_PROP_PREFIX + propName; substitutableProperties.setProperty(propName, propValue); } } protected File resolvePaths(String filepath) { File file = new File(filepath); if (file.isAbsolute()) return file; return new File(getInstanceDir(), filepath); } /** * Is this property a Solr-standard property, or is it an extra property * defined per-core by the user? 
* @param propName the Property name
* @return {@code true} if this property is user-defined
*/
protected static boolean isUserDefinedProperty(String propName) {
  return !standardPropNames.contains(propName);
}

/**
 * Checks that a required property value is present.
 * @param value the value to check
 * @param propName the property name, used in the error message
 * @return {@code value}, unchanged, when it is non-empty
 * @throws SolrException if {@code value} is null or empty
 */
public static String checkPropertyIsNotEmpty(String value, String propName) {
  if (StringUtils.isEmpty(value)) {
    String message = String.format(Locale.ROOT, "Cannot create core with empty %s value", propName);
    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, message);
  }
  return value;
}

/**
 * Create a new CoreDescriptor with a given name and instancedir
 * @param container the CoreDescriptor's container
 * @param name the CoreDescriptor's name
 * @param instanceDir the CoreDescriptor's instancedir
 */
public CoreDescriptor(CoreContainer container, String name, String instanceDir) {
  this(container, name, instanceDir, new Properties());
}

/**
 * Create a new CoreDescriptor using the properties of an existing one
 * @param coreName the new CoreDescriptor's name
 * @param other the CoreDescriptor to copy
 */
public CoreDescriptor(String coreName, CoreDescriptor other) {
  this.coreContainer = other.coreContainer;
  this.cloudDesc = other.cloudDesc;
  this.originalExtraProperties.putAll(other.originalExtraProperties);
  this.originalCoreProperties.putAll(other.originalCoreProperties);
  this.coreProperties.putAll(other.coreProperties);
  this.substitutableProperties.putAll(other.substitutableProperties);
  // Override the copied name in every view of the properties.
  this.coreProperties.setProperty(CORE_NAME, coreName);
  this.originalCoreProperties.setProperty(CORE_NAME, coreName);
  this.substitutableProperties.setProperty(SOLR_CORE_PROP_PREFIX + CORE_NAME, coreName);
}

/** @return the name of the external properties file, if any (see {@code loadExtraProperties()}) */
public String getPropertiesName() {
  return coreProperties.getProperty(CORE_PROPERTIES);
}

/** @return the configured data directory property, may be null */
public String getDataDir() {
  return coreProperties.getProperty(CORE_DATADIR);
}

/** @return {@code true} when the data directory is still the built-in default */
public boolean usingDefaultDataDir() {
  return defaultProperties.get(CORE_DATADIR).equals(coreProperties.getProperty(CORE_DATADIR));
}

/**@return the core instance directory.
*/
public String getRawInstanceDir() {
  return coreProperties.getProperty(CORE_INSTDIR);
}

private static String convertToAbsolute(String instDir, String solrHome) {
  checkNotNull(instDir);
  // Resolve relative paths against solr home, then normalize.
  String resolved = Paths.get(solrHome).resolve(instDir).toString();
  return SolrResourceLoader.normalizeDir(resolved);
}

/** @return the core instance directory, prepended with solr_home if not an absolute path. */
public String getInstanceDir() {
  return coreProperties.getProperty(CORE_ABS_INSTDIR);
}

/** @return the core configuration resource name. */
public String getConfigName() {
  return coreProperties.getProperty(CORE_CONFIG);
}

/** @return the core schema resource name. */
public String getSchemaName() {
  return coreProperties.getProperty(CORE_SCHEMA);
}

/** @return the initial core name */
public String getName() {
  return coreProperties.getProperty(CORE_NAME);
}

/** @return the collection name from the cloud descriptor, or null outside cloud mode. */
public String getCollectionName() {
  if (cloudDesc == null) {
    return null;
  }
  return cloudDesc.getCollectionName();
}

public CoreContainer getCoreContainer() {
  return coreContainer;
}

public CloudDescriptor getCloudDescriptor() {
  return cloudDesc;
}

public boolean isLoadOnStartup() {
  return Boolean.parseBoolean(coreProperties.getProperty(CORE_LOADONSTARTUP, "false"));
}

public boolean isTransient() {
  return PropertiesUtil.toBoolean(coreProperties.getProperty(CORE_TRANSIENT, "false"));
}

public String getUlogDir() {
  return coreProperties.getProperty(CORE_ULOGDIR);
}

/**
 * Returns a specific property defined on this CoreDescriptor
 * @param prop - value to read from the properties structure.
 * @param defVal - return if no property found.
 * @return associated string. May be null.
*/
public String getCoreProperty(String prop, String defVal) {
  return coreProperties.getProperty(prop, defVal);
}

/**
 * Returns all substitutable properties defined on this CoreDescriptor
 * @return all substitutable properties defined on this CoreDescriptor
 */
public Properties getSubstitutableProperties() {
  return substitutableProperties;
}

@Override
public String toString() {
  // Plain concatenation produces the exact same string as the previous
  // StringBuilder chain, with less ceremony.
  return "CoreDescriptor[name=" + this.getName() + ";instanceDir=" + this.getInstanceDir() + "]";
}

public String getConfigSet() {
  return coreProperties.getProperty(CORE_CONFIGSET);
}

public String getConfigSetPropertiesName() {
  return coreProperties.getProperty(CORE_CONFIGSET_PROPERTIES);
}

}
/* * Copyright (c) 2005-2011, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.apimgt.impl; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.CarbonConstants; import org.wso2.carbon.apimgt.api.APIDefinition; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.api.APIManager; import org.wso2.carbon.apimgt.api.APIMgtResourceAlreadyExistsException; import org.wso2.carbon.apimgt.api.APIMgtResourceNotFoundException; import org.wso2.carbon.apimgt.api.model.API; import org.wso2.carbon.apimgt.api.model.APIIdentifier; import org.wso2.carbon.apimgt.api.model.APIKey; import org.wso2.carbon.apimgt.api.model.Application; import org.wso2.carbon.apimgt.api.model.Documentation; import org.wso2.carbon.apimgt.api.model.DocumentationType; import org.wso2.carbon.apimgt.api.model.ResourceFile; import org.wso2.carbon.apimgt.api.model.SubscribedAPI; import org.wso2.carbon.apimgt.api.model.Subscriber; import org.wso2.carbon.apimgt.api.model.Tier; import org.wso2.carbon.apimgt.impl.dao.ApiMgtDAO; import org.wso2.carbon.apimgt.impl.definitions.APIDefinitionFromSwagger20; import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder; import org.wso2.carbon.apimgt.impl.utils.APINameComparator; import org.wso2.carbon.apimgt.impl.utils.APIUtil; import org.wso2.carbon.apimgt.impl.utils.LRUCache; import 
org.wso2.carbon.apimgt.impl.utils.TierNameComparator; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.governance.api.common.dataobjects.GovernanceArtifact; import org.wso2.carbon.governance.api.generic.GenericArtifactManager; import org.wso2.carbon.governance.api.generic.dataobjects.GenericArtifact; import org.wso2.carbon.registry.core.ActionConstants; import org.wso2.carbon.registry.core.Association; import org.wso2.carbon.registry.core.Collection; import org.wso2.carbon.registry.core.Registry; import org.wso2.carbon.registry.core.RegistryConstants; import org.wso2.carbon.registry.core.Resource; import org.wso2.carbon.registry.core.config.RegistryContext; import org.wso2.carbon.registry.core.exceptions.RegistryException; import org.wso2.carbon.registry.core.jdbc.realm.RegistryAuthorizationManager; import org.wso2.carbon.registry.core.session.UserRegistry; import org.wso2.carbon.registry.core.utils.RegistryUtils; import org.wso2.carbon.user.api.AuthorizationManager; import org.wso2.carbon.user.core.UserRealm; import org.wso2.carbon.user.core.UserStoreException; import org.wso2.carbon.utils.multitenancy.MultitenantConstants; import org.wso2.carbon.utils.multitenancy.MultitenantUtils; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; /** * The basic abstract implementation of the core APIManager interface. This implementation uses * the governance system registry for storing APIs and related metadata. 
*/
public abstract class AbstractAPIManager implements APIManager {

    protected Log log = LogFactory.getLog(getClass());
    protected Registry registry;
    protected UserRegistry configRegistry;
    protected ApiMgtDAO apiMgtDAO;
    // Stays at INVALID_TENANT_ID (-1) until resolved from the logged-in user's domain.
    protected int tenantId = MultitenantConstants.INVALID_TENANT_ID;
    protected String tenantDomain;
    protected String username;
    // Per-tenant cache of artifact managers; bounded to 5 entries.
    private LRUCache<String, GenericArtifactManager> genericArtifactCache =
            new LRUCache<String, GenericArtifactManager>(5);

    // API definitions from swagger v2.0
    protected static final APIDefinition definitionFromSwagger20 = new APIDefinitionFromSwagger20();

    public AbstractAPIManager() throws APIManagementException {
    }

    /**
     * Creates a manager bound to the given user's tenant registries; a null
     * username falls back to the anonymous user against the super tenant.
     */
    public AbstractAPIManager(String username) throws APIManagementException {
        apiMgtDAO = ApiMgtDAO.getInstance();
        try {
            if (username == null) {
                // Anonymous access: super-tenant governance/config registries.
                this.registry = ServiceReferenceHolder.getInstance().getRegistryService().getGovernanceUserRegistry();
                this.configRegistry = ServiceReferenceHolder.getInstance().getRegistryService().getConfigSystemRegistry();
                this.username = CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME;
                ServiceReferenceHolder.setUserRealm((ServiceReferenceHolder.getInstance().getRealmService().getBootstrapRealm()));
            } else {
                // Resolve the tenant from the username and open registries for that tenant.
                String tenantDomainName = MultitenantUtils.getTenantDomain(username);
                String tenantUserName = MultitenantUtils.getTenantAwareUsername(username);
                int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
                        .getTenantId(tenantDomainName);
                this.tenantId = tenantId;
                this.tenantDomain = tenantDomainName;
                this.username = tenantUserName;

                APIUtil.loadTenantRegistry(tenantId);
                this.registry = ServiceReferenceHolder.getInstance().
                        getRegistryService().getGovernanceUserRegistry(tenantUserName, tenantId);
                this.configRegistry = ServiceReferenceHolder.getInstance().getRegistryService().getConfigSystemRegistry(tenantId);
                //load resources for each tenants.
                // NOTE(review): 'loadloadTenantAPIRXT' looks like a typo'd method name in APIUtil — confirm upstream.
                APIUtil.loadloadTenantAPIRXT(tenantUserName, tenantId);
                APIUtil.loadTenantAPIPolicy(tenantUserName, tenantId);

                //Check whether GatewayType is "Synapse" before attempting to load Custom-Sequences into registry
                APIManagerConfiguration configuration = ServiceReferenceHolder.getInstance()
                        .getAPIManagerConfigurationService().getAPIManagerConfiguration();
                String gatewayType = configuration.getFirstProperty(APIConstants.API_GATEWAY_TYPE);
                if (APIConstants.API_GATEWAY_TYPE_SYNAPSE.equalsIgnoreCase(gatewayType)) {
                    APIUtil.writeDefinedSequencesToTenantRegistry(tenantId);
                }
                ServiceReferenceHolder.setUserRealm((UserRealm) (ServiceReferenceHolder.getInstance().
                        getRealmService().getTenantUserRealm(tenantId)));
            }
            // NOTE(review): this unconditionally overwrites the realm set in both
            // branches above with the super-tenant config registry's realm — confirm intended.
            ServiceReferenceHolder.setUserRealm(ServiceReferenceHolder.getInstance().
                    getRegistryService().getConfigSystemRegistry().getUserRealm());
            registerCustomQueries(configRegistry, username);
        } catch (RegistryException e) {
            handleException("Error while obtaining registry objects", e);
        } catch (org.wso2.carbon.user.api.UserStoreException e) {
            handleException("Error while getting user registry for user:" + username, e);
        }
    }

    /**
     * Registers the stored registry queries (tag summary, latest APIs,
     * resources-by-tag) used by the store/publisher, creating them if absent.
     *
     * @param registry Registry instance to use
     * @param username logged-in user, or null for anonymous access
     * @throws RegistryException on error
     */
    private void registerCustomQueries(UserRegistry registry, String username)
            throws RegistryException, APIManagementException {
        String tagsQueryPath = RegistryConstants.QUERIES_COLLECTION_PATH + "/tag-summary";
        String latestAPIsQueryPath = RegistryConstants.QUERIES_COLLECTION_PATH + "/latest-apis";
        String resourcesByTag = RegistryConstants.QUERIES_COLLECTION_PATH + "/resource-by-tag";
        String path = RegistryUtils.getAbsolutePath(RegistryContext.getBaseInstance(),
                APIUtil.getMountedPath(RegistryContext.getBaseInstance(),
                        RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) +
                        APIConstants.GOVERNANCE_COMPONENT_REGISTRY_LOCATION);
        if (username == null) {
            try {
                UserRealm realm = ServiceReferenceHolder.getUserRealm();
RegistryAuthorizationManager authorizationManager = new RegistryAuthorizationManager(realm);
                // Anonymous access: allow the anonymous role to read the governance collection.
                authorizationManager.authorizeRole(APIConstants.ANONYMOUS_ROLE, path, ActionConstants.GET);
            } catch (UserStoreException e) {
                handleException("Error while setting the permissions", e);
            }
        } else if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
            // Tenant user: grant the same read permission through the tenant's own realm.
            int tenantId;
            try {
                tenantId = ServiceReferenceHolder.getInstance().getRealmService().
                        getTenantManager().getTenantId(tenantDomain);
                AuthorizationManager authManager = ServiceReferenceHolder.getInstance().getRealmService().
                        getTenantUserRealm(tenantId).getAuthorizationManager();
                authManager.authorizeRole(APIConstants.ANONYMOUS_ROLE, path, ActionConstants.GET);
            } catch (org.wso2.carbon.user.api.UserStoreException e) {
                handleException("Error while setting the permissions", e);
            }
        }

        // Stored query: tag usage counts over published APIs.
        if (!registry.resourceExists(tagsQueryPath)) {
            Resource resource = registry.newResource();
            //Tag Search Query
            //'MOCK_PATH' used to bypass ChrootWrapper -> filterSearchResult. A valid registry path is
            // a must for executeQuery results to be passed to client side
            String sql1 = "SELECT '" + APIUtil.getMountedPath(RegistryContext.getBaseInstance(),
                    RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) +
                    APIConstants.GOVERNANCE_COMPONENT_REGISTRY_LOCATION + "' AS MOCK_PATH, " +
                    " RT.REG_TAG_NAME AS TAG_NAME, " +
                    " COUNT(RT.REG_TAG_NAME) AS USED_COUNT " +
                    "FROM " +
                    " REG_RESOURCE_TAG RRT, " +
                    " REG_TAG RT, " +
                    " REG_RESOURCE R, " +
                    " REG_RESOURCE_PROPERTY RRP, " +
                    " REG_PROPERTY RP " +
                    "WHERE " +
                    " RT.REG_ID = RRT.REG_TAG_ID " +
                    " AND R.REG_MEDIA_TYPE = 'application/vnd.wso2-api+xml' " +
                    " AND RRT.REG_VERSION = R.REG_VERSION " +
                    " AND RRP.REG_VERSION = R.REG_VERSION " +
                    " AND RP.REG_NAME = 'STATUS' " +
                    " AND RRP.REG_PROPERTY_ID = RP.REG_ID " +
                    " AND (RP.REG_VALUE !='DEPRECATED' AND RP.REG_VALUE !='CREATED' AND RP.REG_VALUE !='BLOCKED' AND RP.REG_VALUE !='RETIRED') " +
                    "GROUP BY " +
                    " RT.REG_TAG_NAME";
            resource.setContent(sql1);
            resource.setMediaType(RegistryConstants.SQL_QUERY_MEDIA_TYPE);
            resource.addProperty(RegistryConstants.RESULT_TYPE_PROPERTY_NAME,
                    RegistryConstants.TAG_SUMMARY_RESULT_TYPE);
            registry.put(tagsQueryPath, resource);
        }

        // Stored query: recently added APIs, newest first.
        if (!registry.resourceExists(latestAPIsQueryPath)) {
            //Recently added APIs
            Resource resource = registry.newResource();
            String sql = "SELECT " +
                    " RR.REG_PATH_ID AS REG_PATH_ID, " +
                    " RR.REG_NAME AS REG_NAME " +
                    "FROM " +
                    " REG_RESOURCE RR, " +
                    " REG_RESOURCE_PROPERTY RRP, " +
                    " REG_PROPERTY RP " +
                    "WHERE " +
                    " RR.REG_MEDIA_TYPE = 'application/vnd.wso2-api+xml' " +
                    " AND RRP.REG_VERSION = RR.REG_VERSION " +
                    " AND RP.REG_NAME = 'STATUS' " +
                    " AND RRP.REG_PROPERTY_ID = RP.REG_ID " +
                    " AND (RP.REG_VALUE !='DEPRECATED' AND RP.REG_VALUE !='CREATED') " +
                    "ORDER BY " +
                    " RR.REG_LAST_UPDATED_TIME " +
                    "DESC ";
            resource.setContent(sql);
            resource.setMediaType(RegistryConstants.SQL_QUERY_MEDIA_TYPE);
            resource.addProperty(RegistryConstants.RESULT_TYPE_PROPERTY_NAME,
                    RegistryConstants.RESOURCES_RESULT_TYPE);
registry.put(latestAPIsQueryPath, resource);
        }

        // Stored query: resource UUIDs for a given tag (bound via the '?' parameter).
        if (!registry.resourceExists(resourcesByTag)) {
            Resource resource = registry.newResource();
            String sql = "SELECT '" + APIUtil.getMountedPath(RegistryContext.getBaseInstance(),
                    RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) +
                    APIConstants.GOVERNANCE_COMPONENT_REGISTRY_LOCATION + "' AS MOCK_PATH, " +
                    " R.REG_UUID AS REG_UUID " +
                    "FROM " +
                    " REG_RESOURCE_TAG RRT, " +
                    " REG_TAG RT, " +
                    " REG_RESOURCE R, " +
                    " REG_PATH RP " +
                    "WHERE " +
                    " RT.REG_TAG_NAME = ? " +
                    " AND R.REG_MEDIA_TYPE = 'application/vnd.wso2-api+xml' " +
                    " AND RP.REG_PATH_ID = R.REG_PATH_ID " +
                    " AND RT.REG_ID = RRT.REG_TAG_ID " +
                    " AND RRT.REG_VERSION = R.REG_VERSION ";
            resource.setContent(sql);
            resource.setMediaType(RegistryConstants.SQL_QUERY_MEDIA_TYPE);
            resource.addProperty(RegistryConstants.RESULT_TYPE_PROPERTY_NAME,
                    RegistryConstants.RESOURCE_UUID_RESULT_TYPE);
            registry.put(resourcesByTag, resource);
        }
    }

    public void cleanup() {
    }

    /**
     * Loads every API artifact visible in this manager's registry, skipping
     * artifacts that fail to load, and returns them sorted by name.
     */
    public List<API> getAllAPIs() throws APIManagementException {
        List<API> apiSortedList = new ArrayList<API>();
        boolean isTenantFlowStarted = false;
        try {
            if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
                isTenantFlowStarted = true;
                PrivilegedCarbonContext.startTenantFlow();
                PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
            }
            GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
            GenericArtifact[] artifacts = artifactManager.getAllGenericArtifacts();
            for (GenericArtifact artifact : artifacts) {
                API api = null;
                try {
                    api = APIUtil.getAPI(artifact);
                } catch (APIManagementException e) {
                    //log and continue since we want to load the rest of the APIs.
                    log.error("Error while loading API " + artifact.getAttribute(APIConstants.API_OVERVIEW_NAME), e);
                }
                if (api != null) {
                    apiSortedList.add(api);
                }
            }
        } catch (RegistryException e) {
            handleException("Failed to get APIs from the registry", e);
        } finally {
            if (isTenantFlowStarted) {
                PrivilegedCarbonContext.endTenantFlow();
            }
        }
        Collections.sort(apiSortedList, new APINameComparator());
        return apiSortedList;
    }

    /**
     * Fetches the API for the given identifier, selecting a cross-tenant
     * registry when the API's provider is in a different tenant, and enforcing
     * visibility for non-global APIs.
     */
    public API getAPI(APIIdentifier identifier) throws APIManagementException {
        String apiPath = APIUtil.getAPIPath(identifier);
        // Local 'registry' deliberately shadows the field: it may point at
        // another tenant's registry in the cross-tenant case below.
        Registry registry;
        try {
            String apiTenantDomain = MultitenantUtils.getTenantDomain(
                    APIUtil.replaceEmailDomainBack(identifier.getProviderName()));
            int apiTenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
                    .getTenantId(apiTenantDomain);
            if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(apiTenantDomain)) {
                APIUtil.loadTenantRegistry(apiTenantId);
            }

            if (this.tenantDomain == null || !this.tenantDomain.equals(apiTenantDomain)) { //cross tenant scenario
                registry = ServiceReferenceHolder.getInstance().getRegistryService().getGovernanceUserRegistry(
                        MultitenantUtils.getTenantAwareUsername(
                                APIUtil.replaceEmailDomainBack(identifier.getProviderName())), apiTenantId);
            } else {
                registry = this.registry;
            }
            GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
            Resource apiResource = registry.get(apiPath);
            String artifactId = apiResource.getUUID();
            if (artifactId == null) {
                throw new APIManagementException("artifact id is null for : " + apiPath);
            }
            GenericArtifact apiArtifact = artifactManager.getGenericArtifact(artifactId);
            API api = APIUtil.getAPIForPublishing(apiArtifact, registry);

            //check for API visibility
            if (APIConstants.API_GLOBAL_VISIBILITY.equals(api.getVisibility())) { //global api
                return api;
            }
            if (this.tenantDomain == null || !this.tenantDomain.equals(apiTenantDomain)) {
                throw new APIManagementException("User " + username + " does not have permission to view API : "
                        + api.getId().getApiName());
            }

            return api;

        } catch (RegistryException e) {
            handleException("Failed to get API from : " + apiPath, e);
            return null;
        } catch (org.wso2.carbon.user.api.UserStoreException e) {
            handleException("Failed to get API from : " + apiPath, e);
            return null;
        }
    }

    /**
     * Get API by registry artifact id
     *
     * @param uuid Registry artifact id
     * @param requestedTenantDomain tenantDomain for the registry
     * @return API of the provided artifact id
     * @throws APIManagementException
     */
    public API getAPIbyUUID(String uuid, String requestedTenantDomain) throws APIManagementException {
        try {
            Registry registry;
            if (requestedTenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals
                    (requestedTenantDomain)) {
                int id = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
                        .getTenantId(requestedTenantDomain);
                registry = ServiceReferenceHolder.getInstance().
                        getRegistryService().getGovernanceSystemRegistry(id);
            } else {
                if (this.tenantDomain != null &&
                        !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(this.tenantDomain)) {
                    // at this point, requested tenant = carbon.super but logged in user is anonymous or tenant
                    registry = ServiceReferenceHolder.getInstance().
                            getRegistryService().getGovernanceSystemRegistry(MultitenantConstants.SUPER_TENANT_ID);
                } else {
                    // both requested tenant and logged in user's tenant are carbon.super
                    registry = this.registry;
                }
            }
            GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);

            GenericArtifact apiArtifact = artifactManager.getGenericArtifact(uuid);
            if (apiArtifact != null) {
                return APIUtil.getAPIForPublishing(apiArtifact, registry);
            } else {
                handleResourceNotFoundException(
                        "Failed to get API. API artifact corresponding to artifactId " + uuid + " does not exist");
                return null;
            }
        } catch (RegistryException e) {
            handleException("Failed to get API", e);
        } catch (org.wso2.carbon.user.api.UserStoreException e) {
            handleException("Failed to get API", e);
            return null;
        }
        return null;
    }

    /**
     * Get minimal details of API by registry artifact id
     *
     * @param uuid Registry artifact id
     * @return API of the provided artifact id
     * @throws APIManagementException
     */
    public API getLightweightAPIByUUID(String uuid, String requestedTenantDomain) throws APIManagementException {
        try {
            Registry registry;
            if (requestedTenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals
                    (requestedTenantDomain)) {
                int id = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
                        .getTenantId(requestedTenantDomain);
                registry = ServiceReferenceHolder.getInstance().
                        getRegistryService().getGovernanceSystemRegistry(id);
            } else {
                if (this.tenantDomain != null &&
                        !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(this.tenantDomain)) {
                    // at this point, requested tenant = carbon.super but logged in user is anonymous or tenant
                    registry = ServiceReferenceHolder.getInstance().
                            getRegistryService().getGovernanceSystemRegistry(MultitenantConstants.SUPER_TENANT_ID);
                } else {
                    // both requested tenant and logged in user's tenant are carbon.super
                    registry = this.registry;
                }
            }
            GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);

            GenericArtifact apiArtifact = artifactManager.getGenericArtifact(uuid);
            if (apiArtifact != null) {
                return APIUtil.getAPIInformation(apiArtifact, registry);
            } else {
                handleResourceNotFoundException(
                        "Failed to get API. API artifact corresponding to artifactId " + uuid + " does not exist");
            }
        } catch (RegistryException e) {
            handleException("Failed to get API with uuid " + uuid, e);
        } catch (org.wso2.carbon.user.api.UserStoreException e) {
            handleException("Failed to get tenant Id while getting API with uuid " + uuid, e);
        }
        return null;
    }

    /**
     * Get minimal details of API by API identifier
     *
     * @param identifier APIIdentifier object
     * @return API of the provided APIIdentifier
     * @throws APIManagementException
     */
    public API getLightweightAPI(APIIdentifier identifier) throws APIManagementException {
        String apiPath = APIUtil.getAPIPath(identifier);
        boolean tenantFlowStarted = false;
        try {
            String tenantDomain = MultitenantUtils.getTenantDomain(
                    APIUtil.replaceEmailDomainBack(identifier.getProviderName()));
            PrivilegedCarbonContext.startTenantFlow();
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
            tenantFlowStarted = true;
            Registry registry = getRegistry(identifier, apiPath);
            if (registry != null) {
                Resource apiResource = registry.get(apiPath);
                String artifactId = apiResource.getUUID();
                if (artifactId == null) {
                    throw new APIManagementException("artifact id is null for : " + apiPath);
                }
                GenericArtifactManager artifactManager = getGenericArtifactManager(identifier, registry);
                GovernanceArtifact apiArtifact = artifactManager.getGenericArtifact(artifactId);
                return APIUtil.getAPIInformation(apiArtifact, registry);
            } else {
                handleException("Failed to get registry from api identifier: " + identifier);
                return null;
            }
        } catch (RegistryException e) {
            handleException("Failed to get API from : " + apiPath, e);
            return null;
        } finally {
            if (tenantFlowStarted) {
                PrivilegedCarbonContext.endTenantFlow();
            }
        }
    }

    /**
     * Returns the artifact manager for the provider's tenant, consulting the
     * LRU cache before creating a new one.
     */
    private GenericArtifactManager getGenericArtifactManager(APIIdentifier identifier, Registry registry)
            throws APIManagementException {
        String tenantDomain = MultitenantUtils
                .getTenantDomain(APIUtil.replaceEmailDomainBack(identifier.getProviderName()));
    // (continuation) Tail of the per-tenant artifact-manager cache lookup that begins
    // before this chunk: return the cached manager, or build one from this.registry
    // and cache it under the tenant domain.
    GenericArtifactManager manager = genericArtifactCache.get(tenantDomain);
    if (manager != null) {
        return manager;
    }
    manager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
    genericArtifactCache.put(tenantDomain, manager);
    return manager;
    }

    /**
     * Resolves the registry instance that holds the given API.
     * <p>
     * If the API provider belongs to a (non-super) tenant, that tenant's governance
     * system registry is loaded and returned. Otherwise, if this manager itself runs
     * in a tenant context, an anonymous-style governance user registry of the super
     * tenant is returned for the provider; else the manager's own registry is used.
     *
     * @param identifier API identifier whose provider decides the tenant
     * @param apiPath    registry path, used only for the error message
     * @return the resolved registry, or null if {@code handleException} were ever to return
     *         (it always throws, so null is effectively unreachable)
     * @throws APIManagementException on registry or user-store failures
     */
    private Registry getRegistry(APIIdentifier identifier, String apiPath) throws APIManagementException {
        Registry passRegistry;
        try {
            String tenantDomain = MultitenantUtils
                    .getTenantDomain(APIUtil.replaceEmailDomainBack(identifier.getProviderName()));
            if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
                int id = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
                        .getTenantId(tenantDomain);
                // explicitly load the tenant's registry
                APIUtil.loadTenantRegistry(id);
                passRegistry = ServiceReferenceHolder.getInstance().getRegistryService()
                        .getGovernanceSystemRegistry(id);
            } else {
                if (this.tenantDomain != null
                        && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(this.tenantDomain)) {
                    // explicitly load the tenant's registry
                    APIUtil.loadTenantRegistry(MultitenantConstants.SUPER_TENANT_ID);
                    passRegistry = ServiceReferenceHolder.getInstance().getRegistryService().getGovernanceUserRegistry(
                            identifier.getProviderName(), MultitenantConstants.SUPER_TENANT_ID);
                } else {
                    passRegistry = this.registry;
                }
            }
        } catch (RegistryException e) {
            handleException("Failed to get API from registry on path of : " + apiPath, e);
            return null;
        } catch (org.wso2.carbon.user.api.UserStoreException e) {
            handleException("Failed to get API from registry on path of : " + apiPath, e);
            return null;
        }
        return passRegistry;
    }

    /**
     * Loads the API stored at the given registry path.
     *
     * @param apiPath registry path of the API artifact
     * @return the API, or null only on the (unreachable) fall-through after handleException
     * @throws APIManagementException if the artifact has no UUID or a registry error occurs
     */
    public API getAPI(String apiPath) throws APIManagementException {
        try {
            GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
            Resource apiResource = registry.get(apiPath);
            String artifactId = apiResource.getUUID();
            if (artifactId == null) {
                throw new APIManagementException("artifact id is null for : " + apiPath);
            }
            GenericArtifact apiArtifact = artifactManager.getGenericArtifact(artifactId);
            return APIUtil.getAPI(apiArtifact);
        } catch (RegistryException e) {
            handleException("Failed to get API from : " + apiPath, e);
            return null;
        }
    }

    /**
     * Checks whether a registry resource exists for the given API identifier
     * under {@code API_ROOT_LOCATION/provider/name/version}.
     *
     * @param identifier API to look up
     * @return true if the resource exists
     * @throws APIManagementException on registry errors
     */
    public boolean isAPIAvailable(APIIdentifier identifier) throws APIManagementException {
        String path = APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR
                + identifier.getProviderName() + RegistryConstants.PATH_SEPARATOR
                + identifier.getApiName() + RegistryConstants.PATH_SEPARATOR + identifier.getVersion();
        try {
            return registry.resourceExists(path);
        } catch (RegistryException e) {
            handleException("Failed to check availability of api :" + path, e);
            return false;
        }
    }

    /**
     * Lists the version segments found under the API's registry collection.
     *
     * @param providerName API provider
     * @param apiName      API name
     * @return set of version strings (child path suffixes); empty if none
     * @throws APIManagementException if the path is not a collection or a registry error occurs
     */
    public Set<String> getAPIVersions(String providerName, String apiName) throws APIManagementException {
        Set<String> versionSet = new HashSet<String>();
        String apiPath = APIConstants.API_LOCATION + RegistryConstants.PATH_SEPARATOR + providerName
                + RegistryConstants.PATH_SEPARATOR + apiName;
        try {
            Resource resource = registry.get(apiPath);
            if (resource instanceof Collection) {
                Collection collection = (Collection) resource;
                String[] versionPaths = collection.getChildren();
                if (versionPaths == null || versionPaths.length == 0) {
                    return versionSet;
                }
                for (String path : versionPaths) {
                    // child path minus "<apiPath>/" prefix is the version segment
                    versionSet.add(path.substring(apiPath.length() + 1));
                }
            } else {
                throw new APIManagementException("API version must be a collection " + apiName);
            }
        } catch (RegistryException e) {
            handleException("Failed to get versions for API: " + apiName, e);
        }
        return versionSet;
    }

    /**
     * Returns the swagger 2.0 definition of the given API.
     *
     * @param apiId id of the APIIdentifier
     * @return A String containing the swagger 2.0 definition, or null if lookup failed
     *         before {@code handleException} rethrew
     * @throws APIManagementException on user-store or registry failures
     */
    @Override
    public String getSwagger20Definition(APIIdentifier apiId) throws APIManagementException {
        String apiTenantDomain = MultitenantUtils.getTenantDomain(
                APIUtil.replaceEmailDomainBack(apiId.getProviderName()));
        String swaggerDoc = null;
        try {
            Registry registryType;
            //Tenant store anonymous mode if current tenant and the required tenant is not matching
            if (this.tenantDomain == null || isTenantDomainNotMatching(apiTenantDomain)) {
                int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager().getTenantId(
                        apiTenantDomain);
                registryType = ServiceReferenceHolder.getInstance().getRegistryService().getGovernanceUserRegistry(
                        CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME, tenantId);
            } else {
                registryType = registry;
            }
            swaggerDoc = definitionFromSwagger20.getAPIDefinition(apiId, registryType);
        } catch (org.wso2.carbon.user.api.UserStoreException e) {
            handleException("Failed to get swagger documentation of API : " + apiId, e);
        } catch (RegistryException e) {
            handleException("Failed to get swagger documentation of API : " + apiId, e);
        }
        return swaggerDoc;
    }

    /**
     * Stores the given resource file in the governance registry and returns the
     * externally addressable registry URL path for it (tenant-aware: prefixed with
     * "/t/&lt;tenantDomain&gt;" for non-super tenants).
     *
     * @param resourcePath governance-relative path to store at
     * @param resourceFile content + media type to store
     * @return public registry path of the stored resource, or null only on the
     *         (unreachable) fall-through after handleException
     * @throws APIManagementException on registry errors
     */
    public String addResourceFile(String resourcePath, ResourceFile resourceFile) throws APIManagementException {
        try {
            Resource thumb = registry.newResource();
            thumb.setContentStream(resourceFile.getContent());
            thumb.setMediaType(resourceFile.getContentType());
            registry.put(resourcePath, thumb);
            if(MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equalsIgnoreCase(tenantDomain)){
                return RegistryConstants.PATH_SEPARATOR + "registry"
                        + RegistryConstants.PATH_SEPARATOR + "resource"
                        + RegistryConstants.PATH_SEPARATOR + "_system"
                        + RegistryConstants.PATH_SEPARATOR + "governance"
                        + resourcePath;
            }
            else{
                return "/t/"+tenantDomain+ RegistryConstants.PATH_SEPARATOR + "registry"
                        + RegistryConstants.PATH_SEPARATOR + "resource"
                        + RegistryConstants.PATH_SEPARATOR + "_system"
                        + RegistryConstants.PATH_SEPARATOR + "governance"
                        + resourcePath;
            }
        } catch (RegistryException e) {
            handleException("Error while adding the resource to the registry", e);
        }
        return null;
    }

    /**
     * Checks whether the given document already exists for the given api
     *
     * @param identifier API Identifier
     * @param docName    Name of the document
     * @return true if document already exists for the given api
     * @throws
 APIManagementException if failed to check existence of the documentation
     */
    public boolean isDocumentationExist(APIIdentifier identifier, String docName) throws APIManagementException {
        String docPath = APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR
                + identifier.getProviderName() + RegistryConstants.PATH_SEPARATOR
                + identifier.getApiName() + RegistryConstants.PATH_SEPARATOR
                + identifier.getVersion() + RegistryConstants.PATH_SEPARATOR
                + APIConstants.DOC_DIR + RegistryConstants.PATH_SEPARATOR + docName;
        try {
            return registry.resourceExists(docPath);
        } catch (RegistryException e) {
            handleException("Failed to check existence of the document :" + docPath, e);
        }
        return false;
    }

    /**
     * Lists every document associated with the given API, reading through this
     * manager's own registry. "Last updated" for INLINE docs is the newer of the
     * doc artifact's and its content resource's modification times.
     *
     * @param apiId API whose documentation associations are followed
     * @return list of Documentation objects (possibly empty)
     * @throws APIManagementException on registry errors
     */
    public List<Documentation> getAllDocumentation(APIIdentifier apiId) throws APIManagementException {
        List<Documentation> documentationList = new ArrayList<Documentation>();
        String apiResourcePath = APIUtil.getAPIPath(apiId);
        try {
            Association[] docAssociations = registry.getAssociations(apiResourcePath,
                    APIConstants.DOCUMENTATION_ASSOCIATION);
            for (Association association : docAssociations) {
                String docPath = association.getDestinationPath();
                Resource docResource = registry.get(docPath);
                GenericArtifactManager artifactManager = new GenericArtifactManager(registry,
                        APIConstants.DOCUMENTATION_KEY);
                GenericArtifact docArtifact = artifactManager.getGenericArtifact(docResource.getUUID());
                Documentation doc = APIUtil.getDocumentation(docArtifact);
                Date contentLastModifiedDate;
                Date docLastModifiedDate = docResource.getLastModified();
                if (Documentation.DocumentSourceType.INLINE.equals(doc.getSourceType())) {
                    // inline docs keep their content in a separate resource; report the newer timestamp
                    String contentPath = APIUtil.getAPIDocContentPath(apiId, doc.getName());
                    contentLastModifiedDate = registry.get(contentPath).getLastModified();
                    doc.setLastUpdated((contentLastModifiedDate.after(docLastModifiedDate) ?
                            contentLastModifiedDate : docLastModifiedDate));
                } else {
                    doc.setLastUpdated(docLastModifiedDate);
                }
                documentationList.add(doc);
            }
        } catch (RegistryException e) {
            handleException("Failed to get documentations for api " + apiId.getApiName(), e);
        }
        return documentationList;
    }

    /**
     * Tenant-aware variant of {@link #getAllDocumentation(APIIdentifier)}: when the
     * provider's tenant differs from this manager's tenant (or the manager is
     * anonymous), reads via the anonymous governance user registry of that tenant.
     * Documents the caller is not authorized to read are silently skipped.
     *
     * @param apiId          API whose documentation associations are followed
     * @param loggedUsername current user; NOTE(review): currently unused in the body
     * @return list of readable Documentation objects (possibly empty)
     * @throws APIManagementException on registry or user-store errors
     */
    public List<Documentation> getAllDocumentation(APIIdentifier apiId,String loggedUsername) throws APIManagementException {
        List<Documentation> documentationList = new ArrayList<Documentation>();
        String apiResourcePath = APIUtil.getAPIPath(apiId);
        try {
            String tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(apiId.getProviderName()));
            Registry registryType;
            /* If the API provider is a tenant, load tenant registry*/
            boolean isTenantMode=(tenantDomain != null);
            if ((isTenantMode && this.tenantDomain==null) || (isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant store anonymous mode
                int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
                        .getTenantId(tenantDomain);
                registryType = ServiceReferenceHolder.getInstance().
                        getRegistryService().getGovernanceUserRegistry(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME, tenantId);
            } else {
                registryType = registry;
            }
            Association[] docAssociations = registryType.getAssociations(apiResourcePath,
                    APIConstants.DOCUMENTATION_ASSOCIATION);
            for (Association association : docAssociations) {
                String docPath = association.getDestinationPath();
                Resource docResource = null;
                try {
                    docResource = registryType.get(docPath);
                } catch (org.wso2.carbon.registry.core.secure.AuthorizationFailedException e) {
                    //do nothing. Permission not allowed to access the doc.
                }catch (RegistryException e){
                    handleException("Failed to get documentations for api " + apiId.getApiName(), e);
                }
                if (docResource != null) {
                    GenericArtifactManager artifactManager = new GenericArtifactManager(registryType,
                            APIConstants.DOCUMENTATION_KEY);
                    GenericArtifact docArtifact = artifactManager.getGenericArtifact(
                            docResource.getUUID());
                    Documentation doc = APIUtil.getDocumentation(docArtifact, apiId.getProviderName());
                    Date contentLastModifiedDate;
                    Date docLastModifiedDate = docResource.getLastModified();
                    if (Documentation.DocumentSourceType.INLINE.equals(doc.getSourceType())) {
                        String contentPath = APIUtil.getAPIDocContentPath(apiId, doc.getName());
                        try {
                            // report the newer of doc artifact vs inline content timestamps
                            contentLastModifiedDate = registryType.get(contentPath).getLastModified();
                            doc.setLastUpdated((contentLastModifiedDate.after(docLastModifiedDate) ?
                                    contentLastModifiedDate : docLastModifiedDate));
                        } catch (org.wso2.carbon.registry.core.secure.AuthorizationFailedException e) {
                            //do nothing. Permission not allowed to access the doc.
                        }
                    } else {
                        doc.setLastUpdated(docLastModifiedDate);
                    }
                    documentationList.add(doc);
                }
            }
        } catch (RegistryException e) {
            handleException("Failed to get documentations for api " + apiId.getApiName(), e);
        } catch (org.wso2.carbon.user.api.UserStoreException e) {
            handleException("Failed to get documentations for api " + apiId.getApiName(), e);
        }
        return documentationList;
    }

    /**
     * Returns true when the given tenant domain differs from this manager's tenant
     * domain (an anonymous manager — null tenantDomain — never "matches").
     */
    private boolean isTenantDomainNotMatching(String tenantDomain) {
        if (this.tenantDomain != null) {
            return !(this.tenantDomain.equals(tenantDomain));
        }
        return true;
    }

    /**
     * Fetches a single document of the given API by name from this manager's registry.
     *
     * @param apiId   owning API
     * @param docType document type; NOTE(review): currently unused in the body
     * @param docName document name appended to the API doc path
     * @return the Documentation, or null if lookup failed before handleException rethrew
     * @throws APIManagementException on registry errors
     */
    public Documentation getDocumentation(APIIdentifier apiId, DocumentationType docType,
                                          String docName) throws APIManagementException {
        Documentation documentation = null;
        String docPath = APIUtil.getAPIDocPath(apiId) + docName;
        GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry,
                APIConstants.DOCUMENTATION_KEY);
        try {
            Resource docResource = registry.get(docPath);
            GenericArtifact artifact = artifactManager.getGenericArtifact(docResource.getUUID());
            documentation = APIUtil.getDocumentation(artifact);
        } catch (RegistryException e) {
            handleException("Failed to get documentation details", e);
        }
        return documentation;
    }

    /**
     * Get a documentation by artifact Id
     *
     * @param docId                 artifact id of the document
     * @param requestedTenantDomain tenant domain of the registry where the artifact is located
     * @return Document object which represents the artifact id
     * @throws APIManagementException on registry or user-store errors
     */
    public Documentation getDocumentation(String docId, String requestedTenantDomain) throws APIManagementException {
        Documentation documentation = null;
        try {
            Registry registryType;
            boolean isTenantMode = (requestedTenantDomain != null);
            //Tenant store anonymous mode if current tenant and the required tenant is not matching
            if ((isTenantMode && this.tenantDomain == null) || (isTenantMode && isTenantDomainNotMatching(
                    requestedTenantDomain))) {
                int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(requestedTenantDomain); registryType = ServiceReferenceHolder.getInstance(). getRegistryService() .getGovernanceUserRegistry(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME, tenantId); } else { registryType = registry; } GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registryType, APIConstants.DOCUMENTATION_KEY); GenericArtifact artifact = artifactManager.getGenericArtifact(docId); if (null != artifact) { documentation = APIUtil.getDocumentation(artifact); } } catch (RegistryException e) { handleException("Failed to get documentation details", e); } catch (org.wso2.carbon.user.api.UserStoreException e) { handleException("Failed to get documentation details", e); } return documentation; } public String getDocumentationContent(APIIdentifier identifier, String documentationName) throws APIManagementException { String contentPath = APIUtil.getAPIDocPath(identifier) + APIConstants.INLINE_DOCUMENT_CONTENT_DIR + RegistryConstants.PATH_SEPARATOR + documentationName; String tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(identifier.getProviderName())); Registry registry; boolean isTenantFlowStarted = false; try { if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { PrivilegedCarbonContext.startTenantFlow(); isTenantFlowStarted = true; PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } /* If the API provider is a tenant, load tenant registry*/ if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { int id = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager().getTenantId(tenantDomain); registry = ServiceReferenceHolder.getInstance(). getRegistryService().getGovernanceSystemRegistry(id); } else { if (this.tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(this.tenantDomain)) { registry = ServiceReferenceHolder.getInstance(). 
getRegistryService().getGovernanceUserRegistry(identifier.getProviderName(), MultitenantConstants.SUPER_TENANT_ID); } else { registry = this.registry; } } if (registry.resourceExists(contentPath)) { Resource docContent = registry.get(contentPath); Object content = docContent.getContent(); if (content != null) { return new String((byte[]) docContent.getContent(), Charset.defaultCharset()); } } } catch (RegistryException e) { String msg = "No document content found for documentation: " + documentationName + " of API: "+identifier.getApiName(); handleException(msg, e); } catch (org.wso2.carbon.user.api.UserStoreException e) { handleException("Failed to get ddocument content found for documentation: " + documentationName + " of API: "+identifier.getApiName(), e); } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } return null; } public Subscriber getSubscriberById(String accessToken) throws APIManagementException { return apiMgtDAO.getSubscriberById(accessToken); } public boolean isContextExist(String context) throws APIManagementException { // Since we don't have tenant in the APIM table, we do the filtering using this hack if(context!=null && context.startsWith("/t/")) context = context.replace("/t/" + MultitenantUtils.getTenantDomainFromUrl(context),""); //removing prefix if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { context = "/t/" + tenantDomain + context; } return apiMgtDAO.isContextExist(context); } public boolean isScopeKeyExist(String scopeKey, int tenantid) throws APIManagementException { return apiMgtDAO.isScopeKeyExist(scopeKey, tenantid); } public boolean isScopeKeyAssigned(APIIdentifier identifier, String scopeKey, int tenantid) throws APIManagementException { return apiMgtDAO.isScopeKeyAssigned(identifier, scopeKey, tenantid); } public boolean isApiNameExist(String apiName) throws APIManagementException { String tenantName = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME; 
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { tenantName = tenantDomain; } return apiMgtDAO.isApiNameExist(apiName, tenantName); } public void addSubscriber(Subscriber subscriber, String groupingId) throws APIManagementException { apiMgtDAO.addSubscriber(subscriber, groupingId); } public void addSubscriber(String username, String groupingId) throws APIManagementException { Subscriber subscriber = new Subscriber(username); subscriber.setSubscribedDate(new Date()); //TODO : need to set the proper email subscriber.setEmail(""); try { int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager() .getTenantId(MultitenantUtils.getTenantDomain(username)); subscriber.setTenantId(tenantId); apiMgtDAO.addSubscriber(subscriber, groupingId); } catch (APIManagementException e) { handleException("Error while adding the subscriber " + subscriber.getName(), e); } catch (org.wso2.carbon.user.api.UserStoreException e) { handleException("Error while adding the subscriber " + subscriber.getName(), e); } } public void updateSubscriber(Subscriber subscriber) throws APIManagementException { apiMgtDAO.updateSubscriber(subscriber); } public Subscriber getSubscriber(int subscriberId) throws APIManagementException { return apiMgtDAO.getSubscriber(subscriberId); } public ResourceFile getIcon(APIIdentifier identifier) throws APIManagementException { String artifactPath = APIConstants.API_IMAGE_LOCATION + RegistryConstants.PATH_SEPARATOR + identifier.getProviderName() + RegistryConstants.PATH_SEPARATOR + identifier.getApiName() + RegistryConstants.PATH_SEPARATOR + identifier.getVersion(); String thumbPath = artifactPath + RegistryConstants.PATH_SEPARATOR + APIConstants.API_ICON_IMAGE; try { if (registry.resourceExists(thumbPath)) { Resource res = registry.get(thumbPath); return new ResourceFile(res.getContentStream(), res.getMediaType()); } } catch (RegistryException e) { handleException("Error while loading API 
icon from the registry", e); } return null; } public Set<API> getSubscriberAPIs(Subscriber subscriber) throws APIManagementException { SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator()); Set<SubscribedAPI> subscribedAPIs = apiMgtDAO.getSubscribedAPIs(subscriber, null); boolean isTenantFlowStarted = false; try { if(tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)){ isTenantFlowStarted = true; PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } for (SubscribedAPI subscribedAPI : subscribedAPIs) { String apiPath = APIUtil.getAPIPath(subscribedAPI.getApiId()); Resource resource; try { resource = registry.get(apiPath); GenericArtifactManager artifactManager = new GenericArtifactManager(registry, APIConstants.API_KEY); GenericArtifact artifact = artifactManager.getGenericArtifact( resource.getUUID()); API api = APIUtil.getAPI(artifact, registry); if (api != null) { apiSortedSet.add(api); } } catch (RegistryException e) { handleException("Failed to get APIs for subscriber: " + subscriber.getName(), e); } } } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } return apiSortedSet; } /** * Returns the corresponding application given the uuid * @param uuid uuid of the Application * @return it will return Application corresponds to the uuid provided. 
     * @throws APIManagementException
     */
    public Application getApplicationByUUID(String uuid) throws APIManagementException {
        return apiMgtDAO.getApplicationByUUID(uuid);
    }

    /** returns the SubscribedAPI object which is related to the UUID
     *
     * @param uuid UUID of Subscription
     * @return SubscribedAPI object which is related to the UUID
     * @throws APIManagementException
     */
    public SubscribedAPI getSubscriptionByUUID(String uuid) throws APIManagementException {
        return apiMgtDAO.getSubscriptionByUUID(uuid);
    }

    /** Logs the message with its cause and rethrows it wrapped as APIManagementException. */
    protected final void handleException(String msg, Exception e) throws APIManagementException {
        log.error(msg, e);
        throw new APIManagementException(msg, e);
    }

    /** Logs the message and throws a new APIManagementException carrying it. */
    protected final void handleException(String msg) throws APIManagementException {
        log.error(msg);
        throw new APIManagementException(msg);
    }

    /** Logs and throws the "resource already exists" variant. */
    protected final void handleResourceAlreadyExistsException(String msg)
            throws APIMgtResourceAlreadyExistsException {
        log.error(msg);
        throw new APIMgtResourceAlreadyExistsException(msg);
    }

    /** Logs and throws the "resource not found" variant. */
    protected final void handleResourceNotFoundException(String msg)
            throws APIMgtResourceNotFoundException {
        log.error(msg);
        throw new APIMgtResourceNotFoundException(msg);
    }

    /** Delegates to the DAO: true if the access token exists. */
    public boolean isApplicationTokenExists(String accessToken) throws APIManagementException {
        return apiMgtDAO.isAccessTokenExists(accessToken);
    }

    /** Delegates to the DAO: true if the access token has been revoked. */
    public boolean isApplicationTokenRevoked(String accessToken) throws APIManagementException {
        return apiMgtDAO.isAccessTokenRevoked(accessToken);
    }

    /** Delegates to the DAO: loads metadata of the given access token. */
    public APIKey getAccessTokenData(String accessToken) throws APIManagementException {
        return apiMgtDAO.getAccessTokenData(accessToken);
    }

    /**
     * Searches access tokens. searchType selects the DAO query:
     * null or unknown -> plain token search; "User" -> by user; "Before"/"After"
     * -> by date (case-insensitive match on the type).
     *
     * @param searchType   one of null, "User", "Before", "After" (others fall back to plain search)
     * @param searchTerm   term passed to the DAO query
     * @param loggedInUser current user, forwarded to user/date queries
     * @return map of token id to APIKey
     * @throws APIManagementException from the DAO
     */
    public Map<Integer, APIKey> searchAccessToken(String searchType, String searchTerm,
                                                  String loggedInUser) throws APIManagementException {
        if (searchType == null) {
            return apiMgtDAO.getAccessTokens(searchTerm);
        } else {
            if ("User".equalsIgnoreCase(searchType)) {
                return apiMgtDAO.getAccessTokensByUser(searchTerm, loggedInUser);
            } else if ("Before".equalsIgnoreCase(searchType)) {
                return apiMgtDAO.getAccessTokensByDate(searchTerm, false, loggedInUser);
            } else if ("After".equalsIgnoreCase(searchType)) {
                return apiMgtDAO.getAccessTokensByDate(searchTerm, true, loggedInUser);
            } else {
                return apiMgtDAO.getAccessTokens(searchTerm);
            }
        }
    }

    /** Delegates to the DAO: APIs accessible with the given access token. */
    public Set<APIIdentifier> getAPIByAccessToken(String accessToken) throws APIManagementException{
        return apiMgtDAO.getAPIByAccessToken(accessToken);
    }

    /**
     * Loads an API while also passing its previous identifier/context through to
     * APIUtil (used after rename/context changes).
     *
     * @param identifier    current API identifier
     * @param oldIdentifier previous identifier forwarded to APIUtil.getAPI
     * @param oldContext    previous context forwarded to APIUtil.getAPI
     * @return the API, or null only on the (unreachable) fall-through after handleException
     * @throws APIManagementException if the artifact has no UUID or a registry error occurs
     */
    public API getAPI(APIIdentifier identifier,APIIdentifier oldIdentifier, String oldContext)
            throws APIManagementException {
        String apiPath = APIUtil.getAPIPath(identifier);
        try {
            GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry,
                    APIConstants.API_KEY);
            Resource apiResource = registry.get(apiPath);
            String artifactId = apiResource.getUUID();
            if (artifactId == null) {
                throw new APIManagementException("artifact id is null for : " + apiPath);
            }
            GenericArtifact apiArtifact = artifactManager.getGenericArtifact(artifactId);
            return APIUtil.getAPI(apiArtifact, registry,oldIdentifier, oldContext);
        } catch (RegistryException e) {
            handleException("Failed to get API from : " + apiPath, e);
            return null;
        }
    }

    /**
     * Returns every tier defined for this manager's tenant (or the global tiers when
     * the tenant id is invalid/unset), sorted by tier name. Tenant flow is started
     * and ended around the tenant-scoped lookup.
     */
    @Override
    public Set<Tier> getAllTiers() throws APIManagementException {
        Set<Tier> tiers = new TreeSet<Tier>(new TierNameComparator());
        Map<String, Tier> tierMap;
        if (tenantId == MultitenantConstants.INVALID_TENANT_ID) {
            tierMap = APIUtil.getAllTiers();
        } else {
            boolean isTenantFlowStarted = false;
            try {
                if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
                    PrivilegedCarbonContext.startTenantFlow();
                    isTenantFlowStarted = true;
                    PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
                }
                tierMap = APIUtil.getAllTiers(tenantId);
            } finally {
                if (isTenantFlowStarted) {
                    PrivilegedCarbonContext.endTenantFlow();
                }
            }
        }
        tiers.addAll(tierMap.values());
        return tiers;
    }

    /**
     * Returns every tier visible in the given tenant domain, sorted by tier name.
     * (Continues on the following lines.)
     */
    @Override
    public Set<Tier> getAllTiers(String tenantDomain) throws APIManagementException {
        Set<Tier> tiers = new
TreeSet<Tier>(new TierNameComparator()); Map<String, Tier> tierMap; boolean isTenantFlowStarted = false; try { if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { PrivilegedCarbonContext.startTenantFlow(); isTenantFlowStarted = true; PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } int requestedTenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); if (requestedTenantId == MultitenantConstants.SUPER_TENANT_ID || requestedTenantId == MultitenantConstants.INVALID_TENANT_ID) { tierMap = APIUtil.getAllTiers(); } else { tierMap = APIUtil.getAllTiers(requestedTenantId); } } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } tiers.addAll(tierMap.values()); return tiers; } /** * Returns a list of pre-defined # {@link org.wso2.carbon.apimgt.api.model.Tier} in the system. * * @return Set<Tier> */ public Set<Tier> getTiers() throws APIManagementException { Set<Tier> tiers = new TreeSet<Tier>(new TierNameComparator()); Map<String, Tier> tierMap; if (tenantId == MultitenantConstants.INVALID_TENANT_ID) { tierMap = APIUtil.getTiers(); } else { PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantId(tenantId, true); tierMap = APIUtil.getTiers(tenantId); PrivilegedCarbonContext.endTenantFlow(); } tiers.addAll(tierMap.values()); return tiers; } /** * Returns a list of pre-defined # {@link org.wso2.carbon.apimgt.api.model.Tier} in the system. 
* * @return Set<Tier> */ public Set<Tier> getTiers(String tenantDomain) throws APIManagementException { PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); Set<Tier> tiers = new TreeSet<Tier>(new TierNameComparator()); Map<String, Tier> tierMap; int requestedTenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); if (requestedTenantId == MultitenantConstants.SUPER_TENANT_ID || requestedTenantId == MultitenantConstants.INVALID_TENANT_ID) { tierMap = APIUtil.getTiers(); } else { tierMap = APIUtil.getTiers(requestedTenantId); } tiers.addAll(tierMap.values()); PrivilegedCarbonContext.endTenantFlow(); return tiers; } /** * Returns a list of pre-defined # {@link org.wso2.carbon.apimgt.api.model.Tier} in the system. * * @param tierType type of the tiers (api,resource ot application) * @param username current logged user * @return Set<Tier> return list of tier names * @throws APIManagementException APIManagementException if failed to get the predefined tiers */ public Set<Tier> getTiers(int tierType, String username) throws APIManagementException { Set<Tier> tiers = new TreeSet<Tier>(new TierNameComparator()); String tenantDomain = MultitenantUtils.getTenantDomain(username); Map<String, Tier> tierMap = APIUtil.getTiers(tierType, tenantDomain); tiers.addAll(tierMap.values()); return tiers; } /** * Returns a list of pre-defined # {@link org.wso2.carbon.apimgt.api.model.Tier} in the system. 
* * @return Map<String, String> */ public Map<String,String> getTenantDomainMappings(String tenantDomain, String apiType) throws APIManagementException { return APIUtil.getDomainMappings(tenantDomain, apiType); } public boolean isDuplicateContextTemplate(String contextTemplate) throws APIManagementException{ if (contextTemplate != null && contextTemplate.startsWith("/t/")) contextTemplate = contextTemplate.replace("/t/" + MultitenantUtils.getTenantDomainFromUrl(contextTemplate), ""); if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { contextTemplate = "/t/" + tenantDomain + contextTemplate; } return apiMgtDAO.isDuplicateContextTemplate(contextTemplate); } }
/* * Copyright (c) 2009-2015 jMonkeyEngine * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * * Neither the name of 'jMonkeyEngine' nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */
package com.jme3.math;

import org.junit.Test;
import static org.junit.Assert.*;
import org.junit.Rule;
import org.junit.rules.ExpectedException;

/**
 * JUnit 4 unit tests for {@link Vector3f}: add/dot/cross/divide/distance,
 * angle computation, basis generation, indexed access and interpolation,
 * including NaN/infinity edge cases.
 * NOTE: assertEquals(expected, actual, delta) on float treats NaN == NaN as
 * equal (JUnit compares via Double.compare), which several tests rely on.
 */
public class Vector3fTest {

    // Rule-based expected-exception support (JUnit 4 style).
    @Rule
    public ExpectedException thrown = ExpectedException.none();

    // add(Vector3f): NaN component propagates; others cancel to zero.
    @Test
    public void testAdd() {
        final Vector3f target = new Vector3f(1.0f, Float.NaN, 5.36f);
        final Vector3f vec = new Vector3f(-1.0f, 2.93f, -5.36f);
        final Vector3f retval = target.add(vec);
        assertNotNull(retval);
        assertEquals(0.0f, retval.x, 0.0f);
        assertEquals(Float.NaN, retval.y, 0.0f);
        assertEquals(0.0f, retval.z, 0.0f);
    }

    // add(float, float, float) component-wise.
    @Test
    public void testAdd2() {
        final Vector3f target = new Vector3f(0.0f, -7.52f, 3.1f);
        final Vector3f retval = target.add(1.42f, 7.52f, 1.1f);
        assertNotNull(retval);
        assertEquals(1.42f, retval.x, 0.0f);
        assertEquals(0.0f, retval.y, 0.0f);
        assertEquals(4.2f, retval.z, 0.0f);
    }

    // add(Vector3f, result): result is filled and also returned.
    @Test
    public void testAdd3() {
        final Vector3f target = new Vector3f(0.0f, -7.52f, 3.1f);
        final Vector3f other = new Vector3f(1.42f, 7.52f, 1.1f);
        final Vector3f result = new Vector3f();
        final Vector3f retval = target.add(other, result);
        assertNotNull(retval);
        assertEquals(retval, result);
        assertEquals(1.42f, retval.x, 0.0f);
        assertEquals(0.0f, retval.y, 0.0f);
        assertEquals(4.2f, retval.z, 0.0f);
    }

    // add(Vector3f, null result) is expected to throw NPE.
    @Test
    public void testAdd4() {
        thrown.expect(NullPointerException.class);
        final Vector3f target = new Vector3f(0.0f, -7.52f, 3.1f);
        final Vector3f other = new Vector3f(1.42f, 7.52f, 1.1f);
        final Vector3f result = null;
        target.add(other, result);
    }

    // add(null) is expected to return null (not throw).
    @Test
    public void testAdd5() {
        final Vector3f target = new Vector3f(0.0f, -7.52f, 3.1f);
        final Vector3f retval = target.add(null);
        assertNull(retval);
    }

    // addLocal(null) is expected to return null.
    @Test
    public void testAddLocal() {
        final Vector3f target = new Vector3f(0.0f, -7.52f, 3.1f);
        final Vector3f retval = target.addLocal(null);
        assertNull(retval);
    }

    // addLocal(Vector3f) mutates and returns this.
    @Test
    public void testAddLocal2() {
        final Vector3f target = new Vector3f(0.0f, -7.52f, 3.1f);
        final Vector3f retval = target.addLocal(new Vector3f(2.0f, 6.2f, 8.3f));
        assertNotNull(retval);
        assertEquals(retval.x, 2.0f, 0.0f);
        assertEquals(retval.y, -1.32f, 0.01f);
        assertEquals(retval.z, 11.4f, 0.0f);
    }

    // addLocal(float, float, float) mutates and returns this.
    @Test
    public void testAddLocal3() {
        final Vector3f target = new Vector3f(0.0f, -7.52f, 3.1f);
        final Vector3f retval = target.addLocal(2.0f, 6.2f, 8.3f);
        assertNotNull(retval);
        assertEquals(retval.x, 2.0f, 0.0f);
        assertEquals(retval.y, -1.32f, 0.01f);
        assertEquals(retval.z, 11.4f, 0.0f);
    }

    // dot product of two vectors.
    @Test
    public void testDot() {
        final Vector3f target = new Vector3f(0.0f, -1.5f, 3.0f);
        assertEquals(19.5f, target.dot(new Vector3f(2.0f, 3.0f, 8.0f)), 0.0f);
    }

    // dot(null) is expected to return 0.
    @Test
    public void testDot2() {
        final Vector3f target = new Vector3f(0.0f, -1.5f, 3.0f);
        assertEquals(0.0f, target.dot(null), 0.0f);
    }

    // angleBetween with NaN/denormal inputs — expected to clamp near pi.
    @Test
    public void testAngleBetween() {
        final Vector3f target = new Vector3f(Float.NaN, 6.08159e-39f, 5.33333f);
        final Vector3f otherVector = new Vector3f(3.76643e-39f, -2.97033e+38f, 0.09375f);
        assertEquals(3.141f, target.angleBetween(otherVector), 0.001f);
    }

    // angleBetween where the (clamped) cosine evaluates to 1 -> angle 0.
    @Test
    public void testAngleBetween2() {
        final Vector3f target = new Vector3f(-0.779272f, -2.08408e+38f, 5.33333f);
        final Vector3f otherVector = new Vector3f(4.50029e-39f, -1.7432f, 0.09375f);
        assertEquals(0.0f, target.angleBetween(otherVector), 0.0f);
    }

    // nearly opposite directions -> angle ~ pi.
    @Test
    public void testAngleBetween3() {
        final Vector3f target = new Vector3f(-8.57f, 5.93f, 5.33f);
        final Vector3f otherVector = new Vector3f(6.59f, -2.04f, -0.09f);
        assertEquals(3.141f, target.angleBetween(otherVector), 0.01f);
    }

    // perpendicular unit axes -> angle ~ pi/2.
    @Test
    public void testAngleBetween4() {
        final Vector3f target = new Vector3f(0.0f, -1.0f, 0.0f);
        final Vector3f otherVector = new Vector3f(1.0f, 0.0f, 0.0f);
        assertEquals(1.57f, target.angleBetween(otherVector), 0.01f);
    }

    // cross(v, result): z-only result for two vectors in the XY plane.
    @Test
    public void testCross() {
        final Vector3f target = new Vector3f(-1.55f, 2.07f, -0.0f);
        final Vector3f v = new Vector3f(4.39f, 1.11f, 0.0f);
        final Vector3f result = new Vector3f(0.0f, 0.0f, 0.0f);
        final Vector3f retval = target.cross(v, result);
        assertEquals(retval, result);
        assertNotNull(result);
        assertEquals(0.0f, result.x, 0.0f);
        assertEquals(0.0f, result.y, 0.0f);
        assertEquals(-10.807f, result.z, 0.01f);
        assertNotNull(retval);
        assertEquals(0.0f, retval.x, 0.0f);
        assertEquals(0.0f, retval.y, 0.0f);
        assertEquals(-10.807f, retval.z, 0.01f);
    }

    // cross(v): NaN in x poisons the y/z components only.
    @Test
    public void testCross2() {
        final Vector3f target = new Vector3f(Float.NaN, 0.042f, 1.76f);
        final Vector3f v = new Vector3f(0.0012f, 7.64f, 4.50f);
        final Vector3f retval = target.cross(v);
        assertNotNull(retval);
        assertEquals(-13.257f, retval.x, 0.001f);
        assertEquals(Float.NaN, retval.y, 0.0f);
        assertEquals(Float.NaN, retval.z, 0.0f);
    }

    // cross(x, y, z, result) scalar-component overload.
    @Test
    public void testCross3() {
        final Vector3f target = new Vector3f(7.814f, 2.570f, 1.320f);
        final Vector3f result = new Vector3f(0.0f, 0.0f, 0.0f);
        final Vector3f retval = target.cross(1.607f, -6.762f, -0.007f, result);
        assertEquals(result, retval);
        assertNotNull(retval);
        assertEquals(8.90785f, retval.x, 0.0001f);
        assertEquals(2.17593f, retval.y, 0.0001f);
        assertEquals(-56.96825f, retval.z, 0.0001f);
    }

    // crossLocal mutates this and returns it.
    @Test
    public void testCrossLocal() {
        final Vector3f target = new Vector3f(-1.80144e+16f, 0.0f, 8.4323e+06f);
        final Vector3f v = new Vector3f(8.9407e-08f, 0.0f, -1.05324e-35f);
        final Vector3f retval = target.crossLocal(v);
        assertEquals(target, retval);
        assertNotNull(retval);
        assertEquals(-0.0f, retval.x, 0.0f);
        assertEquals(0.753f, retval.y, 0.01f);
        assertEquals(-0.0f, retval.z, 0.0f);
    }

    // distance overflow to +Infinity for huge component differences.
    @Test
    public void testDistance() {
        final Vector3f target = new Vector3f(3.86405e+18f, 3.02146e+23f, 0.171875f);
        final Vector3f v = new Vector3f(-2.0f, -1.61503e+19f, 0.171875f);
        assertEquals(Float.POSITIVE_INFINITY, target.distance(v), 0.0f);
    }

    // ordinary Euclidean distance.
    @Test
    public void testDistance2() {
        final Vector3f target = new Vector3f(5.0f, 4.0f, 6.0f);
        final Vector3f v = new Vector3f(-2.0f, -7.0f, 0.5f);
        assertEquals(14.150971f, target.distance(v), 0.0f);
    }

    // divide by vector: 0/0 -> NaN, finite/Infinity -> 0.
    @Test
    public void testDivide_byVector() {
        final Vector3f target = new Vector3f(0.0f, 8.63998e+37f, 3.23117e-27f);
        final Vector3f divideBy = new Vector3f(0.0f, Float.POSITIVE_INFINITY, Float.POSITIVE_INFINITY);
        final Vector3f retval = target.divide(divideBy);
        assertNotNull(retval);
        assertEquals(Float.NaN, retval.x, 0.0f);
        assertEquals(0.0f, retval.y, 0.0f);
        assertEquals(0.0f, retval.z, 0.0f);
    }

    // divide by scalar 0 -> +Infinity in each component.
    @Test
    public void testDivide_byScalar() {
        final Vector3f target = new Vector3f(2e+28f, 7e+19f, 3.e+23f);
        final Vector3f retval = target.divide(0.0f);
        assertNotNull(retval);
        assertEquals(Float.POSITIVE_INFINITY, retval.x, 0.0f);
        assertEquals(Float.POSITIVE_INFINITY, retval.y, 0.0f);
        assertEquals(Float.POSITIVE_INFINITY, retval.z, 0.0f);
    }

    // divide by finite scalar.
    @Test
    public void testDivide_byScalar2() {
        final Vector3f target = new Vector3f(1.98f, 7.43f, 9.61f);
        final Vector3f retval = target.divide(3.5f);
        assertNotNull(retval);
        assertEquals(0.5657f, retval.x, 0.001f);
        assertEquals(2.1228f, retval.y, 0.001f);
        assertEquals(2.7457f, retval.z, 0.001f);
    }

    // divideLocal by scalar mutates and returns this.
    @Test
    public void testDivideLocal_byScalar() {
        final Vector3f target = new Vector3f(1.98f, 7.43f, 9.61f);
        final Vector3f retval = target.divideLocal(3.5f);
        assertNotNull(retval);
        assertEquals(0.5657f, retval.x, 0.001f);
        assertEquals(2.1228f, retval.y, 0.001f);
        assertEquals(2.7457f, retval.z, 0.001f);
    }

    // divideLocal by vector, component-wise.
    @Test
    public void testDivideLocal2_byVector() {
        final Vector3f target = new Vector3f(1.98f, 7.43f, 9.61f);
        final Vector3f retval = target.divideLocal(new Vector3f(1.2f, 2.5f, 6.3f));
        assertNotNull(retval);
        assertEquals(1.65f, retval.x, 0.001f);
        assertEquals(2.972f, retval.y, 0.001f);
        assertEquals(1.5253967f, retval.z, 0.001f);
    }

    // generateComplementBasis: NaN components of w propagate into u and v.
    @Test
    public void testGenerateComplementBasis() {
        final Vector3f u = new Vector3f();
        final Vector3f v = new Vector3f();
        final Vector3f w = new Vector3f(-7.0f, Float.NaN, Float.NaN);
        Vector3f.generateComplementBasis(u, v, w);
        assertNotNull(v);
        assertEquals(Float.NaN, v.x, 0.0f);
        assertEquals(Float.NaN, v.y, 0.0f);
        assertEquals(Float.NaN, v.z, 0.0f);
        assertNotNull(u);
        assertEquals(0.0f, u.x, 0.0f);
        assertEquals(Float.NaN, u.y, 0.0f);
        assertEquals(Float.NaN, u.z, 0.0f);
    }

    // generateComplementBasis with NaN only in z.
    @Test
    public void testGenerateComplementBasis2() {
        final Vector3f u = new Vector3f();
        final Vector3f v = new Vector3f();
        final Vector3f w = new Vector3f(-7.0f, 1.075f, Float.NaN);
        Vector3f.generateComplementBasis(u, v, w);
        assertNotNull(v);
        assertEquals(Float.NaN, v.x, 0.0f);
        assertEquals(Float.NaN, v.y, 0.0f);
        assertEquals(Float.NaN, v.z, 0.0f);
        assertNotNull(u);
        assertEquals(Float.NaN, u.x, 0.0f);
        assertEquals(0.0f, u.y, 0.0f);
        assertEquals(Float.NaN, u.z, 0.0f);
    }

    // generateComplementBasis with a finite w (u,v not normalized).
    @Test
    public void testGenerateComplementBasis3() {
        final Vector3f u = new Vector3f();
        final Vector3f v = new Vector3f();
        final Vector3f w = new Vector3f(-7.0f, 1.075f, 4.3f);
        Vector3f.generateComplementBasis(u, v, w);
        assertNotNull(v);
        assertEquals(-0.9159, v.x, 0.001f);
        assertEquals(-8.2152, v.y, 0.001f);
        assertEquals(0.5626, v.z, 0.001f);
        assertNotNull(u);
        assertEquals(-0.5234f, u.x, 0.001f);
        assertEquals(0.0f, u.y, 0.0f);
        assertEquals(-0.8520f, u.z, 0.001f);
    }

    // generateOrthonormalBasis: denormal w overflows to infinities.
    @Test
    public void testGenerateOrthonormalBasis() {
        final Vector3f u = new Vector3f();
        final Vector3f v = new Vector3f();
        final Vector3f w = new Vector3f(1.6e-37f, -2.24e-44f, -2.08e-36f);
        Vector3f.generateOrthonormalBasis(u, v, w);
        assertNotNull(v);
        assertEquals(Float.NEGATIVE_INFINITY, v.x, 0.0f);
        assertEquals(Float.NEGATIVE_INFINITY, v.y, 0.0f);
        assertEquals(Float.POSITIVE_INFINITY, v.z, 0.0f);
        assertNotNull(u);
        assertEquals(Float.POSITIVE_INFINITY, u.x, 0.0f);
        assertEquals(0.0f, u.y, 0.0f);
        assertEquals(Float.POSITIVE_INFINITY, u.z, 0.0f);
    }

    // generateOrthonormalBasis: |w|^2 overflows -> NaNs; w itself normalized to 0.
    @Test
    public void testGenerateOrthonormalBasis2() {
        final Vector3f u = new Vector3f();
        final Vector3f v = new Vector3f();
        final Vector3f w = new Vector3f(2e+20f, -5e-20f, -14e+20f);
        Vector3f.generateOrthonormalBasis(u, v, w);
        assertNotNull(v);
        assertEquals(Float.NaN, v.x, 0.0f);
        assertEquals(Float.NaN, v.y, 0.0f);
        assertEquals(Float.NaN, v.z, 0.0f);
        assertNotNull(u);
        assertEquals(Float.NaN, u.x, 0.0f);
        assertEquals(0.0f, u.y, 0.0f);
        assertEquals(Float.NaN, u.z, 0.0f);
        assertNotNull(w);
        assertEquals(0.0f, w.x, 0.0f);
        assertEquals(-0.0f, w.y, 0.0f);
        assertEquals(-0.0f, w.z, 0.0f);
    }

    // generateOrthonormalBasis: denormal w, mixed-sign infinities in u and v.
    @Test
    public void testGenerateOrthonormalBasis3() {
        final Vector3f u = new Vector3f();
        final Vector3f v = new Vector3f();
        final Vector3f w = new Vector3f(-1.24672e-39f, -1.25343e-39f, -2.08336e-36f);
        Vector3f.generateOrthonormalBasis(u, v, w);
        assertNotNull(v);
        assertEquals(Float.NEGATIVE_INFINITY, v.x, 0.0f);
        assertEquals(Float.POSITIVE_INFINITY, v.y, 0.0f);
        assertEquals(Float.POSITIVE_INFINITY, v.z, 0.0f);
        assertNotNull(u);
        assertEquals(0.0f, u.x, 0.0f);
        assertEquals(Float.NEGATIVE_INFINITY, u.y, 0.0f);
        assertEquals(Float.POSITIVE_INFINITY, u.z, 0.0f);
    }

    // generateOrthonormalBasis with a finite w: u,v come out unit length.
    @Test
    public void testGenerateOrthonormalBasis4() {
        final Vector3f u = new Vector3f();
        final Vector3f v = new Vector3f();
        final Vector3f w = new Vector3f(-7.0f, 1.075f, 4.3f);
        Vector3f.generateOrthonormalBasis(u, v, w);
        assertNotNull(v);
        assertEquals(-0.1105, v.x, 0.001f);
        assertEquals(-0.9915, v.y, 0.001f);
        assertEquals(0.0679, v.z, 0.001f);
        assertNotNull(u);
        assertEquals(-0.5234f, u.x, 0.001f);
        assertEquals(0.0f, u.y, 0.0f);
        assertEquals(-0.8520f, u.z, 0.001f);
    }

    // get(index) with an out-of-range index must throw.
    @Test
    public void testGet_illegalArgumentException() {
        thrown.expect(IllegalArgumentException.class);
        new Vector3f(0.0f, 0.0f, 0.0f).get(536_870_914);
    }

    // get(0..2) returns x, y, z respectively.
    @Test
    public void testGet() {
        final Vector3f target = new Vector3f(0.0f, 0.5f, 1.5f);
        assertEquals(0.0f, target.get(0), 0.0f);
        assertEquals(0.5f, target.get(1), 0.0f);
        assertEquals(1.5f, target.get(2), 0.0f);
    }

    // interpolateLocal with an extrapolating (negative) factor mutates this.
    @Test
    public void testInterpolateLocal() {
        final Vector3f target = new Vector3f();
        final Vector3f beginVec = new Vector3f(0.0f, -9.094f, 0.0f);
        final Vector3f finalVec = new Vector3f(-0.0f, 1.355f, 1.414f);
        final Vector3f retval = target.interpolateLocal(beginVec, finalVec, -4.056f);
        assertEquals(target, retval);
        assertNotNull(retval);
        assertEquals(0.0f, retval.x, 0.0f);
        assertEquals(-51.475147f, retval.y, 0.01f);
        assertEquals(-5.736f, retval.z, 0.001f);
    }

    // (Truncated in this chunk: continues past the end of the visible source.)
    @Test
    public void testInterpolateLocal2() {
        final Vector3f target = new
Vector3f(1.5f, 3.5f, 8.2f);
        // Two-argument interpolateLocal with factor 3.0 extrapolates past 'other'.
        final Vector3f other = new Vector3f(5.0f, 1.5f, 2.0f);
        final Vector3f retval = target.interpolateLocal(other, 3.0f);
        assertEquals(target, retval);
        assertNotNull(retval);
        assertEquals(12.0f, retval.x, 0.0f);
        assertEquals(-2.5f, retval.y, 0.01f);
        assertEquals(-10.4f, retval.z, 0.001f);
    }

    // isSimilar with varying epsilon, a null argument, and per-component gaps.
    @Test
    public void testIsSimilar() {
        final Vector3f target = new Vector3f(-1.14f, 8.50f, 1.88f);
        final Vector3f other = new Vector3f(-1.52f, 8.50f, 3.76f);
        assertTrue(target.isSimilar(other, 2.0f));
        // Same vectors, zero tolerance: not similar.
        final Vector3f target_2 = new Vector3f(-1.14f, 8.50f, 1.88f);
        final Vector3f other_2 = new Vector3f(-1.52f, 8.50f, 3.76f);
        assertFalse(target_2.isSimilar(other_2, 0.0f));
        // Null comparand is never similar.
        final Vector3f target_3 = new Vector3f(-1.14f, 8.50f, 1.88f);
        final Vector3f other_3 = null;
        assertFalse(target_3.isSimilar(other_3, 0.0f));
        final Vector3f target_4 = new Vector3f(-1.14f, -1.14f, 1.88f);
        final Vector3f other_4 = new Vector3f(-1.52f, -1.52f, 3.76f);
        assertFalse(target_4.isSimilar(other_4, 1.2f));
        // NOTE(review): target_5/other_5 duplicate the target_4/other_4 case
        // exactly — likely a copy-paste; consider varying the inputs or removing.
        final Vector3f target_5 = new Vector3f(-1.14f, -1.14f, 1.88f);
        final Vector3f other_5 = new Vector3f(-1.52f, -1.52f, 3.76f);
        assertFalse(target_5.isSimilar(other_5, 1.2f));
        final Vector3f target_6 = new Vector3f(-1.14f, -11.14f, 1.0f);
        final Vector3f other_6 = new Vector3f(-1.1f, -1.52f, 1.0f);
        assertFalse(target_6.isSimilar(other_6, 1.2f));
    }

    // Exactly the six signed axis unit vectors qualify; NaN does not.
    @Test
    public void testIsUnitVector() {
        assertFalse(new Vector3f(1.07f, 2.12f, 3.32f).isUnitVector());
        assertFalse(new Vector3f(1.07f, 2.12f, Float.NaN).isUnitVector());
        assertTrue(new Vector3f(1.0f, 0.0f, 0.0f).isUnitVector());
        assertTrue(new Vector3f(0.0f, 1.0f, 0.0f).isUnitVector());
        assertTrue(new Vector3f(0.0f, 0.0f, 1.0f).isUnitVector());
        assertTrue(new Vector3f(0.0f, 0.0f, -1.0f).isUnitVector());
        assertTrue(new Vector3f(-1.0f, 0.0f, 0.0f).isUnitVector());
        assertTrue(new Vector3f(0.0f, -1.0f, 0.0f).isUnitVector());
    }

    // NaN, either infinity, or null makes a vector invalid; finite vectors are valid.
    @Test
    public void testIsValidVector() {
        assertFalse(Vector3f.isValidVector(new Vector3f(Float.NaN, 2.1f, 3.0f)));
        assertFalse(Vector3f.isValidVector(new Vector3f(Float.POSITIVE_INFINITY, 1.5f, 1.9f)));
        assertFalse(Vector3f.isValidVector(new Vector3f(Float.NEGATIVE_INFINITY, 2.5f, 8.2f)));
        assertFalse(Vector3f.isValidVector(null));
        assertTrue(Vector3f.isValidVector(new Vector3f()));
        assertTrue(Vector3f.isValidVector(new Vector3f(1.5f, 5.7f, 8.2f)));
    }

    // length(): tiny components are asserted to underflow to 0 (presumably the
    // squares vanish before the sqrt — TODO confirm against the implementation),
    // NaN/infinity propagate, and huge components overflow to +Infinity.
    @Test
    public void testLength() {
        assertEquals(0.0f, new Vector3f(1.88079e-37f, 0.0f, 1.55077e-36f).length(), 0.0f);
        assertEquals(Float.NaN, new Vector3f(Float.NaN, 0.0f, 1.55077e-36f).length(), 0.0f);
        assertEquals(Float.POSITIVE_INFINITY,
            new Vector3f(Float.POSITIVE_INFINITY, 0.0f, 1.0f).length(), 0.0f);
        assertEquals(4.0124f, new Vector3f(1.9f, 3.2f, 1.5f).length(), 0.001f);
        assertEquals(Float.POSITIVE_INFINITY,
            new Vector3f(1.8e37f, 1.8e37f, 1.5e36f).length(), 0.0f);
    }

    // maxLocal with signed zeros: +0 is kept over -0.
    @Test
    public void testMaxLocal() {
        final Vector3f target = new Vector3f();
        final Vector3f retval = target.maxLocal(new Vector3f(-0.0f, -0.0f, -0.0f));
        assertEquals(target, retval);
        assertNotNull(retval);
        assertEquals(0.0f, retval.x, 0.0f);
        assertEquals(0.0f, retval.y, 0.0f);
        assertEquals(0.0f, retval.z, 0.0f);
    }

    // maxLocal mutates the target in place (asserted via target.z).
    @Test
    public void testMaxLocal2() {
        final Vector3f target = new Vector3f(0.0f, 0.0f, -1.32931e+36f);
        final Vector3f retval = target.maxLocal(new Vector3f(-0.0f, -0.0f, 1.32923e+36f));
        assertEquals(target, retval);
        assertEquals(1.32923e+36f, target.z, 0.0f);
        assertNotNull(retval);
        assertEquals(0.0f, retval.x, 0.0f);
        assertEquals(0.0f, retval.y, 0.0f);
        assertEquals(1.32923e+36f, retval.z, 0.0f);
    }

    // maxLocal with infinities: +Infinity wins.
    @Test
    public void testMaxLocal3() {
        final Vector3f target = new Vector3f(0.0f, Float.NEGATIVE_INFINITY, -1.32931e+36f);
        final Vector3f retval =
            target.maxLocal(new Vector3f(-0.0f, Float.POSITIVE_INFINITY, 1.32923e+36f));
        assertEquals(target, retval);
        assertNotNull(retval);
        assertEquals(0.0f, retval.x, 0.0f);
        assertEquals(Float.POSITIVE_INFINITY, retval.y, 0.0f);
        assertEquals(1.32923e+36f, retval.z, 0.0f);
    }

    // maxLocal with a subnormal negative x: the 0.0f from 'other' wins.
    @Test
    public void testMaxLocal4() {
        final Vector3f target =
            new Vector3f(-2.24208e-44f, Float.NEGATIVE_INFINITY, -1.32f);
        final Vector3f other = new Vector3f(0.0f, Float.POSITIVE_INFINITY, 1.35f);
        final Vector3f retval = target.maxLocal(other);
        assertEquals(target, retval);
        assertNotNull(retval);
        assertEquals(0f, retval.x, 0.0f);
        assertEquals(Float.POSITIVE_INFINITY, retval.y, 0.0f);
        assertEquals(1.35f, retval.z, 0.0f);
    }

    // minLocal with signed zeros only.
    @Test
    public void testMinLocal() {
        final Vector3f target = new Vector3f(0.0f, 0.0f, 0.0f);
        final Vector3f other = new Vector3f(-0.0f, -0.0f, -0.0f);
        final Vector3f retval = target.minLocal(other);
        assertEquals(target, retval);
        assertNotNull(retval);
        assertEquals(0.0f, retval.x, 0.0f);
        assertEquals(0.0f, retval.y, 0.0f);
        assertEquals(0.0f, retval.z, 0.0f);
    }

    // minLocal: -0.0 replaces +Infinity.
    @Test
    public void testMinLocal2() {
        final Vector3f target = new Vector3f(0.0f, 0.0f, Float.POSITIVE_INFINITY);
        final Vector3f other = new Vector3f(-0.0f, -0.0f, -0.0f);
        final Vector3f retval = target.minLocal(other);
        assertEquals(target, retval);
        assertNotNull(retval);
        assertEquals(0.0f, retval.x, 0.0f);
        assertEquals(0.0f, retval.y, 0.0f);
        assertEquals(-0.0f, retval.z, 0.0f);
    }

    // minLocal: -Infinity wins over +Infinity.
    @Test
    public void testMinLocal3() {
        final Vector3f target =
            new Vector3f(0.0f, Float.POSITIVE_INFINITY, Float.POSITIVE_INFINITY);
        final Vector3f other = new Vector3f(-0.0f, Float.NEGATIVE_INFINITY, -0.0f);
        final Vector3f retval = target.minLocal(other);
        assertEquals(target, retval);
        assertNotNull(retval);
        assertEquals(0.0f, retval.x, 0.0f);
        assertEquals(Float.NEGATIVE_INFINITY, retval.y, 0.0f);
        assertEquals(-0.0f, retval.z, 0.0f);
    }

    // minLocal with a subnormal positive x: -0.0f from 'other' wins.
    @Test
    public void testMinLocal4() {
        final Vector3f target =
            new Vector3f(1.43493e-42f, Float.POSITIVE_INFINITY, Float.POSITIVE_INFINITY);
        final Vector3f other = new Vector3f(-0.0f, Float.NEGATIVE_INFINITY, -0.0f);
        final Vector3f retval = target.minLocal(other);
        assertEquals(target, retval);
        assertNotNull(retval);
        assertEquals(-0.0f, retval.x, 0.0f);
        assertEquals(Float.NEGATIVE_INFINITY, retval.y, 0.0f);
        assertEquals(-0.0f, retval.z, 0.0f);
    }

    @Test
public void testMult() {
        // mult(vec, store): a zero component in 'vec' zeroes the product, even
        // against very large/small magnitudes in 'target'.
        final Vector3f target = new Vector3f(4.9e+27f, 3.1e-20f, 1.9e-31f);
        final Vector3f vec = new Vector3f(0, 4.4e-29f, 0);
        final Vector3f store = new Vector3f();
        final Vector3f retval = target.mult(vec, store);
        assertNotNull(retval);
        assertEquals(0, retval.x, 0.0f);
        assertEquals(0, retval.y, 0.0f);
        assertEquals(0, retval.z, 0.0f);
    }

    // Component-wise multiplication by a vector.
    @Test
    public void testMult2() {
        final Vector3f target = new Vector3f(1.12f, 1.21f, 0.0f);
        final Vector3f vec = new Vector3f(1.09f, 5.87f, -5.2f);
        final Vector3f retval = target.mult(vec);
        assertNotNull(retval);
        assertEquals(1.2208f, retval.x, 0.0f);
        assertEquals(7.1027f, retval.y, 0.0f);
        assertEquals(0.0f, retval.z, 0.0f);
    }

    // mult(null) returns null rather than throwing.
    @Test
    public void testMult3() {
        final Vector3f target = new Vector3f(1.12f, 1.21f, 0.0f);
        assertNull(target.mult(null));
    }

    // mult(null, store) also returns null.
    @Test
    public void testMult4() {
        final Vector3f target = new Vector3f(1.12f, 1.21f, 0.0f);
        final Vector3f store = new Vector3f();
        assertNull(target.mult(null, store));
    }

    // Scalar multiplication.
    @Test
    public void testMult5() {
        final Vector3f retval = new Vector3f(3.24f, 6.63f, 7.81f).mult(1.5f);
        assertNotNull(retval);
        assertEquals(4.86f, retval.x, 0.0f);
        assertEquals(9.945f, retval.y, 0.0f);
        assertEquals(11.715f, retval.z, 0.0f);
    }

    // Scalar multiplication into a supplied product vector (returned as-is).
    @Test
    public void testMult6() {
        final Vector3f product = new Vector3f();
        final Vector3f retval = new Vector3f(3.24f, 6.63f, 7.81f).mult(1.5f, product);
        assertEquals(product, retval);
        assertNotNull(retval);
        assertEquals(4.86f, retval.x, 0.0f);
        assertEquals(9.945f, retval.y, 0.0f);
        assertEquals(11.715f, retval.z, 0.0f);
    }

    // A null store argument still yields a (new) result vector.
    @Test
    public void testMult7() {
        final Vector3f retval = new Vector3f(3.24f, 6.63f, 7.81f).mult(1.5f, null);
        assertNotNull(retval);
        assertEquals(4.86f, retval.x, 0.0f);
        assertEquals(9.945f, retval.y, 0.0f);
        assertEquals(11.715f, retval.z, 0.0f);
    }

    // In-place multiplication by three scalars; target is mutated and returned.
    @Test
    public void testMultLocal() {
        final Vector3f target = new Vector3f(1.26f, 4.95f, 5.90f);
        final Vector3f retval = target.multLocal(1.3f, 3.5f, 2.2f);
        assertEquals(target, retval);
        assertNotNull(retval);
        assertEquals(1.6379999f, retval.x, 0.01f);
        assertEquals(17.324999f, retval.y, 0.01f);
        assertEquals(12.9800005f, retval.z, 0.01f);
    }

    // Same expectations via the Vector3f-argument overload.
    @Test
    public void testMultLocal2() {
        final Vector3f target = new Vector3f(1.26f, 4.95f, 5.90f);
        final Vector3f retval = target.multLocal(new Vector3f(1.3f, 3.5f, 2.2f));
        assertEquals(target, retval);
        assertNotNull(retval);
        assertEquals(1.6379999f, retval.x, 0.01f);
        assertEquals(17.324999f, retval.y, 0.01f);
        assertEquals(12.9800005f, retval.z, 0.01f);
    }

    // multLocal(null) returns null.
    @Test
    public void testMultLocal3() {
        final Vector3f target = new Vector3f(1.26f, 4.95f, 5.90f);
        final Vector3f retval = target.multLocal(null);
        assertNull(retval);
    }

    // negate() flips signs (including -0.0 -> 0.0) and double negation round-trips.
    @Test
    public void testNegate() {
        final Vector3f target = new Vector3f(-1.0f, 2.0f, -0.0f);
        final Vector3f retval = target.negate();
        assertNotNull(retval);
        assertEquals(1.0f, retval.x, 0.0f);
        assertEquals(-2.0f, retval.y, 0.0f);
        assertEquals(0.0f, retval.z, 0.0f);
        final Vector3f retval2 = retval.negate();
        assertEquals(retval2, target);
    }

    // negate() on NaN and infinities: NaN stays NaN, infinities swap sign.
    @Test
    public void testNegate2() {
        final Vector3f retval =
            new Vector3f(Float.NaN, Float.POSITIVE_INFINITY, Float.NEGATIVE_INFINITY).negate();
        assertNotNull(retval);
        assertEquals(Float.NaN, retval.x, 0.0f);
        assertEquals(Float.NEGATIVE_INFINITY, retval.y, 0.0f);
        assertEquals(Float.POSITIVE_INFINITY, retval.z, 0.0f);
    }

    // In-place negation mutates and returns the target.
    @Test
    public void testNegateLocal() {
        final Vector3f target =
            new Vector3f(-4.5f, Float.POSITIVE_INFINITY, Float.NEGATIVE_INFINITY);
        final Vector3f retval = target.negateLocal();
        assertEquals(target, retval);
        assertNotNull(retval);
        assertEquals(4.5f, retval.x, 0.0f);
        assertEquals(Float.NEGATIVE_INFINITY, retval.y, 0.0f);
        assertEquals(Float.POSITIVE_INFINITY, retval.z, 0.0f);
    }

    // Normalizing a vector containing NaN poisons every component.
    @Test
    public void testNormalizeLocal() {
        final Vector3f target = new Vector3f(6.9282f, Float.NaN, 4.694f);
        final Vector3f retval = target.normalizeLocal();
        assertEquals(target, retval);
        assertNotNull(retval);
        assertEquals(Float.NaN, retval.x, 0.0f);
        assertEquals(Float.NaN, retval.y, 0.0f);
        assertEquals(Float.NaN, retval.z, 0.0f);
    }

    // Plain normalization of a finite vector.
    @Test
    public void testNormalize() {
        final Vector3f retval = new Vector3f(2.071f, 2.45f, 1.35f).normalize();
        assertNotNull(retval);
        assertEquals(0.5950255f, retval.x, 0.01f);
        assertEquals(0.70391715f, retval.y, 0.0f);
        assertEquals(0.3878727f, retval.z, 0.0f);
    }

    // Normalizing an already-unit vector; note retval is asserted equal to
    // target here (value equality), unlike the mutating *Local tests.
    @Test
    public void testNormalize2() {
        final Vector3f target = new Vector3f(1.0f, 0.0f, 0.0f);
        final Vector3f retval = target.normalize();
        assertNotNull(retval);
        assertEquals(retval, target);
        assertEquals(1.0f, retval.x, 0.0f);
        assertEquals(0.0f, retval.y, 0.0f);
        assertEquals(0.0f, retval.z, 0.0f);
    }

    // project() onto a vector with an infinite component yields all NaN.
    @Test
    public void testProject() {
        final Vector3f target = new Vector3f(3.8e+15f, 2.1e-25f, 0.0f);
        final Vector3f other = new Vector3f(2e-28f, -3.6e+12f, Float.POSITIVE_INFINITY);
        final Vector3f retval = target.project(other);
        assertNotNull(retval);
        assertEquals(Float.NaN, retval.x, 0.0f);
        assertEquals(Float.NaN, retval.y, 0.0f);
        assertEquals(Float.NaN, retval.z, 0.0f);
    }

    // Ordinary projection with finite operands.
    @Test
    public void testProject2() {
        final Vector3f target = new Vector3f(7.32f, 1.44f, 3.37f);
        final Vector3f other = new Vector3f(9.12f, -3.64f, 5.19f);
        final Vector3f retval = target.project(other);
        assertNotNull(retval);
        assertEquals(5.84f, retval.x, 0.01f);
        assertEquals(-2.33f, retval.y, 0.01f);
        assertEquals(3.32f, retval.z, 0.01f);
    }

    // In-place projection onto a NaN-containing vector yields all NaN.
    @Test
    public void testProjectLocal() {
        final Vector3f target = new Vector3f(-2.9e+17f, 3.9e-34f, 3.8e+20f);
        final Vector3f other = new Vector3f(5.4e-20f, -2.6e+36f, Float.NaN);
        final Vector3f retval = target.projectLocal(other);
        assertEquals(target, retval);
        assertNotNull(retval);
        assertEquals(Float.NaN, retval.z, 0.0f);
        assertEquals(Float.NaN, retval.x, 0.0f);
        assertEquals(Float.NaN, retval.y, 0.0f);
    }

    // In-place projection with finite operands; asserts continue past this chunk.
    @Test
    public void testProjectLocal2() {
        final Vector3f target = new Vector3f(7.32f, 1.44f, 3.37f);
        final Vector3f other = new Vector3f(9.12f, -3.64f, 5.19f);
        final Vector3f retval = target.projectLocal(other);
        assertEquals(target, retval);
        assertNotNull(retval);
assertEquals(5.8409867f, retval.x, 0.01f);
        // Tail of testProjectLocal2: higher-precision expectations than testProject2.
        assertEquals(-2.331271f, retval.y, 0.01f);
        assertEquals(3.3239825f, retval.z, 0.01f);
    }

    // scaleAdd(scalar, mult, add): +Infinity in 'mult' propagates into the result.
    @Test
    public void testScaleAdd() {
        final Vector3f target = new Vector3f();
        final Vector3f mult = new Vector3f(Float.POSITIVE_INFINITY, 5.60f, -1.74f);
        final Vector3f add = new Vector3f(-0.0f, -0.0f, 3.79f);
        final Vector3f retval = target.scaleAdd(1.70f, mult, add);
        assertEquals(target, retval);
        assertNotNull(retval);
        assertEquals(Float.POSITIVE_INFINITY, retval.x, 0.0f);
        assertEquals(9.52f, retval.y, 0.0f);
        assertEquals(0.8319998f, retval.z, 0.0f);
    }

    // Two-argument scaleAdd: target is scaled then 'add' is added, in place.
    @Test
    public void testScaleAdd2() {
        final Vector3f target = new Vector3f(4.86f, 6.10f, -1.74f);
        final Vector3f add = new Vector3f(-0.16f, -0.51f, 1.03f);
        final Vector3f retval = target.scaleAdd(1.99f, add);
        assertEquals(target, retval);
        assertNotNull(retval);
        assertEquals(9.5114f, retval.x, 0.001f);
        assertEquals(11.629f, retval.y, 0.001f);
        assertEquals(-2.4326f, retval.z, 0.001f);
    }

    // set() with an out-of-range index must throw.
    @Test
    public void testSet_OutputIllegalArgumentException() {
        thrown.expect(IllegalArgumentException.class);
        new Vector3f(1.5f, 2.3f, 4.7f).set(5, 1.5f);
    }

    // set(index, value) writes x, y or z.
    // NOTE(review): these assertEquals calls pass (actual, expected) — reversed
    // from the JUnit convention; harmless for the check but confusing on failure.
    @Test
    public void testSet() {
        Vector3f target = new Vector3f(0.0f, 0.0f, 0.0f);
        target.set(0, 5.0f);
        assertEquals(target.x, 5.0, 0.0f);
        target = new Vector3f(0.0f, 0.0f, 0.0f);
        target.set(1, 3.0f);
        assertEquals(target.y, 3.0, 0.0f);
        target = new Vector3f(0.0f, 0.0f, 0.0f);
        target.set(2, 8.0f);
        assertEquals(target.z, 8.0, 0.0f);
    }

    // setX/setY/setZ each modify one component and return the vector.
    @Test
    public void testSetX() {
        final Vector3f retval = new Vector3f(0.0f, 0.0f, 0.0f).setX(3.0f);
        assertNotNull(retval);
        assertEquals(3.0f, retval.x, 0.0f);
        assertEquals(0.0f, retval.y, 0.0f);
        assertEquals(0.0f, retval.z, 0.0f);
    }

    @Test
    public void testSetY() {
        final Vector3f retval = new Vector3f(0.0f, 0.0f, 0.0f).setY(3.0f);
        assertNotNull(retval);
        assertEquals(0.0f, retval.x, 0.0f);
        assertEquals(3.0f, retval.y, 0.0f);
        assertEquals(0.0f, retval.z, 0.0f);
    }

    @Test
    public void testSetZ() {
        final Vector3f retval = new Vector3f(0.0f,
        0.0f, 0.0f).setZ(3.0f);
        assertNotNull(retval);
        assertEquals(0.0f, retval.x, 0.0f);
        assertEquals(0.0f, retval.y, 0.0f);
        assertEquals(3.0f, retval.z, 0.0f);
    }

    // Component-wise subtraction returning a new vector.
    @Test
    public void testSubtract() {
        final Vector3f retval = new Vector3f(12.0f, 8.0f, 5.0f).subtract(new Vector3f(7.0f, 4.0f, -2.0f));
        assertNotNull(retval);
        assertEquals(5.0f, retval.x, 0.0f);
        assertEquals(4.0f, retval.y, 0.0f);
        assertEquals(7.0f, retval.z, 0.0f);
    }

    // subtract(vec, store): result is written into and returned as 'other'.
    @Test
    public void testSubtract2() {
        final Vector3f target = new Vector3f(12.0f, 8.0f, 5.0f);
        final Vector3f other = new Vector3f();
        final Vector3f retval = target.subtract(new Vector3f(7.0f, 4.0f, -2.0f), other);
        assertEquals(other, retval);
        assertNotNull(retval);
        assertEquals(5.0f, retval.x, 0.0f);
        assertEquals(4.0f, retval.y, 0.0f);
        assertEquals(7.0f, retval.z, 0.0f);
    }

    // subtract(vec, null): a result is still produced.
    @Test
    public void testSubtract3() {
        final Vector3f other = null;
        final Vector3f target = new Vector3f(12.0f, 8.0f, 5.0f);
        final Vector3f retval = target.subtract(new Vector3f(7.0f, 4.0f, -2.0f), other);
        assertEquals(5.0f, retval.x, 0.0f);
        assertEquals(4.0f, retval.y, 0.0f);
        assertEquals(7.0f, retval.z, 0.0f);
    }

    // Three-scalar subtract overload.
    @Test
    public void testSubtract4() {
        final Vector3f target = new Vector3f(12.0f, 8.0f, 5.0f);
        final Vector3f retval = target.subtract(7.0f, 4.0f, -2.0f);
        assertEquals(5.0f, retval.x, 0.0f);
        assertEquals(4.0f, retval.y, 0.0f);
        assertEquals(7.0f, retval.z, 0.0f);
    }

    // In-place subtraction (vector overload); target mutated and returned.
    @Test
    public void testSubtractLocal() {
        final Vector3f target = new Vector3f(12.0f, 8.0f, 5.0f);
        final Vector3f retval = target.subtractLocal(new Vector3f(7.0f, 4.0f, -2.0f));
        assertEquals(target, retval);
        assertEquals(5.0f, retval.x, 0.0f);
        assertEquals(4.0f, retval.y, 0.0f);
        assertEquals(7.0f, retval.z, 0.0f);
    }

    // In-place subtraction (three-scalar overload).
    @Test
    public void testSubtractLocal2() {
        final Vector3f target = new Vector3f(12.0f, 8.0f, 5.0f);
        final Vector3f retval = target.subtractLocal(7.0f, 4.0f, -2.0f);
        assertEquals(target, retval);
        assertEquals(5.0f, retval.x, 0.0f);
        assertEquals(4.0f, retval.y, 0.0f);
        assertEquals(7.0f, retval.z, 0.0f);
    }

    @Test
    // subtractLocal(null) returns null.
    public void testSubtractLocal3() {
        final Vector3f target = new Vector3f(12.0f, 8.0f, 5.0f);
        final Vector3f retval = target.subtractLocal(null);
        assertNull(retval);
    }

    // toArray fills the supplied array (returned as-is), overwrites existing
    // contents, and allocates when given null.
    @Test
    public void testToArray() {
        final float[] store = {0.0f, 0.0f, 0.0f};
        final float[] retval = new Vector3f(1.0f, 2.0f, 3.0f).toArray(store);
        assertEquals(store, retval);
        assertArrayEquals(new float[] {1.0f, 2.0f, 3.0f}, retval, 0.0f);
        final float[] retval2 = new Vector3f(1.0f, 2.0f, 3.0f).toArray(new float[]{4.0f, 5.0f, 6.0f});
        assertArrayEquals(new float[] {1.0f, 2.0f, 3.0f}, retval2, 0.0f);
        final float[] retval3 = new Vector3f(1.0f, 2.0f, 3.0f).toArray(null);
        assertArrayEquals(new float[] {1.0f, 2.0f, 3.0f}, retval3, 0.0f);
    }

    // zero() clears all components in place and returns the target.
    @Test
    public void testZero() {
        final Vector3f target = new Vector3f(1.0f, 5.0f, 9.0f);
        final Vector3f retval = target.zero();
        assertEquals(target, retval);
        assertNotNull(retval);
        assertEquals(0.0f, retval.x, 0.0f);
        assertEquals(0.0f, retval.y, 0.0f);
        assertEquals(0.0f, retval.z, 0.0f);
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.falcon.messaging; import java.io.InputStream; import java.io.OutputStream; import java.util.concurrent.CountDownLatch; import javax.jms.Connection; import javax.jms.ConnectionFactory; import javax.jms.Destination; import javax.jms.JMSException; import javax.jms.MapMessage; import javax.jms.MessageConsumer; import javax.jms.Session; import org.apache.activemq.ActiveMQConnectionFactory; import org.apache.activemq.broker.BrokerService; import org.apache.activemq.util.ByteArrayInputStream; import org.apache.falcon.cluster.util.EmbeddedCluster; import org.apache.falcon.hadoop.HadoopClientFactory; import org.apache.falcon.util.FalconTestUtil; import org.apache.falcon.workflow.WorkflowExecutionArgs; import org.apache.falcon.workflow.WorkflowExecutionContext; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.testng.Assert; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; /** * Test for feed message producer. 
*/ public class FeedProducerTest { private String[] args; private static final String BROKER_URL = "vm://localhost?broker.useJmx=false&broker.persistent=true"; private static final String BROKER_IMPL_CLASS = "org.apache.activemq.ActiveMQConnectionFactory"; private static final String TOPIC_NAME = "Falcon.process1.click-logs"; private BrokerService broker; private Path logFile; private volatile AssertionError error; private EmbeddedCluster dfsCluster; private Configuration conf; private CountDownLatch latch = new CountDownLatch(1); private String[] instancePaths = {"/falcon/feed/agg-logs/path1/2010/10/10/20", "/falcon/feed/agg-logs/path1/2010/10/10/21", "/falcon/feed/agg-logs/path1/2010/10/10/22", "/falcon/feed/agg-logs/path1/2010/10/10/23", }; @BeforeClass public void setup() throws Exception { this.dfsCluster = EmbeddedCluster.newCluster("testCluster"); conf = dfsCluster.getConf(); logFile = new Path(conf.get(HadoopClientFactory.FS_DEFAULT_NAME_KEY), "/falcon/feed/agg-logs/instance-2012-01-01-10-00.csv"); args = new String[] { "-" + WorkflowExecutionArgs.ENTITY_NAME.getName(), TOPIC_NAME, "-" + WorkflowExecutionArgs.OUTPUT_FEED_NAMES.getName(), "click-logs", "-" + WorkflowExecutionArgs.OUTPUT_FEED_PATHS.getName(), "/click-logs/10/05/05/00/20", "-" + WorkflowExecutionArgs.WORKFLOW_ID.getName(), "workflow-01-00", "-" + WorkflowExecutionArgs.WORKFLOW_USER.getName(), FalconTestUtil.TEST_USER_1, "-" + WorkflowExecutionArgs.RUN_ID.getName(), "1", "-" + WorkflowExecutionArgs.NOMINAL_TIME.getName(), "2011-01-01-01-00", "-" + WorkflowExecutionArgs.TIMESTAMP.getName(), "2012-01-01-01-00", "-" + WorkflowExecutionArgs.BRKR_URL.getName(), BROKER_URL, "-" + WorkflowExecutionArgs.BRKR_IMPL_CLASS.getName(), BROKER_IMPL_CLASS, "-" + WorkflowExecutionArgs.USER_BRKR_URL.getName(), BROKER_URL, "-" + WorkflowExecutionArgs.USER_BRKR_IMPL_CLASS.getName(), BROKER_IMPL_CLASS, "-" + WorkflowExecutionArgs.ENTITY_TYPE.getName(), "FEED", "-" + WorkflowExecutionArgs.OPERATION.getName(), 
"DELETE", "-" + WorkflowExecutionArgs.LOG_FILE.getName(), logFile.toString(), "-" + WorkflowExecutionArgs.LOG_DIR.getName(), "/falcon/feed/agg-logs/", "-" + WorkflowExecutionArgs.TOPIC_NAME.getName(), TOPIC_NAME, "-" + WorkflowExecutionArgs.STATUS.getName(), "SUCCEEDED", "-" + WorkflowExecutionArgs.BRKR_TTL.getName(), "10", "-" + WorkflowExecutionArgs.CLUSTER_NAME.getName(), "corp", }; broker = new BrokerService(); broker.addConnector(BROKER_URL); broker.setDataDirectory("target/activemq"); broker.start(); } @AfterClass public void tearDown() throws Exception { broker.deleteAllMessages(); broker.stop(); this.dfsCluster.shutdown(); } @Test public void testLogFile() throws Exception { FileSystem fs = dfsCluster.getFileSystem(); OutputStream out = fs.create(logFile); InputStream in = new ByteArrayInputStream(("instancePaths=" + StringUtils.join(instancePaths, ",")).getBytes()); IOUtils.copyBytes(in, out, conf); testProcessMessageCreator(); } @Test public void testEmptyLogFile() throws Exception { FileSystem fs = dfsCluster.getFileSystem(); OutputStream out = fs.create(logFile); InputStream in = new ByteArrayInputStream(("instancePaths=").getBytes()); IOUtils.copyBytes(in, out, conf); WorkflowExecutionContext context = WorkflowExecutionContext.create( args, WorkflowExecutionContext.Type.POST_PROCESSING); JMSMessageProducer jmsMessageProducer = JMSMessageProducer.builder(context) .type(JMSMessageProducer.MessageType.USER).build(); jmsMessageProducer.sendMessage(); } private void testProcessMessageCreator() throws Exception { Thread t = new Thread() { @Override public void run() { try { consumer(); } catch (AssertionError e) { error = e; } catch (JMSException ignore) { error = null; } } }; t.start(); // Wait for consumer to be ready latch.await(); WorkflowExecutionContext context = WorkflowExecutionContext.create( args, WorkflowExecutionContext.Type.POST_PROCESSING); JMSMessageProducer jmsMessageProducer = JMSMessageProducer.builder(context) 
.type(JMSMessageProducer.MessageType.USER).build(); jmsMessageProducer.sendMessage(); t.join(); if (error != null) { throw error; } } private void consumer() throws JMSException { ConnectionFactory connectionFactory = new ActiveMQConnectionFactory(BROKER_URL); Connection connection = connectionFactory.createConnection(); connection.start(); Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE); Destination destination = session.createTopic(TOPIC_NAME); MessageConsumer consumer = session.createConsumer(destination); latch.countDown(); verifyMesssage(consumer); connection.close(); } private void verifyMesssage(MessageConsumer consumer) throws JMSException { for (String instancePath : instancePaths) { // receive call is blocking MapMessage m = (MapMessage) consumer.receive(); System.out.println("Received JMS message {}" + m.toString()); System.out.println("Consumed: " + m.toString()); assertMessage(m); Assert.assertEquals(m.getString(WorkflowExecutionArgs.OUTPUT_FEED_PATHS.getName()), instancePath); } } private void assertMessage(MapMessage m) throws JMSException { Assert.assertEquals(m.getString(WorkflowExecutionArgs.ENTITY_NAME.getName()), TOPIC_NAME); Assert.assertEquals(m.getString(WorkflowExecutionArgs.OPERATION.getName()), "DELETE"); Assert.assertEquals(m.getString(WorkflowExecutionArgs.WORKFLOW_ID.getName()), "workflow-01-00"); Assert.assertEquals(m.getString(WorkflowExecutionArgs.WORKFLOW_USER.getName()), FalconTestUtil.TEST_USER_1); Assert.assertEquals(m.getString(WorkflowExecutionArgs.RUN_ID.getName()), "1"); Assert.assertEquals(m.getString(WorkflowExecutionArgs.NOMINAL_TIME.getName()), "2011-01-01-01-00"); Assert.assertEquals(m.getString(WorkflowExecutionArgs.TIMESTAMP.getName()), "2012-01-01-01-00"); Assert.assertEquals(m.getString(WorkflowExecutionArgs.STATUS.getName()), "SUCCEEDED"); } }
/* * Copyright 2012-2013 inBloom, Inc. and its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.slc.sli.api.resources.security; import java.util.*; import javax.annotation.PostConstruct; import javax.annotation.Resource; import javax.ws.rs.DELETE; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import javax.ws.rs.core.UriInfo; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Scope; import org.springframework.stereotype.Component; import org.slc.sli.api.cache.SessionCache; import org.slc.sli.api.config.EntityDefinition; import org.slc.sli.api.config.EntityDefinitionStore; import org.slc.sli.api.init.RoleInitializer; import org.slc.sli.api.representation.EntityBody; import org.slc.sli.api.resources.v1.HypermediaType; import org.slc.sli.api.security.RightsAllowed; import org.slc.sli.api.security.SecurityEventBuilder; import org.slc.sli.api.security.service.AuditLogger; import org.slc.sli.api.security.context.resolver.RealmHelper; import org.slc.sli.api.service.EntityService; import 
org.slc.sli.common.util.logging.SecurityEvent;
import org.slc.sli.domain.Entity;
import org.slc.sli.domain.NeutralCriteria;
import org.slc.sli.domain.NeutralQuery;
import org.slc.sli.domain.Repository;
import org.slc.sli.domain.enums.Right;

/**
 * CRUD resource for custom roles.
 *
 * @author jnanney
 */
@Component
@Scope("request")
@Path("/customRoles")
@Produces({ HypermediaType.JSON + ";charset=utf-8" })
public class CustomRoleResource {

    @Autowired
    private EntityDefinitionStore store;

    // Injected from configuration; read elsewhere in this class.
    @Value("${sli.sandbox.enabled}")
    protected boolean isSandboxEnabled;

    @Autowired
    private SecurityEventBuilder securityEventBuilder;

    @Autowired
    private AuditLogger auditLogger;

    // Supplies the built-in default roles for the defaultsOnly query below.
    @Autowired
    private RoleInitializer roleInitializer;

    // Resolved in init() from the "customRole" entity definition.
    private EntityService service;

    // Maps the current user to the realms they are associated with.
    @Autowired
    private RealmHelper realmHelper;

    @Autowired
    @Qualifier("validationRepo")
    private Repository<Entity> repo;

    @Resource
    private SessionCache sessions;

    public static final String RESOURCE_NAME = "customRole";

    // Validation error messages returned to clients.
    protected static final String ERROR_DUPLICATE_ROLE = "Cannot list duplicate roles";
    protected static final String ERROR_INVALID_REALM = "Cannot modify custom roles for that realm/tenant";
    protected static final String ERROR_INVALID_RIGHT = "Invalid right listed in custom role document";
    protected static final String ERROR_MULTIPLE_DOCS = "Cannot create multiple custom role documents per realm/tenant";
    protected static final String ERROR_FORBIDDEN = "User does not have access to requested role document";
    protected static final String ERROR_DUPLICATE_RIGHTS = "Cannot have the same right listed more than once in a role";
    protected static final String ERROR_CHANGING_REALM_ID = "Cannot change the realmId on a custom role document";
    protected static final String ERROR_INVALID_REALM_ID = "Invalid realmId specified.";
    protected static final String ERROR_INVALID_CONTEXT_RIGHT = "Invalid context rights. A staff role has to contain either TEACHER_CONTEXT or STAFF_CONTEXT right";
    protected static final String ERROR_INVALID_STUDENT_RIGHT = "Student/Parent roles can not contain staff context right";

    // Looks up the customRole entity definition once and caches its service.
    @PostConstruct
    public void init() {
        EntityDefinition def = store.lookupByResourceName("customRole");
        service = def.getService();
    }

    /**
     * Lists custom role documents for the caller's realms. With
     * ?defaultsOnly=true, returns the built-in default roles instead; with
     * ?realmId=..., restricts the listing to that realm (which must be one of
     * the caller's associated realms).
     */
    @GET
    @RightsAllowed({Right.CRUD_ROLE })
    public Response readAll(@Context final UriInfo uriInfo,
            @DefaultValue("") @QueryParam("realmId") String realmId) {
        if (uriInfo.getQueryParameters() != null) {
            String defaultsOnly = uriInfo.getQueryParameters().getFirst("defaultsOnly");
            if (defaultsOnly != null && Boolean.valueOf(defaultsOnly).booleanValue()) {
                return Response.ok(roleInitializer.getDefaultRoles()).build();
            }
        }

        List<Map<String, Object>> results = new ArrayList<Map<String, Object>>();

        //If the user's edorg is mapped to more than one realm, then we have to use a
        //realmId param to figure out which to use.  Otherwise we can just return custom
        //roles for all the user's realms.
        Set<String> myRealms = realmHelper.getAssociatedRealmIds();
        Set<String> realmsToQuery = null;
        if (!realmId.isEmpty() && !myRealms.contains(realmId)) {
            // Caller asked for a realm they are not associated with.
            return buildBadRequest(ERROR_INVALID_REALM_ID + ": '" + realmId + "'");
        } else {
            if (realmId.isEmpty()) {
                realmsToQuery = myRealms;
            } else {
                realmsToQuery = new HashSet<String>();
                realmsToQuery.add(realmId);
            }
        }

        // One query across all realms of interest, then hydrate each document.
        NeutralQuery customRoleQuery = new NeutralQuery();
        customRoleQuery.addCriteria(new NeutralCriteria("realmId", NeutralCriteria.CRITERIA_IN, realmsToQuery));
        Iterable<String> customRoles = repo.findAllIds("customRole", customRoleQuery);
        for (String id : customRoles) {
            EntityBody result = service.get(id);
            results.add(result);
        }
        return Response.ok(results).build();
    }

    /**
     * Reads a single custom role document by id. Returns 403 (with an audit
     * event) if the document's realm is not one of the caller's realms.
     */
    @GET
    @Path("{id}")
    @RightsAllowed({Right.CRUD_ROLE })
    public Response read(@PathParam("id") String id, @Context final UriInfo uriInfo) {
        EntityBody customRole = service.get(id);
        String realmId = (String) customRole.get("realmId");
        if (!realmHelper.getAssociatedRealmIds().contains(realmId)) {
            auditSecEvent(uriInfo, "Failed to read custom role with id: " + id
                    + " wrong tenant + realm combination.", realmId);
            return Response.status(Status.FORBIDDEN).entity(ERROR_FORBIDDEN).build();
        }
        return Response.ok(customRole).build();
    }

    /**
     * Creates a custom role document after validating rights, role uniqueness
     * and realm; each validation failure is audited and returned as-is.
     * (Method continues beyond this chunk.)
     */
    @POST
    @RightsAllowed({Right.CRUD_ROLE })
    public Response createCustomRole(EntityBody newCustomRole, @Context final UriInfo uriInfo) {
        String realmId = (String) newCustomRole.get("realmId");
        // Each validate* helper returns null on success or an error Response.
        Response res = validateRights(newCustomRole);
        if (res != null) {
            auditSecEvent(uriInfo, "Failed to create custom role rights validation failed.", realmId);
            return res;
        }
        res = validateUniqueRoles(newCustomRole);
        if (res != null) {
            auditSecEvent(uriInfo, "Failed to create custom role unique roles check failed.", realmId);
            return res;
        }
        res = validateValidRealm(newCustomRole);
        if (res != null) {
            auditSecEvent(uriInfo, "Failed to create custom role invalid realm specified.", realmId);
            return res;
        }
        // Ensure only one custom role document exists per realm (continues past this chunk).
        NeutralQuery existingCustomRoleQuery = new
NeutralQuery(); existingCustomRoleQuery.addCriteria(new NeutralCriteria("realmId", NeutralCriteria.OPERATOR_EQUAL, realmId)); Entity existingRoleDoc = repo.findOne(RESOURCE_NAME, existingCustomRoleQuery); if (existingRoleDoc != null) { auditSecEvent(uriInfo, "Failed to create custom role Already exists.",realmId); return buildBadRequest(ERROR_MULTIPLE_DOCS + ": Realm '" + realmId + "'"); } String id = service.create(newCustomRole); if (id != null) { String uri = uriToString(uriInfo) + "/" + id; auditSecEvent(uriInfo, "Created custom role with id: " + id,realmId); this.sessions.clear(); return Response.status(Status.CREATED).header("Location", uri).build(); } return Response.status(Status.BAD_REQUEST).build(); } @PUT @Path("{id}") @RightsAllowed({Right.CRUD_ROLE }) public Response updateCustomRole(@PathParam("id") String id, EntityBody updated, @Context final UriInfo uriInfo) { Response res = validateRights(updated); if (res != null) { auditSecEvent(uriInfo, "Failed to create custom role rights validation failed.",null); return res; } res = validateUniqueRoles(updated); if (res != null) { auditSecEvent(uriInfo, "Failed to create custom role unique roles check failed.",null); return res; } res = validateValidRealm(updated); if (res != null) { auditSecEvent(uriInfo, "Failed to create custom role invalid realm specified.",null); return res; } EntityBody oldRealm = service.get(id); String oldRealmId = (String) oldRealm.get("realmId"); String updatedRealmId = (String) updated.get("realmId"); if (!updatedRealmId.equals(oldRealmId)) { auditSecEvent(uriInfo, "Failed to update realmId { from: " + oldRealmId + ", to: " + updatedRealmId + " } for role with id:" + id, oldRealmId); return buildBadRequest(ERROR_CHANGING_REALM_ID + ": '" + oldRealmId + "' -> '" + updatedRealmId + "'"); } if (service.update(id, updated, false)) { auditSecEvent(uriInfo, "Updated role with id:" + id,oldRealmId); this.sessions.clear(); return Response.status(Status.NO_CONTENT).build(); } return 
Response.status(Status.BAD_REQUEST).build(); } private static String uriToString(UriInfo uri) { return uri.getBaseUri() + uri.getPath().replaceAll("/$", ""); } @DELETE @Path("{id}") @RightsAllowed({Right.CRUD_ROLE }) public Response deleteCustomRole(@PathParam("id") String id, @Context final UriInfo uriInfo) { service.delete(id); auditSecEvent(uriInfo, "Deleted role with id:" + id, null); this.sessions.clear(); return Response.status(Status.NO_CONTENT).build(); } private Response validateRights(EntityBody customRoleDoc) { @SuppressWarnings("unchecked") List<Map<String, List<String>>> roles = (List<Map<String, List<String>>>) customRoleDoc.get("roles"); for (Map<String, List<String>> cur : roles) { List<String> rights = cur.get("rights"); Set<Right> rightsSet = new HashSet<Right>(); for (String rightName : rights) { Right right = null; try { right = Right.valueOf(rightName); } catch (IllegalArgumentException iae) { return buildBadRequest(ERROR_INVALID_RIGHT + ": '" + rightName + "'"); } if (rightsSet.contains(right)) { return buildBadRequest(ERROR_DUPLICATE_RIGHTS + ": '" + rightName + "'"); } else { rightsSet.add(right); } } } return null; } private Response validateUniqueRoles(EntityBody customRoleDoc) { Set<String> roleNames = new HashSet<String>(); @SuppressWarnings("unchecked") List<Map<String, List<String>>> roles = (List<Map<String, List<String>>>) customRoleDoc.get("roles"); for (Map<String, List<String>> cur : roles) { List<String> names = cur.get("names"); for (String name : names) { if (roleNames.contains(name)) { return buildBadRequest(ERROR_DUPLICATE_ROLE + ": '" + name + "'"); } else { roleNames.add(name); } } } return null; } private Response validateValidRealm(EntityBody customRoleDoc) { Set<String> realmIds = realmHelper.getAssociatedRealmIds(); if (!realmIds.contains(customRoleDoc.get("realmId"))) { return Response.status(Status.FORBIDDEN).entity(ERROR_INVALID_REALM).build(); } return null; } private void auditSecEvent(UriInfo uriInfo, String 
message, String RealmId) { SecurityEvent event = securityEventBuilder.createSecurityEvent(CustomRoleResource.class.getName(), uriInfo.getRequestUri(), message, true); if(RealmId!=null) { String targetEdOrg = realmHelper.getEdOrgIdFromRealm(RealmId); event.setTargetEdOrgList(targetEdOrg); //@TA10431 - change targetEdOrg from scalar to list } auditLogger.audit(event); } private Response buildBadRequest(String message) { return Response.status(Status.BAD_REQUEST).entity(message).build(); } }
/*
 * Copyright (c) 2011-2015, Peter Abeles. All Rights Reserved.
 *
 * This file is part of BoofCV (http://boofcv.org).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package boofcv.alg.misc;

import boofcv.misc.AutoTypeImage;
import boofcv.misc.CodeGeneratorBase;

import java.io.FileNotFoundException;

/**
 * Generates functions inside of {@link boofcv.alg.misc.ImageMiscOps}.
 *
 * <p>Each print* method below emits the Java source of one generated method as a
 * string; the embedded string literals ARE the generated code and must not be
 * reformatted.</p>
 *
 * @author Peter Abeles
 */
public class GenerateImageMiscOps extends CodeGeneratorBase {

	// Name of the generated output class.
	String className = "ImageMiscOps";

	// Per-iteration state set by printAllGeneric()/printAllSpecific() for the
	// image type currently being generated.
	private AutoTypeImage imageType;
	// Class name of the current single-band image type.
	private String imageName;
	// Primitive element type of the image's data array (from AutoTypeImage).
	private String dataType;
	// Snippet appended after array reads (presumably an unsigned mask, e.g.
	// "& 0xFF") — supplied by AutoTypeImage.getBitWise(); confirm there.
	private String bitWise;

	/** Generates the full ImageMiscOps class: preamble, then all methods, then the closing brace. */
	public void generate() throws FileNotFoundException {
		printPreamble();
		printAllGeneric();
		printAllSpecific();
		out.println("}");
	}

	/** Opens the output file and emits imports plus the generated class's Javadoc and declaration. */
	private void printPreamble() throws FileNotFoundException {
		setOutputFile(className);
		out.print("import boofcv.struct.image.*;\n" +
				"\n" +
				"import java.util.Random;\n" +
				"\n" +
				"\n" +
				"/**\n" +
				" * Basic image operations which have no place better to go.\n" +
				" *\n" +
				" * <p>DO NOT MODIFY: Generated by " + getClass().getSimpleName() + ".</p>\n" +
				" *\n" +
				" * @author Peter Abeles\n" +
				" */\n" +
				"public class " + className + " {\n\n");
	}

	/** Emits the methods that exist for every generic image type. */
	public void printAllGeneric() {
		AutoTypeImage types[] = AutoTypeImage.getGenericTypes();

		for( AutoTypeImage t : types ) {
			imageType = t;
			imageName = t.getSingleBandName();
			dataType = t.getDataType();
			printCopy();
			printFill();
			printFillInterleaved();
			printFillInterleaved_bands();
			printFillBand_Interleaved();
			printInsertBandInterleaved();
			printFillBorder();
			printFillRectangle();
			printFillRectangleInterleaved();
			printFillUniform();
			printFillUniformInterleaved();
			printFillGaussian();
			printFillGaussianInterleaved();
			printFlipVertical();
			printFlipHorizontal();
			printRotateCW_one();
			printRotateCW_two();
			printRotateCCW_one();
			printRotateCCW_two();
		}
	}

	/** Emits the noise-adding methods, which also need the type's bit-wise read snippet. */
	public void printAllSpecific() {
		AutoTypeImage types[] = AutoTypeImage.getSpecificTypes();

		for( AutoTypeImage t : types ) {
			imageType = t;
			imageName = t.getSingleBandName();
			dataType = t.getDataType();
			bitWise = t.getBitWise();
			printAddUniformSB();
			printAddUniformIL();
			printAddGaussianSB();
			printAddGaussianIL();
		}
	}

	/** Emits copy(): copies a rectangular region between two single-band images. */
	public void printCopy() {

		out.print("\t/**\n" +
				"\t * Copies a rectangular region from one image into another.<br>\n" +
				"\t * output[dstX:(dstX+width) , dstY:(dstY+height-1)] = input[srcX:(srcX+width) , srcY:(srcY+height-1)]\n" +
				"\t *\n" +
				"\t * @param srcX x-coordinate of corner in input image\n" +
				"\t * @param srcY y-coordinate of corner in input image\n" +
				"\t * @param dstX x-coordinate of corner in output image\n" +
				"\t * @param dstY y-coordinate of corner in output image\n" +
				"\t * @param width Width of region to be copied\n" +
				"\t * @param height Height of region to be copied\n" +
				"\t * @param input Input image\n" +
				"\t * @param output output image\n" +
				"\t */\n" +
				"\tpublic static void copy( int srcX , int srcY , int dstX , int dstY , int width , int height ,\n" +
				"\t\t\t\t\t\t\t "+imageName+" input , "+imageName+" output ) {\n" +
				"\n" +
				"\t\tif( input.width < srcX+width || input.height < srcY+height )\n" +
				"\t\t\tthrow new IllegalArgumentException(\"Copy region must be contained input image\");\n" +
				"\t\tif( output.width < dstX+width || output.height < dstY+height )\n" +
				"\t\t\tthrow new IllegalArgumentException(\"Copy region must be contained output image\");\n" +
				"\n" +
				"\t\tfor (int y = 0; y < height; y++) {\n" +
				"\t\t\tint indexSrc = input.startIndex + (srcY + y) * input.stride + srcX;\n" +
				"\t\t\tint indexDst = output.startIndex + (dstY + y) * output.stride + dstX;\n" +
				"\n" +
				"\t\t\tfor (int x = 0; x < width; x++) {\n" +
				"\t\t\t\toutput.data[indexDst++] = input.data[indexSrc++];\n" +
				"\t\t\t}\n" +
				"\t\t}\n" +
				"\t}\n\n");
	}

	/** Emits fill() for a single-band image. */
	public void printFill() {
		String typeCast = imageType.getTypeCastFromSum();
		out.print("\t/**\n" +
				"\t * Fills the whole image with the specified value\n" +
				"\t *\n" +
				"\t * @param input An image.\n" +
				"\t * @param value The value that the image is being filled with.\n" +
				"\t */\n" +
				"\tpublic static void fill("+imageName+" input, "+imageType.getSumType()+" value) {\n" +
				"\n" +
				"\t\tfor (int y = 0; y < input.height; y++) {\n" +
				"\t\t\tint index = input.getStartIndex() + y * input.getStride();\n" +
				"\t\t\tfor (int x = 0; x < input.width; x++) {\n" +
				"\t\t\t\tinput.data[index++] = "+typeCast+"value;\n" +
				"\t\t\t}\n" +
				"\t\t}\n" +
				"\t}\n\n");
	}

	/** Emits fill() for an interleaved image (all bands set to one value). */
	public void printFillInterleaved() {
		// Local shadows the field on purpose: this variant targets the interleaved type.
		String imageName = imageType.getInterleavedName();
		String typeCast = imageType.getTypeCastFromSum();
		out.print("\t/**\n" +
				"\t * Fills the whole image with the specified value\n" +
				"\t *\n" +
				"\t * @param input An image.\n" +
				"\t * @param value The value that the image is being filled with.\n" +
				"\t */\n" +
				"\tpublic static void fill("+imageName+" input, "+imageType.getSumType()+" value) {\n" +
				"\n" +
				"\t\tfor (int y = 0; y < input.height; y++) {\n" +
				"\t\t\tint index = input.getStartIndex() + y * input.getStride();\n" +
				"\t\t\tint end = index + input.width*input.numBands;\n" +
				"\t\t\tfor (; index < end; index++ ) {\n" +
				"\t\t\t\tinput.data[index] = "+typeCast+"value;\n" +
				"\t\t\t}\n" +
				"\t\t}\n" +
				"\t}\n\n");
	}

	/** Emits fill() for an interleaved image with a per-band value array. */
	public void printFillInterleaved_bands() {
		String imageName = imageType.getInterleavedName();
		String typeCast = imageType.getTypeCastFromSum();
		out.print(
				"\t/**\n" +
				"\t * Fills each band in the image with the specified values\n" +
				"\t *\n" +
				"\t * @param input An image.\n" +
				"\t * @param values Array which contains the values each band is to be filled with.\n" +
				"\t */\n" +
				"\tpublic static void fill("+imageName+" input, "+imageType.getSumType()+"[] values) {\n" +
				"\n" +
				"\t\tfinal int numBands = input.numBands;\n" +
				"\t\tfor (int y = 0; y < input.height; y++) {\n" +
				"\t\t\tfor( int band = 0; band < numBands; band++ ) {\n" +
				"\t\t\t\tint index = input.getStartIndex() + y * input.getStride() + band;\n" +
				"\t\t\t\tint end = index + input.width*numBands - band;\n" +
				"\t\t\t\t"+imageType.getSumType()+" value = values[band];\n" +
				"\t\t\t\tfor (; index < end; index += numBands ) {\n" +
				"\t\t\t\t\tinput.data[index] = "+typeCast+"value;\n" +
				"\t\t\t\t}\n" +
				"\t\t\t}\n" +
				"\t\t}\n" +
				"\t}\n\n");
	}

	/** Emits fillBand(): fills a single band of an interleaved image. */
	public void printFillBand_Interleaved() {
		String imageName = imageType.getInterleavedName();
		String typeCast = imageType.getTypeCastFromSum();
		out.print(
				"\t/**\n" +
				"\t * Fills one band in the image with the specified value\n" +
				"\t *\n" +
				"\t * @param input An image.\n" +
				"\t * @param band Which band is to be filled with the specified value \n" +
				"\t * @param value The value that the image is being filled with.\n" +
				"\t */\n" +
				"\tpublic static void fillBand("+imageName+" input, int band , "+imageType.getSumType()+" value) {\n" +
				"\n" +
				"\t\tfinal int numBands = input.numBands;\n" +
				"\t\tfor (int y = 0; y < input.height; y++) {\n" +
				"\t\t\tint index = input.getStartIndex() + y * input.getStride() + band;\n" +
				"\t\t\tint end = index + input.width*numBands - band;\n" +
				"\t\t\tfor (; index < end; index += numBands ) {\n" +
				"\t\t\t\tinput.data[index] = "+typeCast+"value;\n" +
				"\t\t\t}\n" +
				"\t\t}\n" +
				"\t}\n\n");
	}

	/** Emits insertBand(): copies a single-band image into one band of an interleaved image. */
	public void printInsertBandInterleaved() {
		String singleName = imageType.getSingleBandName();
		String interleavedName = imageType.getInterleavedName();
		out.print(
				"\t/**\n" +
				"\t * Inserts a single band into into one of the bands in a multi-band image\n" +
				"\t *\n" +
				"\t * @param input Single band image\n" +
				"\t * @param band Which band the image is to be inserted into\n" +
				"\t * @param output The multi-band image which the input image is to be inserted into\n" +
				"\t */\n" +
				"\tpublic static void insertBand( "+singleName+" input, int band , "+interleavedName+" output) {\n" +
				"\n" +
				"\t\tfinal int numBands = output.numBands;\n" +
				"\t\tfor (int y = 0; y < input.height; y++) {\n" +
				"\t\t\tint indexIn = input.getStartIndex() + y * input.getStride();\n" +
				"\t\t\tint indexOut = output.getStartIndex() + y * output.getStride() + band;\n" +
				"\t\t\tint end = indexOut + output.width*numBands - band;\n" +
				"\t\t\tfor (; indexOut < end; indexOut += numBands , indexIn++ ) {\n" +
				"\t\t\t\toutput.data[indexOut] = input.data[indexIn];\n" +
				"\t\t\t}\n" +
				"\t\t}\n" +
				"\t}\n\n");
	}

	/** Emits fillBorder(): fills an image's outer border of the given radius. */
	public void printFillBorder() {
		String typeCast = imageType.getTypeCastFromSum();
		out.print("\t/**\n" +
				"\t * Fills the outside border with the specified value\n" +
				"\t *\n" +
				"\t * @param input An image.\n" +
				"\t * @param value The value that the image is being filled with.\n" +
				"\t * @param radius Border width. \n" +
				"\t */\n" +
				"\tpublic static void fillBorder("+imageName+" input, "+imageType.getSumType()+" value, int radius ) {\n" +
				"\n" +
				"\t\t// top and bottom\n" +
				"\t\tfor (int y = 0; y < radius; y++) {\n" +
				"\t\t\tint indexTop = input.startIndex + y * input.stride;\n" +
				"\t\t\tint indexBottom = input.startIndex + (input.height-y-1) * input.stride;\n" +
				"\t\t\tfor (int x = 0; x < input.width; x++) {\n" +
				"\t\t\t\tinput.data[indexTop++] = "+typeCast+"value;\n" +
				"\t\t\t\tinput.data[indexBottom++] = "+typeCast+"value;\n" +
				"\t\t\t}\n" +
				"\t\t}\n" +
				"\n" +
				"\t\t// left and right\n" +
				"\t\tint h = input.height-radius;\n" +
				"\t\tint indexStart = input.startIndex + radius*input.stride;\n" +
				"\t\tfor (int x = 0; x < radius; x++) {\n" +
				"\t\t\tint indexLeft = indexStart + x;\n" +
				"\t\t\tint indexRight = indexStart + input.width-1-x;\n" +
				"\t\t\tfor (int y = radius; y < h; y++) {\n" +
				"\t\t\t\tinput.data[indexLeft] = "+typeCast+"value;\n" +
				"\t\t\t\tinput.data[indexRight] = "+typeCast+"value;\n" +
				"\t\t\t\t\n" +
				"\t\t\t\tindexLeft += input.stride;\n" +
				"\t\t\t\tindexRight += input.stride;\n" +
				"\t\t\t}\n" +
				"\t\t}\n" +
				"\t}\n\n");
	}

	/** Emits fillRectangle() for a single-band image, clipped to the image bounds. */
	public void printFillRectangle() {
		out.print("\t/**\n" +
				"\t * Draws a filled rectangle that is aligned along the image axis inside the image.\n" +
				"\t *\n" +
				"\t * @param img Image the rectangle is drawn in. Modified\n" +
				"\t * @param value Value of the rectangle\n" +
				"\t * @param x0 Top left x-coordinate\n" +
				"\t * @param y0 Top left y-coordinate\n" +
				"\t * @param width Rectangle width\n" +
				"\t * @param height Rectangle height\n" +
				"\t */\n" +
				"\tpublic static void fillRectangle("+imageName+" img, "+imageType.getSumType()+" value, int x0, int y0, int width, int height) {\n" +
				"\t\tint x1 = x0 + width;\n" +
				"\t\tint y1 = y0 + height;\n" +
				"\n" +
				"\t\tif( x0 < 0 ) x0 = 0; if( x1 > img.width ) x1 = img.width;\n" +
				"\t\tif( y0 < 0 ) y0 = 0; if( y1 > img.height ) y1 = img.height;\n" +
				"\n" +
				"\t\tfor (int y = y0; y < y1; y++) {\n" +
				"\t\t\tfor (int x = x0; x < x1; x++) {\n" +
				"\t\t\t\timg.set(x, y, value);\n" +
				"\t\t\t}\n" +
				"\t\t}\n" +
				"\t}\n\n");
	}

	/** Emits fillRectangle() for an interleaved image; every band gets the same value. */
	public void printFillRectangleInterleaved() {

		String imageName = imageType.getInterleavedName();
		String dataType = imageType.getDataType();

		out.print("\t/**\n" +
				"\t * Draws a filled rectangle that is aligned along the image axis inside the image. All bands\n" +
				"\t * are filled with the same value.\n" +
				"\t *\n" +
				"\t * @param img Image the rectangle is drawn in. Modified\n" +
				"\t * @param value Value of the rectangle\n" +
				"\t * @param x0 Top left x-coordinate\n" +
				"\t * @param y0 Top left y-coordinate\n" +
				"\t * @param width Rectangle width\n" +
				"\t * @param height Rectangle height\n" +
				"\t */\n" +
				"\tpublic static void fillRectangle("+imageName+" img, "+dataType+" value, int x0, int y0, int width, int height) {\n" +
				"\t\tint x1 = x0 + width;\n" +
				"\t\tint y1 = y0 + height;\n" +
				"\n" +
				"\t\tif( x0 < 0 ) x0 = 0; if( x1 > img.width ) x1 = img.width;\n" +
				"\t\tif( y0 < 0 ) y0 = 0; if( y1 > img.height ) y1 = img.height;\n" +
				"\n" +
				"\t\tint length = (x1-x0)*img.numBands;\n" +
				"\t\tfor (int y = y0; y < y1; y++) {\n" +
				"\t\t\tint index = img.startIndex + y*img.stride + x0*img.numBands;\n" +
				"\t\t\tint indexEnd = index + length;\n" +
				"\t\t\twhile( index < indexEnd ) {\n" +
				"\t\t\t\timg.data[index++] = value;\n" +
				"\t\t\t}\n" +
				"\t\t}\n" +
				"\t}");
	}

	/** Emits fillUniform() for a single-band image; random draw differs by integer width vs float. */
	public void printFillUniform() {

		String sumType = imageType.getSumType();
		String typeCast = imageType.getTypeCastFromSum();
		String maxInclusive = imageType.isInteger() ? "exclusive" : "inclusive";

		out.print("\t/**\n" +
				"\t * Sets each value in the image to a value drawn from an uniform distribution that has a range of min &le; X &lt; max.\n" +
				"\t *\n" +
				"\t * @param img Image which is to be filled. Modified,\n" +
				"\t * @param rand Random number generator\n" +
				"\t * @param min Minimum value of the distribution, inclusive\n" +
				"\t * @param max Maximum value of the distribution, "+maxInclusive+"\n" +
				"\t */\n" +
				"\tpublic static void fillUniform("+imageName+" img, Random rand , "+sumType+" min , "+sumType+" max) {\n" +
				"\t\t"+sumType+" range = max-min;\n" +
				"\n" +
				"\t\t"+dataType+"[] data = img.data;\n" +
				"\n" +
				"\t\tfor (int y = 0; y < img.height; y++) {\n" +
				"\t\t\tint index = img.getStartIndex() + y * img.getStride();\n" +
				"\t\t\tfor (int x = 0; x < img.width; x++) {\n");
		// The inner assignment depends on the element type: small ints, 64-bit ints, or floats.
		if( imageType.isInteger() && imageType.getNumBits() < 64) {
			out.print("\t\t\t\tdata[index++] = "+typeCast+"(rand.nextInt(range)+min);\n");
		} else if( imageType.isInteger() ) {
			out.print("\t\t\t\tdata[index++] = rand.nextInt((int)range)+min;\n");
		} else {
			String randType = imageType.getRandType();
			out.print("\t\t\t\tdata[index++] = rand.next"+randType+"()*range+min;\n");
		}

		out.print("\t\t\t}\n" +
				"\t\t}\n" +
				"\t}\n\n");
	}

	/** Emits fillUniform() for an interleaved image. */
	public void printFillUniformInterleaved() {

		String imageName = imageType.getInterleavedName();
		String sumType = imageType.getSumType();
		String typeCast = imageType.getTypeCastFromSum();
		String maxInclusive = imageType.isInteger() ? "exclusive" : "inclusive";

		out.print("\t/**\n" +
				"\t * Sets each value in the image to a value drawn from an uniform distribution that has a range of min &le; X &lt; max.\n" +
				"\t *\n" +
				"\t * @param img Image which is to be filled. Modified,\n" +
				"\t * @param rand Random number generator\n" +
				"\t * @param min Minimum value of the distribution, inclusive\n" +
				"\t * @param max Maximum value of the distribution, "+maxInclusive+"\n" +
				"\t */\n" +
				"\tpublic static void fillUniform("+imageName+" img, Random rand , "+sumType+" min , "+sumType+" max) {\n" +
				"\t\t"+sumType+" range = max-min;\n" +
				"\n" +
				"\t\t"+dataType+"[] data = img.data;\n" +
				"\n" +
				"\t\tfor (int y = 0; y < img.height; y++) {\n" +
				"\t\t\tint index = img.getStartIndex() + y * img.getStride();\n" +
				"\t\t\tint end = index + img.width*img.numBands;\n" +
				"\t\t\tfor (; index < end; index++) {\n");
		if( imageType.isInteger() && imageType.getNumBits() < 64) {
			out.print("\t\t\t\tdata[index] = "+typeCast+"(rand.nextInt(range)+min);\n");
		} else if( imageType.isInteger() ) {
			out.print("\t\t\t\tdata[index] = rand.nextInt((int)range)+min;\n");
		} else {
			String randType = imageType.getRandType();
			out.print("\t\t\t\tdata[index] = rand.next"+randType+"()*range+min;\n");
		}

		out.print("\t\t\t}\n" +
				"\t\t}\n" +
				"\t}\n\n");
	}

	/** Emits fillGaussian() for a single-band image, with clipping to [lowerBound, upperBound]. */
	public void printFillGaussian() {

		String sumType = imageType.getSumType();
		String castToSum = sumType.compareTo("double") == 0 ? "" : "("+sumType+")";
		String typeCast = imageType.getTypeCastFromSum();

		out.print("\t/**\n" +
				"\t * Sets each value in the image to a value drawn from a Gaussian distribution. A user\n" +
				"\t * specified lower and upper bound is provided to ensure that the values are within a legal\n" +
				"\t * range. A drawn value outside the allowed range will be set to the closest bound.\n" +
				"\t * \n" +
				"\t * @param input Input image. Modified.\n" +
				"\t * @param rand Random number generator\n" +
				"\t * @param mean Distribution's mean.\n" +
				"\t * @param sigma Distribution's standard deviation.\n" +
				"\t * @param lowerBound Lower bound of value clip\n" +
				"\t * @param upperBound Upper bound of value clip\n" +
				"\t */\n" +
				"\tpublic static void fillGaussian("+imageName+" input, Random rand , double mean , double sigma , " +sumType+" lowerBound , "+sumType+" upperBound ) {\n" +
				"\t\t"+dataType+"[] data = input.data;\n" +
				"\n" +
				"\t\tfor (int y = 0; y < input.height; y++) {\n" +
				"\t\t\tint index = input.getStartIndex() + y * input.getStride();\n" +
				"\t\t\tfor (int x = 0; x < input.width; x++) {\n" +
				"\t\t\t\t"+sumType+" value = "+castToSum+"(rand.nextGaussian()*sigma+mean);\n" +
				"\t\t\t\tif( value < lowerBound ) value = lowerBound;\n" +
				"\t\t\t\tif( value > upperBound ) value = upperBound;\n" +
				"\t\t\t\tdata[index++] = "+typeCast+"value;\n" +
				"\t\t\t}\n" +
				"\t\t}\n" +
				"\t}\n\n");
	}

	/** Emits fillGaussian() for an interleaved image. */
	public void printFillGaussianInterleaved() {

		String imageName = imageType.getInterleavedName();
		String sumType = imageType.getSumType();
		String castToSum = sumType.compareTo("double") == 0 ? "" : "("+sumType+")";
		String typeCast = imageType.getTypeCastFromSum();

		out.print("\t/**\n" +
				"\t * Sets each value in the image to a value drawn from a Gaussian distribution. A user\n" +
				"\t * specified lower and upper bound is provided to ensure that the values are within a legal\n" +
				"\t * range. A drawn value outside the allowed range will be set to the closest bound.\n" +
				"\t * \n" +
				"\t * @param input Input image. Modified.\n" +
				"\t * @param rand Random number generator\n" +
				"\t * @param mean Distribution's mean.\n" +
				"\t * @param sigma Distribution's standard deviation.\n" +
				"\t * @param lowerBound Lower bound of value clip\n" +
				"\t * @param upperBound Upper bound of value clip\n" +
				"\t */\n" +
				"\tpublic static void fillGaussian("+imageName+" input, Random rand , double mean , double sigma , " +sumType+" lowerBound , "+sumType+" upperBound ) {\n" +
				"\t\t"+dataType+"[] data = input.data;\n" +
				"\t\tint length = input.width*input.numBands;\n" +
				"\n" +
				"\t\tfor (int y = 0; y < input.height; y++) {\n" +
				"\t\t\tint index = input.getStartIndex() + y * input.getStride();\n" +
				"\t\t\tint indexEnd = index+length;\n" +
				"\n" +
				"\t\t\twhile( index < indexEnd ) {\n" +
				"\t\t\t\t"+sumType+" value = "+castToSum+"(rand.nextGaussian()*sigma+mean);\n" +
				"\t\t\t\tif( value < lowerBound ) value = lowerBound;\n" +
				"\t\t\t\tif( value > upperBound ) value = upperBound;\n" +
				"\t\t\t\tdata[index++] = "+typeCast+"value;\n" +
				"\t\t\t}\n" +
				"\t\t}\n" +
				"\t}\n\n");
	}

	/** Emits addUniform() for a single-band image; narrow int types also get clamped to their range. */
	public void printAddUniformSB() {

		String sumType = imageType.getSumType();
		int min = imageType.getMin().intValue();
		int max = imageType.getMax().intValue();
		String typeCast = imageType.getTypeCastFromSum();

		out.print("\t/**\n" +
				"\t * Adds uniform i.i.d noise to each pixel in the image. Noise range is min &le; X &lt; max.\n" +
				"\t */\n" +
				"\tpublic static void addUniform("+imageName+" input, Random rand , "+sumType+" min , "+sumType+" max) {\n" +
				"\t\t"+sumType+" range = max-min;\n" +
				"\n" +
				"\t\t"+dataType+"[] data = input.data;\n" +
				"\n" +
				"\t\tfor (int y = 0; y < input.height; y++) {\n" +
				"\t\t\tint index = input.getStartIndex() + y * input.getStride();\n" +
				"\t\t\tfor (int x = 0; x < input.width; x++) {\n");

		if( imageType.isInteger() && imageType.getNumBits() != 64) {
			out.print("\t\t\t\t"+sumType+" value = (data[index] "+bitWise+") + rand.nextInt(range)+min;\n");
			if( imageType.getNumBits() < 32 ) {
				out.print("\t\t\t\tif( value < "+min+" ) value = "+min+";\n" +
						"\t\t\t\tif( value > "+max+" ) value = "+max+";\n" +
						"\n");
			}
		} else if( imageType.isInteger() ) {
			out.print("\t\t\t\t"+sumType+" value = data[index] + rand.nextInt((int)range)+min;\n");
		} else {
			String randType = imageType.getRandType();
			out.print("\t\t\t\t"+sumType+" value = data[index] + rand.next"+randType+"()*range+min;\n");
		}

		out.print("\t\t\t\tdata[index++] = "+typeCast+" value;\n" +
				"\t\t\t}\n" +
				"\t\t}\n" +
				"\t}\n\n");
	}

	/** Emits addUniform() for an interleaved image. */
	public void printAddUniformIL() {

		String imageName = imageType.getInterleavedName();
		String sumType = imageType.getSumType();
		int min = imageType.getMin().intValue();
		int max = imageType.getMax().intValue();
		String typeCast = imageType.getTypeCastFromSum();

		out.print("\t/**\n" +
				"\t * Adds uniform i.i.d noise to each pixel in the image. Noise range is min &le; X &lt; max.\n" +
				"\t */\n" +
				"\tpublic static void addUniform("+imageName+" input, Random rand , "+sumType+" min , "+sumType+" max) {\n" +
				"\t\t"+sumType+" range = max-min;\n" +
				"\n" +
				"\t\t"+dataType+"[] data = input.data;\n" +
				"\t\tint length = input.width*input.numBands;\n" +
				"\n" +
				"\t\tfor (int y = 0; y < input.height; y++) {\n" +
				"\t\t\tint index = input.getStartIndex() + y * input.getStride();\n" +
				"\n" +
				"\t\t\t\tint indexEnd = index+length;\n" +
				"\t\t\t\twhile( index < indexEnd ) {\n");

		if( imageType.isInteger() && imageType.getNumBits() != 64) {
			out.print("\t\t\t\t"+sumType+" value = (data[index] "+bitWise+") + rand.nextInt(range)+min;\n");
			if( imageType.getNumBits() < 32 ) {
				out.print("\t\t\t\tif( value < "+min+" ) value = "+min+";\n" +
						"\t\t\t\tif( value > "+max+" ) value = "+max+";\n" +
						"\n");
			}
		} else if( imageType.isInteger() ) {
			out.print("\t\t\t\t"+sumType+" value = data[index] + rand.nextInt((int)range)+min;\n");
		} else {
			String randType = imageType.getRandType();
			out.print("\t\t\t\t"+sumType+" value = data[index] + rand.next"+randType+"()*range+min;\n");
		}

		out.print("\t\t\t\tdata[index++] = "+typeCast+" value;\n" +
				"\t\t\t}\n" +
				"\t\t}\n" +
				"\t}\n\n");
	}

	/** Emits addGaussian() for a single-band image, clipped to [lowerBound, upperBound]. */
	public void printAddGaussianSB() {

		String sumType = imageType.getSumType();
		String typeCast = imageType.getTypeCastFromSum();
		String sumCast = sumType.equals("double") ? "" : "("+sumType+")";

		out.print("\t/**\n" +
				"\t * Adds Gaussian/normal i.i.d noise to each pixel in the image. If a value exceeds the specified\n"+
				"\t * it will be set to the closest bound.\n" +
				"\t * @param input Input image. Modified.\n" +
				"\t * @param rand Random number generator.\n" +
				"\t * @param sigma Distributions standard deviation.\n" +
				"\t * @param lowerBound Allowed lower bound\n" +
				"\t * @param upperBound Allowed upper bound\n" +
				"\t */\n" +
				"\tpublic static void addGaussian("+imageName+" input, Random rand , double sigma , " +sumType+" lowerBound , "+sumType+" upperBound ) {\n" +
				"\n" +
				"\t\tfor (int y = 0; y < input.height; y++) {\n" +
				"\t\t\tint index = input.getStartIndex() + y * input.getStride();\n" +
				"\t\t\tfor (int x = 0; x < input.width; x++) {\n" +
				"\t\t\t\t"+sumType+" value = (input.data[index] "+bitWise+") + "+sumCast+"(rand.nextGaussian()*sigma);\n" +
				"\t\t\t\tif( value < lowerBound ) value = lowerBound;\n" +
				"\t\t\t\tif( value > upperBound ) value = upperBound;\n" +
				"\t\t\t\tinput.data[index++] = "+typeCast+" value;\n" +
				"\t\t\t}\n" +
				"\t\t}\n" +
				"\t}\n\n");
	}

	/** Emits addGaussian() for an interleaved image. */
	public void printAddGaussianIL() {

		String imageName = imageType.getInterleavedName();
		String sumType = imageType.getSumType();
		String typeCast = imageType.getTypeCastFromSum();
		String sumCast = sumType.equals("double") ? "" : "("+sumType+")";

		out.print("\t/**\n" +
				"\t * Adds Gaussian/normal i.i.d noise to each pixel in the image. If a value exceeds the specified\n"+
				"\t * it will be set to the closest bound.\n" +
				"\t * @param input Input image. Modified.\n" +
				"\t * @param rand Random number generator.\n" +
				"\t * @param sigma Distributions standard deviation.\n" +
				"\t * @param lowerBound Allowed lower bound\n" +
				"\t * @param upperBound Allowed upper bound\n" +
				"\t */\n" +
				"\tpublic static void addGaussian("+imageName+" input, Random rand , double sigma , " +sumType+" lowerBound , "+sumType+" upperBound ) {\n" +
				"\n" +
				"\t\tint length = input.width*input.numBands;\n" +
				"\n" +
				"\t\tfor (int y = 0; y < input.height; y++) {\n" +
				"\t\t\tint index = input.getStartIndex() + y * input.getStride();\n" +
				"\t\t\tint indexEnd = index+length;\n" +
				"\t\t\twhile( index < indexEnd ) {\n" +
				"\t\t\t\t"+sumType+" value = (input.data[index]"+bitWise+") + "+sumCast+"(rand.nextGaussian()*sigma);\n" +
				"\t\t\t\tif( value < lowerBound ) value = lowerBound;\n" +
				"\t\t\t\tif( value > upperBound ) value = upperBound;\n" +
				"\t\t\t\tinput.data[index++] = "+typeCast+"value;\n" +
				"\t\t\t}\n" +
				"\t\t}\n" +
				"\t}\n\n");
	}

	/** Emits flipVertical(): in-place top-to-bottom flip by swapping row pairs. */
	public void printFlipVertical() {
		String sumType = imageType.getSumType();

		out.print("\t/**\n" +
				"\t * Flips the image from top to bottom\n" +
				"\t */\n" +
				"\tpublic static void flipVertical( "+imageName+" input ) {\n" +
				"\t\tint h2 = input.height/2;\n" +
				"\n" +
				"\t\tfor( int y = 0; y < h2; y++ ) {\n" +
				"\t\t\tint index1 = input.getStartIndex() + y * input.getStride();\n" +
				"\t\t\tint index2 = input.getStartIndex() + (input.height - y - 1) * input.getStride();\n" +
				"\n" +
				"\t\t\tint end = index1 + input.width;\n" +
				"\n" +
				"\t\t\twhile( index1 < end ) {\n" +
				"\t\t\t\t"+sumType+" tmp = input.data[index1];\n" +
				"\t\t\t\tinput.data[index1++] = input.data[index2];\n" +
				"\t\t\t\tinput.data[index2++] = ("+dataType+")tmp;\n" +
				"\t\t\t}\n" +
				"\t\t}\n" +
				"\t}\n\n");
	}

	/** Emits flipHorizontal(): in-place left-to-right flip by swapping column pairs. */
	public void printFlipHorizontal() {
		String sumType = imageType.getSumType();

		out.print("\t/**\n" +
				"\t * Flips the image from left to right\n" +
				"\t */\n" +
				"\tpublic static void flipHorizontal( "+imageName+" input ) {\n" +
				"\t\tint w2 = input.width/2;\n" +
				"\n" +
				"\t\tfor( int y = 0; y < input.height; y++ ) {\n" +
				"\t\t\tint index1 = input.getStartIndex() + y * input.getStride();\n" +
				"\t\t\tint index2 = index1 + input.width-1;\n" +
				"\n" +
				"\t\t\tint end = index1 + w2;\n" +
				"\n" +
				"\t\t\twhile( index1 < end ) {\n" +
				"\t\t\t\t"+sumType+" tmp = input.data[index1];\n" +
				"\t\t\t\tinput.data[index1++] = input.data[index2];\n" +
				"\t\t\t\tinput.data[index2--] = ("+dataType+")tmp;\n" +
				"\t\t\t}\n" +
				"\t\t}\n" +
				"\t}\n\n");
	}

	/** Emits the in-place rotateCW() for square images (four-way element cycle). */
	public void printRotateCW_one() {
		String sumType = imageType.getSumType();

		out.print("\t/**\n" +
				"\t * In-place 90 degree image rotation in the clockwise direction. Only works on\n" +
				"\t * square images.\n" +
				"\t */\n" +
				"\tpublic static void rotateCW( "+imageName+" image ) {\n" +
				"\t\tif( image.width != image.height )\n" +
				"\t\t\tthrow new IllegalArgumentException(\"Image must be square\");\n" +
				"\n" +
				"\t\tint w = image.height/2 + image.height%2;\n" +
				"\t\tint h = image.height/2;\n" +
				"\n" +
				"\t\tfor( int y0 = 0; y0 < h; y0++ ) {\n" +
				"\t\t\tint y1 = image.height-y0-1;\n" +
				"\n" +
				"\t\t\tfor( int x0 = 0; x0 < w; x0++ ) {\n" +
				"\t\t\t\tint x1 = image.width-x0-1;\n" +
				"\n" +
				"\t\t\t\tint index0 = image.startIndex + y0*image.stride + x0;\n" +
				"\t\t\t\tint index1 = image.startIndex + x0*image.stride + y1;\n" +
				"\t\t\t\tint index2 = image.startIndex + y1*image.stride + x1;\n" +
				"\t\t\t\tint index3 = image.startIndex + x1*image.stride + y0;\n" +
				"\t\t\t\t\n" +
				"\t\t\t\t"+sumType+" tmp3 = image.data[index3];\n" +
				"\n" +
				"\t\t\t\timage.data[index3] = image.data[index2];\n" +
				"\t\t\t\timage.data[index2] = image.data[index1];\n" +
				"\t\t\t\timage.data[index1] = image.data[index0];\n" +
				"\t\t\t\timage.data[index0] = ("+dataType+")tmp3;\n" +
				"\t\t\t}\n" +
				"\t\t}\n" +
				"\t}\n\n");
	}

	/** Emits the two-image rotateCW(). NOTE(review): truncated in this chunk — continues past view. */
	public void printRotateCW_two() {
		out.print("\t/**\n" +
				"\t * Rotates the image 90 degrees in the clockwise direction.\n" +
				"\t */\n" +
				"\tpublic static void rotateCW( "+imageName+" input , "+imageName+" output ) {\n" +
				"\t\tif( input.width != 
output.height || input.height != output.width )\n" + "\t\t\tthrow new IllegalArgumentException(\"Incompatible shapes\");\n" + "\n" + "\t\tint h = input.height-1;\n" + "\n" + "\t\tfor( int y = 0; y < input.height; y++ ) {\n" + "\t\t\tint indexIn = input.startIndex + y*input.stride;\n" + "\t\t\tfor (int x = 0; x < input.width; x++) {\n" + "\t\t\t\toutput.unsafe_set(h-y,x,input.data[indexIn++]);\n" + "\t\t\t}\n" + "\t\t}\n" + "\t}\n\n"); } public void printRotateCCW_one() { String sumType = imageType.getSumType(); out.print("\t/**\n" + "\t * In-place 90 degree image rotation in the counter-clockwise direction. Only works on\n" + "\t * square images.\n" + "\t */\n" + "\tpublic static void rotateCCW( "+imageName+" image ) {\n" + "\t\tif( image.width != image.height )\n" + "\t\t\tthrow new IllegalArgumentException(\"Image must be square\");\n" + "\n" + "\t\tint w = image.height/2 + image.height%2;\n" + "\t\tint h = image.height/2;\n" + "\n" + "\t\tfor( int y0 = 0; y0 < h; y0++ ) {\n" + "\t\t\tint y1 = image.height-y0-1;\n" + "\n" + "\t\t\tfor( int x0 = 0; x0 < w; x0++ ) {\n" + "\t\t\t\tint x1 = image.width-x0-1;\n" + "\n" + "\t\t\t\tint index0 = image.startIndex + y0*image.stride + x0;\n" + "\t\t\t\tint index1 = image.startIndex + x0*image.stride + y1;\n" + "\t\t\t\tint index2 = image.startIndex + y1*image.stride + x1;\n" + "\t\t\t\tint index3 = image.startIndex + x1*image.stride + y0;\n" + "\t\t\t\t\n" + "\t\t\t\t"+sumType+" tmp0 = image.data[index0];\n" + "\n" + "\t\t\t\timage.data[index0] = image.data[index1];\n" + "\t\t\t\timage.data[index1] = image.data[index2];\n" + "\t\t\t\timage.data[index2] = image.data[index3];\n" + "\t\t\t\timage.data[index3] = ("+dataType+")tmp0;\n" + "\t\t\t}\n" + "\t\t}\n" + "\t}\n\n"); } public void printRotateCCW_two() { out.print("\t/**\n" + "\t * Rotates the image 90 degrees in the counter-clockwise direction.\n" + "\t */\n" + "\tpublic static void rotateCCW( "+imageName+" input , "+imageName+" output ) {\n" + "\t\tif( input.width != 
output.height || input.height != output.width )\n" + "\t\t\tthrow new IllegalArgumentException(\"Incompatible shapes\");\n" + "\n" + "\t\tint w = input.width-1;\n" + "\n" + "\t\tfor( int y = 0; y < input.height; y++ ) {\n" + "\t\t\tint indexIn = input.startIndex + y*input.stride;\n" + "\t\t\tfor (int x = 0; x < input.width; x++) {\n" + "\t\t\t\toutput.unsafe_set(y,w-x,input.data[indexIn++]);\n" + "\t\t\t}\n" + "\t\t}\n" + "\t}\n\n"); } public static void main( String args[] ) throws FileNotFoundException { GenerateImageMiscOps gen = new GenerateImageMiscOps(); gen.generate(); } }
/* * ========================================================================= * Copyright (c) 2002-2014 Pivotal Software, Inc. All Rights Reserved. * This product is protected by U.S. and international copyright * and intellectual property laws. Pivotal products are covered by * more patents listed at http://www.pivotal.io/patents. * ======================================================================== */ package com.gemstone.gemfire.management.internal; import java.beans.IntrospectionException; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.util.Arrays; import java.util.List; import javax.management.JMX; import javax.management.MBeanException; import javax.management.MBeanInfo; import javax.management.MBeanNotificationInfo; import javax.management.Notification; import javax.management.NotificationBroadcaster; import javax.management.NotificationBroadcasterSupport; import javax.management.NotificationEmitter; import javax.management.NotificationFilter; import javax.management.NotificationListener; import javax.management.ObjectName; import org.apache.logging.log4j.Logger; import com.gemstone.gemfire.SystemFailure; import com.gemstone.gemfire.cache.Region; import com.gemstone.gemfire.cache.execute.FunctionService; import com.gemstone.gemfire.cache.execute.ResultCollector; import com.gemstone.gemfire.distributed.DistributedMember; import com.gemstone.gemfire.internal.logging.LogService; /** * This class is the proxy handler for all the proxies created for federated * MBeans. Its designed with Java proxy mechanism. All data calls are * delegated to the federation components. 
* All method calls are routed to specified members via Function service * * @author rishim * */ public class MBeanProxyInvocationHandler implements InvocationHandler { private static final Logger logger = LogService.getLogger(); /** * Name of the MBean */ private ObjectName objectName; /** * The monitoring region where this Object resides. */ private Region<String, Object> monitoringRegion; /** * The member to which this proxy belongs */ private DistributedMember member; /** * emitter is a helper class for sending notifications on behalf of the proxy */ private final NotificationBroadcasterSupport emitter; private final ProxyInterface proxyImpl; private boolean isMXBean; private MXBeanProxyInvocationHandler mxbeanInvocationRef; /** * * @param member * member to which this MBean belongs * @param monitoringRegion * corresponding MonitoringRegion * @param objectName * ObjectName of the MBean * @param interfaceClass * on which interface the proxy to be exposed * @return Object * @throws ClassNotFoundException * @throws IntrospectionException */ public static Object newProxyInstance(DistributedMember member, Region<String, Object> monitoringRegion, ObjectName objectName, Class interfaceClass) throws ClassNotFoundException, IntrospectionException { boolean isMXBean = JMX.isMXBeanInterface(interfaceClass); boolean notificationBroadcaster = ((FederationComponent) monitoringRegion .get(objectName.toString())).isNotificationEmitter(); InvocationHandler handler = new MBeanProxyInvocationHandler(member, objectName, monitoringRegion, isMXBean); Class[] interfaces; if (notificationBroadcaster) { interfaces = new Class[] { interfaceClass, ProxyInterface.class, NotificationBroadCasterProxy.class }; } else { interfaces = new Class[] { interfaceClass, ProxyInterface.class }; } Object proxy = Proxy.newProxyInstance(MBeanProxyInvocationHandler.class .getClassLoader(), interfaces, handler); return interfaceClass.cast(proxy); } /** * * @param member * member to which this MBean belongs 
* @param objectName * ObjectName of the MBean * @param monitoringRegion * corresponding MonitoringRegion * @throws IntrospectionException * @throws ClassNotFoundException */ private MBeanProxyInvocationHandler(DistributedMember member, ObjectName objectName, Region<String, Object> monitoringRegion, boolean isMXBean) throws IntrospectionException, ClassNotFoundException { this.member = member; this.objectName = objectName; this.monitoringRegion = monitoringRegion; this.emitter = new NotificationBroadcasterSupport(); this.proxyImpl = new ProxyInterfaceImpl(); this.isMXBean = isMXBean; } /** * Inherited method from Invocation handler All object state requests are * delegated to the federated component. * * All setters and operations() are delegated to the function service. * * Notification emmitter methods are also delegated to the function service */ @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { if (logger.isTraceEnabled()) { logger.trace("Invoking Method {}", method.getName()); } final Class methodClass = method.getDeclaringClass(); if (methodClass.equals(NotificationBroadcaster.class) || methodClass.equals(NotificationEmitter.class)) return invokeBroadcasterMethod(proxy, method, args); final String methodName = method.getName(); final Class[] paramTypes = method.getParameterTypes(); final Class returnType = method.getReturnType(); final int nargs = (args == null) ? 
0 : args.length; if (methodName.equals("setLastRefreshedTime")) { proxyImpl.setLastRefreshedTime((Long) args[0]); return null; } if (methodName.equals("getLastRefreshedTime")) { return proxyImpl.getLastRefreshedTime(); } if (methodName.equals("sendNotification")) { sendNotification(args[0]); return null; } // local or not: equals, toString, hashCode if (shouldDoLocally(proxy, method)){ return doLocally(proxy, method, args); } // Support For MXBean open types if (isMXBean) { MXBeanProxyInvocationHandler p = findMXBeanProxy(objectName, methodClass, this); return p.invoke( proxy, method, args); } if (methodName.startsWith("get") && methodName.length() > 3 && nargs == 0 && !returnType.equals(Void.TYPE)) { return delegateToObjectState(methodName.substring(3)); } if (methodName.startsWith("is") && methodName.length() > 2 && nargs == 0 && (returnType.equals(Boolean.TYPE) || returnType.equals(Boolean.class))) { return delegateToObjectState(methodName.substring(2)); } final String[] signature = new String[paramTypes.length]; for (int i = 0; i < paramTypes.length; i++) signature[i] = paramTypes[i].getName(); if (methodName.startsWith("set") && methodName.length() > 3 && nargs == 1 && returnType.equals(Void.TYPE)) { return delegateToFucntionService(objectName, methodName, args, signature); } return delegateToFucntionService(objectName, methodName, args, signature); } /** * As this proxy may behave as an notification emitter it delegates to the * member NotificationBroadcasterSupport object * * @param notification */ private void sendNotification(Object notification) { emitter.sendNotification((Notification) notification); } /** * This will get the data from Object state which is replicated across the * hidden region FederataionComponent being the carrier. 
* * @param attributeName * @return Object */ protected Object delegateToObjectState(String attributeName) throws Throwable { Object returnObj; try { FederationComponent fedComp = (FederationComponent) monitoringRegion .get(objectName.toString()); returnObj = fedComp.getValue(attributeName); } catch (IllegalArgumentException e) { throw new MBeanException(e); } catch (Exception e) { throw new MBeanException(e); } catch (VirtualMachineError e) { SystemFailure.initiateFailure(e); throw e; } catch (Throwable th) { SystemFailure.checkFailure(); throw new MBeanException(new Exception(th.getLocalizedMessage())); } return returnObj; } /** * It will call the Generic function to execute the method on the remote VM * * @param objectName * ObjectName of the MBean * @param methodName * method name * @param args * arguments to the methods * @param signature * signature of the method * @return result Object */ protected Object delegateToFucntionService(ObjectName objectName, String methodName, Object[] args, String[] signature) throws Throwable { Object[] functionArgs = new Object[5]; functionArgs[0] = objectName; functionArgs[1] = methodName; functionArgs[2] = signature; functionArgs[3] = args; functionArgs[4] = member.getName(); List<Object> result = null; try { ResultCollector rc = FunctionService.onMember(member).withArgs( functionArgs).execute(ManagementConstants.MGMT_FUNCTION_ID); result = (List<Object>) rc.getResult(); // Exceptions of ManagementFunctions } catch (Exception e) { if (logger.isDebugEnabled()) { logger.debug(" Exception while Executing Funtion {}", e.getMessage(), e); } //Only in case of Exception caused for Function framework. 
return null; } catch (VirtualMachineError e) { SystemFailure.initiateFailure(e); throw e; } catch (Throwable th) { SystemFailure.checkFailure(); if (logger.isDebugEnabled()) { logger.debug(" Exception while Executing Funtion {}", th.getMessage(), th); } return null; } return checkErrors(result.get(ManagementConstants.RESULT_INDEX)); } private Object checkErrors(Object lastResult) throws Throwable { if (lastResult instanceof MBeanException) { // Convert all MBean public API exceptions to MBeanException throw (Exception) lastResult; } if (lastResult instanceof Exception) { return null; } if (lastResult instanceof Throwable) { return null; } return lastResult; } /** * The call will delegate to Managed Node for NotificationHub to register a * local listener to listen for notification from the MBean * * Moreover it will also add the client to local listener list by adding to * the contained emitter. * * @param proxy * the proxy object * @param method * method to be invoked * @param args * method arguments * @return result value if any * @throws Exception */ private Object invokeBroadcasterMethod(Object proxy, Method method, Object[] args) throws Throwable { final String methodName = method.getName(); final int nargs = (args == null) ? 0 : args.length; final Class[] paramTypes = method.getParameterTypes(); final String[] signature = new String[paramTypes.length]; if (methodName.equals("addNotificationListener")) { /* * The various throws of IllegalArgumentException here should not happen, * since we know what the methods in NotificationBroadcaster and * NotificationEmitter are. */ if (nargs != 3) { final String msg = "Bad arg count to addNotificationListener: " + nargs; throw new IllegalArgumentException(msg); } /* * Other inconsistencies will produce ClassCastException below. 
*/ NotificationListener listener = (NotificationListener) args[0]; NotificationFilter filter = (NotificationFilter) args[1]; Object handback = args[2]; emitter.addNotificationListener(listener, filter, handback); delegateToFucntionService(objectName, methodName, null, signature); return null; } else if (methodName.equals("removeNotificationListener")) { /* * NullPointerException if method with no args, but that shouldn't happen * because removeNL does have args. */ NotificationListener listener = (NotificationListener) args[0]; switch (nargs) { case 1: emitter.removeNotificationListener(listener); /** * No need to send listener and filter details to other members. * We only need to send a message saying remove the listner registered for this object on your side. * Fixes Bug[ #47075 ] */ delegateToFucntionService(objectName, methodName, null, signature); return null; case 3: NotificationFilter filter = (NotificationFilter) args[1]; Object handback = args[2]; emitter.removeNotificationListener(listener, filter, handback); delegateToFucntionService(objectName, methodName, null, signature); return null; default: final String msg = "Bad arg count to removeNotificationListener: " + nargs; throw new IllegalArgumentException(msg); } } else if (methodName.equals("getNotificationInfo")) { if (args != null) { throw new IllegalArgumentException("getNotificationInfo has " + "args"); } if(!MBeanJMXAdapter.mbeanServer.isRegistered(objectName)){ return new MBeanNotificationInfo[0]; } /** * MBean info is delegated to function service as intention is to get the * info of the actual mbean rather than the proxy */ Object obj = delegateToFucntionService(objectName, methodName, args, signature); if(obj instanceof String){ return new MBeanNotificationInfo[0]; } MBeanInfo info = (MBeanInfo) obj; return info.getNotifications(); } else { throw new IllegalArgumentException("Bad method name: " + methodName); } } /** * Internal implementation of all the generic proxy methods * * @author rishim 
* */ private class ProxyInterfaceImpl implements ProxyInterface { /** * last refreshed time of the proxy */ private long lastRefreshedTime; /** * Constructore */ public ProxyInterfaceImpl() { this.lastRefreshedTime = System.currentTimeMillis(); } /** * Last refreshed time */ public long getLastRefreshedTime() { return lastRefreshedTime; } /** * sets the proxy refresh time */ public void setLastRefreshedTime(long lastRefreshedTime) { this.lastRefreshedTime = lastRefreshedTime; } } private boolean shouldDoLocally(Object proxy, Method method) { final String methodName = method.getName(); if ((methodName.equals("hashCode") || methodName.equals("toString")) && method.getParameterTypes().length == 0) return true; if (methodName.equals("equals") && Arrays.equals(method.getParameterTypes(), new Class[] { Object.class })) return true; return false; } private Object doLocally(Object proxy, Method method, Object[] args) { final String methodName = method.getName(); FederationComponent fedComp = (FederationComponent) monitoringRegion .get(objectName.toString()); if (methodName.equals("equals")) { return fedComp.equals(args[0]); } else if (methodName.equals("toString")) { return fedComp.toString(); } else if (methodName.equals("hashCode")) { return fedComp.hashCode(); } throw new RuntimeException("Unexpected method name: " + methodName); } private MXBeanProxyInvocationHandler findMXBeanProxy(ObjectName objectName, Class<?> mxbeanInterface, MBeanProxyInvocationHandler handler) throws Throwable { MXBeanProxyInvocationHandler proxyRef = mxbeanInvocationRef; if (mxbeanInvocationRef == null) { synchronized (this) { try { mxbeanInvocationRef = new MXBeanProxyInvocationHandler(objectName, mxbeanInterface, handler); } catch (IllegalArgumentException e) { String msg = "Cannot make MXBean proxy for " + mxbeanInterface.getName() + ": " + e.getMessage(); throw new IllegalArgumentException(msg, e.getCause()); } } } return mxbeanInvocationRef; } }
// Generated from QL.g4 by ANTLR 4.2
// NOTE(review): this file is ANTLR-generated; regenerate from QL.g4 instead of
// editing it by hand.
package ql.parser.antlr;

import ql.ast.expr.*;
import ql.ast.form.stat.*;
import ql.ast.form.*;
import ql.ast.type.*;
import ql.ast.expr.exprType.*;
import ql.ast.expr.operation.*;
import ql.ast.expr.operation.add.*;
import ql.ast.expr.operation.andor.*;
import ql.ast.expr.operation.mul.*;
import ql.ast.expr.operation.rel.*;
import ql.ast.expr.operation.un.*;

import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.runtime.tree.ErrorNode;
import org.antlr.v4.runtime.tree.TerminalNode;

/**
 * This class provides an empty implementation of {@link QLListener},
 * which can be extended to create a listener which only needs to handle a subset
 * of the available methods. Every method body below is intentionally a no-op.
 */
public class QLBaseListener implements QLListener {

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterAndExpr(@NotNull QLParser.AndExprContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitAndExpr(@NotNull QLParser.AndExprContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterForm(@NotNull QLParser.FormContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitForm(@NotNull QLParser.FormContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterFormItems(@NotNull QLParser.FormItemsContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitFormItems(@NotNull QLParser.FormItemsContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterAddExpr(@NotNull QLParser.AddExprContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitAddExpr(@NotNull QLParser.AddExprContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterMulExpr(@NotNull QLParser.MulExprContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitMulExpr(@NotNull QLParser.MulExprContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterExpr(@NotNull QLParser.ExprContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitExpr(@NotNull QLParser.ExprContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterQuestion(@NotNull QLParser.QuestionContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitQuestion(@NotNull QLParser.QuestionContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterRelExpr(@NotNull QLParser.RelExprContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitRelExpr(@NotNull QLParser.RelExprContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterType(@NotNull QLParser.TypeContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitType(@NotNull QLParser.TypeContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterUnExpr(@NotNull QLParser.UnExprContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitUnExpr(@NotNull QLParser.UnExprContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterStat(@NotNull QLParser.StatContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitStat(@NotNull QLParser.StatContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterOrExpr(@NotNull QLParser.OrExprContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitOrExpr(@NotNull QLParser.OrExprContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterEveryRule(@NotNull ParserRuleContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitEveryRule(@NotNull ParserRuleContext ctx) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void visitTerminal(@NotNull TerminalNode node) { }

	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void visitErrorNode(@NotNull ErrorNode node) { }
}
package com.xyp.sapidoc.idoc.model;

import com.xyp.sapidoc.idoc.annotation.IdocField;

/**
 * Plain data holder for an EDI_DS40 record. Each field carries an
 * {@link IdocField} annotation describing its fixed-width layout: the field
 * name, its character length, its ordinal position ({@code field_pos}) and
 * its 1-based first/last character offsets within the flat record
 * (positions 1-562 in total). All values are stored as raw strings; this
 * class performs no parsing or validation itself.
 *
 * NOTE(review): field semantics (e.g. that DOCNUM is an IDoc document number)
 * are presumed from SAP naming conventions — confirm against the IDoc
 * definition this class was generated from.
 */
public class EDI_DS40 {

    @IdocField(name = "TABNAM", length = 10, field_pos = 1, character_first = 1, character_last = 10)
    private String TABNAM;

    @IdocField(name = "MANDT", length = 3, field_pos = 2, character_first = 11, character_last = 13)
    private String MANDT;

    @IdocField(name = "DOCNUM", length = 16, field_pos = 3, character_first = 14, character_last = 29)
    private String DOCNUM;

    @IdocField(name = "LOGDAT", length = 8, field_pos = 4, character_first = 30, character_last = 37)
    private String LOGDAT;

    @IdocField(name = "LOGTIM", length = 6, field_pos = 5, character_first = 38, character_last = 43)
    private String LOGTIM;

    @IdocField(name = "STATUS", length = 2, field_pos = 6, character_first = 44, character_last = 45)
    private String STATUS;

    @IdocField(name = "STAMQU", length = 3, field_pos = 7, character_first = 46, character_last = 48)
    private String STAMQU;

    @IdocField(name = "STAMID", length = 20, field_pos = 8, character_first = 49, character_last = 68)
    private String STAMID;

    @IdocField(name = "STAMNO", length = 3, field_pos = 9, character_first = 69, character_last = 71)
    private String STAMNO;

    @IdocField(name = "STATYP", length = 1, field_pos = 10, character_first = 72, character_last = 72)
    private String STATYP;

    @IdocField(name = "STAPA1", length = 50, field_pos = 11, character_first = 73, character_last = 122)
    private String STAPA1;

    @IdocField(name = "STAPA2", length = 50, field_pos = 12, character_first = 123, character_last = 172)
    private String STAPA2;

    @IdocField(name = "STAPA3", length = 50, field_pos = 13, character_first = 173, character_last = 222)
    private String STAPA3;

    @IdocField(name = "STAPA4", length = 50, field_pos = 14, character_first = 223, character_last = 272)
    private String STAPA4;

    @IdocField(name = "STATXT", length = 70, field_pos = 15, character_first = 273, character_last = 342)
    private String STATXT;

    @IdocField(name = "UNAME", length = 12, field_pos = 16, character_first = 343, character_last = 354)
    private String UNAME;

    @IdocField(name = "REPID", length = 30, field_pos = 17, character_first = 355, character_last = 384)
    private String REPID;

    @IdocField(name = "ROUTID", length = 30, field_pos = 18, character_first = 385, character_last = 414)
    private String ROUTID;

    @IdocField(name = "SEGNUM", length = 6, field_pos = 19, character_first = 415, character_last = 420)
    private String SEGNUM;

    @IdocField(name = "SEGFLD", length = 30, field_pos = 20, character_first = 421, character_last = 450)
    private String SEGFLD;

    @IdocField(name = "REFINT", length = 14, field_pos = 21, character_first = 451, character_last = 464)
    private String REFINT;

    @IdocField(name = "REFGRP", length = 14, field_pos = 22, character_first = 465, character_last = 478)
    private String REFGRP;

    @IdocField(name = "REFMES", length = 14, field_pos = 23, character_first = 479, character_last = 492)
    private String REFMES;

    @IdocField(name = "ARCKEY", length = 70, field_pos = 24, character_first = 493, character_last = 562)
    private String ARCKEY;

    // --- Setters: trivial field assignments, no validation or trimming. ---

    public void setTABNAM(String TABNAM) { this.TABNAM = TABNAM; }

    public void setMANDT(String MANDT) { this.MANDT = MANDT; }

    public void setDOCNUM(String DOCNUM) { this.DOCNUM = DOCNUM; }

    public void setLOGDAT(String LOGDAT) { this.LOGDAT = LOGDAT; }

    public void setLOGTIM(String LOGTIM) { this.LOGTIM = LOGTIM; }

    public void setSTATUS(String STATUS) { this.STATUS = STATUS; }

    public void setSTAMQU(String STAMQU) { this.STAMQU = STAMQU; }

    public void setSTAMID(String STAMID) { this.STAMID = STAMID; }

    public void setSTAMNO(String STAMNO) { this.STAMNO = STAMNO; }

    public void setSTATYP(String STATYP) { this.STATYP = STATYP; }

    public void setSTAPA1(String STAPA1) { this.STAPA1 = STAPA1; }

    public void setSTAPA2(String STAPA2) { this.STAPA2 = STAPA2; }

    public void setSTAPA3(String STAPA3) { this.STAPA3 = STAPA3; }

    public void setSTAPA4(String STAPA4) { this.STAPA4 = STAPA4; }

    public void setSTATXT(String STATXT) { this.STATXT = STATXT; }

    public void setUNAME(String UNAME) { this.UNAME = UNAME; }

    public void setREPID(String REPID) { this.REPID = REPID; }

    public void setROUTID(String ROUTID) { this.ROUTID = ROUTID; }

    public void setSEGNUM(String SEGNUM) { this.SEGNUM = SEGNUM; }

    public void setSEGFLD(String SEGFLD) { this.SEGFLD = SEGFLD; }

    public void setREFINT(String REFINT) { this.REFINT = REFINT; }

    public void setREFGRP(String REFGRP) { this.REFGRP = REFGRP; }

    public void setREFMES(String REFMES) { this.REFMES = REFMES; }

    public void setARCKEY(String ARCKEY) { this.ARCKEY = ARCKEY; }

    // --- Getters: return the raw stored value (may be null if never set). ---

    public String getTABNAM() { return TABNAM; }

    public String getMANDT() { return MANDT; }

    public String getDOCNUM() { return DOCNUM; }

    public String getLOGDAT() { return LOGDAT; }

    public String getLOGTIM() { return LOGTIM; }

    public String getSTATUS() { return STATUS; }

    public String getSTAMQU() { return STAMQU; }

    public String getSTAMID() { return STAMID; }

    public String getSTAMNO() { return STAMNO; }

    public String getSTATYP() { return STATYP; }

    public String getSTAPA1() { return STAPA1; }

    public String getSTAPA2() { return STAPA2; }

    public String getSTAPA3() { return STAPA3; }

    public String getSTAPA4() { return STAPA4; }

    public String getSTATXT() { return STATXT; }

    public String getUNAME() { return UNAME; }

    public String getREPID() { return REPID; }

    public String getROUTID() { return ROUTID; }

    public String getSEGNUM() { return SEGNUM; }

    public String getSEGFLD() { return SEGFLD; }

    public String getREFINT() { return REFINT; }

    public String getREFGRP() { return REFGRP; }

    public String getREFMES() { return REFMES; }

    public String getARCKEY() { return ARCKEY; }
}
/*
 * Copyright 2009-2014 DigitalGlobe, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and limitations under the License.
 */

package org.mrgeo.services.mrspyramid.rendering;

import org.apache.commons.lang.ClassUtils;
import org.reflections.Reflections;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

/**
 * Selects an image handler for images rendered by WMS requests (e.g. image renderer, color scale
 * applier, image response writer, etc.). Uses text matching between the requested image format and
 * class handler names to select the appropriate handler. There is no ImageHandler interface per se,
 * but if "handlers" follow a convention similar to what's described in the ImageRenderer interface,
 * they can be instantiated by this factory.
 */
public class ImageHandlerFactory
{
  private static final Logger log = LoggerFactory.getLogger(ImageHandlerFactory.class);

  // Became frustrated by the mime types not being picked up on deployed systems despite following
  // the instructions in the documentation at:
  // http://docs.oracle.com/javaee/5/api/javax/activation/MimetypesFileTypeMap.html
  // Its currently far simpler to do some string comparisons to find the correct handler to use.
  // If necessary, the former code used to convert the image format to a mime type can be revived
  // from source history.

  // Lazily-built registries, keyed first by handler type (ImageRenderer, ColorScaleApplier,
  // ImageResponseWriter), then by format string / mime type.  Declared volatile so the
  // double-checked lazy initialization below publishes only fully-built maps to other threads.
  static volatile Map<Class<?>, Map<String, Class<?>>> imageFormatHandlers = null;
  static volatile Map<Class<?>, Map<String, Class<?>>> mimeTypeHandlers = null;

  /**
   * Returns a MrGeo WMS "image handler" for the requested image format.
   *
   * @param format image format (or mime type) the requested image handler supports
   * @param handlerType a supported image handler type (e.g. ImageRenderer, ColorScaleApplier,
   *          ImageResponseWriter)
   * @return a new instance of the matching handler class
   * @throws Exception if no handler matches or the handler cannot be instantiated
   */
  public static Object getHandler(final String format, final Class<?> handlerType) throws Exception
  {
    return getHandler(format, handlerType, null, null);
  }

  /**
   * Returns a MrGeo WMS "image handler" for the requested image format.  The format is matched
   * first against the handlers' advertised mime types, then against their WMS format strings.
   *
   * @param imageFormat image format (or mime type) the requested image handler supports
   * @param handlerType a supported image handler type
   * @param constructorParams currently unused; handlers are created via their no-arg constructor
   * @param constructorParamTypes currently unused
   * @return a new instance of the matching handler class
   * @throws Exception if no handler matches or the handler cannot be instantiated
   */
  @SuppressWarnings("unused")
  public static Object getHandler(String imageFormat, final Class<?> handlerType,
      final Object[] constructorParams, final Class<?>[] constructorParamTypes) throws Exception
  {
    if (imageFormatHandlers == null || mimeTypeHandlers == null)
    {
      loadHandlers();
    }

    if (org.apache.commons.lang.StringUtils.isEmpty(imageFormat))
    {
      throw new IllegalArgumentException("NULL image format requested.");
    }
    log.debug("Requested image format: {}", imageFormat);
    imageFormat = imageFormat.toLowerCase();

    if (handlerType == null)
    {
      throw new IllegalArgumentException("NULL handler type requested.");
    }
    log.debug("Requested handler type: {}", handlerType.getName());

    // first look in the mime types
    Map<String, Class<?>> handlers = mimeTypeHandlers.get(handlerType);
    if (handlers != null && handlers.containsKey(imageFormat))
    {
      return handlers.get(imageFormat).newInstance();
    }

    // now look in the formats
    handlers = imageFormatHandlers.get(handlerType);
    if (handlers != null && handlers.containsKey(imageFormat))
    {
      return handlers.get(imageFormat).newInstance();
    }

    throw new IllegalArgumentException("Unsupported image format - " + imageFormat);
  }

  /**
   * Returns the WMS format strings supported by the given handler type.
   *
   * @param handlerType a supported image handler type
   * @return the registered format strings
   * @throws IllegalArgumentException if the handler type is unknown
   */
  public static String[] getImageFormats(final Class<?> handlerType)
  {
    if (imageFormatHandlers == null || mimeTypeHandlers == null)
    {
      loadHandlers();
    }

    final Map<String, Class<?>> handlers = imageFormatHandlers.get(handlerType);
    if (handlers != null)
    {
      return handlers.keySet().toArray(new String[0]);
    }
    throw new IllegalArgumentException("Invalid handler type: " + handlerType.getCanonicalName() +
        ". Not supported.");
  }

  /**
   * Returns the mime types supported by the given handler type.
   *
   * @param handlerType a supported image handler type
   * @return the registered mime types
   * @throws IllegalArgumentException if the handler type is unknown
   */
  public static String[] getMimeFormats(final Class<?> handlerType)
  {
    if (imageFormatHandlers == null || mimeTypeHandlers == null)
    {
      loadHandlers();
    }

    final Map<String, Class<?>> handlers = mimeTypeHandlers.get(handlerType);
    if (handlers != null)
    {
      return handlers.keySet().toArray(new String[0]);
    }
    throw new IllegalArgumentException("Invalid handler type: " + handlerType.getCanonicalName() +
        ". Not supported.");
  }

  /**
   * Registers the WMS format strings advertised by the handler class (via its getWmsFormats()
   * method) into the supplied map.
   */
  private static void addFormatHandlers(final Map<String, Class<?>> handlers, final Class<?> clazz)
  {
    addHandlers(handlers, clazz, "getWmsFormats");
  }

  /**
   * Registers the mime types advertised by the handler class (via its getMimeTypes() method)
   * into the supplied map.
   */
  private static void addMimeHandlers(final Map<String, Class<?>> handlers, final Class<?> clazz)
  {
    addHandlers(handlers, clazz, "getMimeTypes");
  }

  /**
   * Instantiates the handler class and records each string returned by the named no-arg accessor
   * as a key mapping to that class.  Registration is best-effort: classes that cannot be
   * instantiated or lack the accessor are skipped, but -- unlike the previous empty catch
   * blocks -- the reason is now logged.
   */
  private static void addHandlers(final Map<String, Class<?>> handlers, final Class<?> clazz,
      final String accessorName)
  {
    try
    {
      final Object instance = clazz.newInstance();
      final Method method = clazz.getMethod(accessorName);
      final Object result = method.invoke(instance);
      if (result != null)
      {
        for (final String format : (String[]) result)
        {
          handlers.put(format, clazz);
          log.info("  {}", format);
        }
      }
    }
    catch (final Exception e)
    {
      // Broad catch is intentional at this boundary: registration wraps several reflective
      // calls (InstantiationException, IllegalAccessException, NoSuchMethodException,
      // InvocationTargetException, ...) and one bad handler must not abort discovery.
      log.warn("Unable to register handlers for " + clazz.getCanonicalName(), e);
    }
  }

  /**
   * Discovers and registers all handler implementations found on the classpath.  Thread-safe:
   * the registry maps are fully populated before being published to the (volatile) static
   * fields, so unsynchronized readers never observe a partially-built registry.
   */
  private static synchronized void loadHandlers()
  {
    if (imageFormatHandlers != null && mimeTypeHandlers != null)
    {
      return;
    }

    final Map<Class<?>, Map<String, Class<?>>> formatHandlers =
        new HashMap<Class<?>, Map<String, Class<?>>>();
    final Map<Class<?>, Map<String, Class<?>>> mimeHandlers =
        new HashMap<Class<?>, Map<String, Class<?>>>();

    // image format renderers
    registerHandlers(ImageRenderer.class, "Image Renderer", formatHandlers, mimeHandlers);
    // Color scale appliers
    registerHandlers(ColorScaleApplier.class, "Color Scale Applier", formatHandlers, mimeHandlers);
    // image response writers
    registerHandlers(ImageResponseWriter.class, "Image Response Writer", formatHandlers,
        mimeHandlers);

    // Publish only after the maps are complete.
    mimeTypeHandlers = mimeHandlers;
    imageFormatHandlers = formatHandlers;
  }

  /**
   * Scans the handler type's package for concrete subtypes and registers each one's mime types
   * and WMS format strings.
   */
  private static <T> void registerHandlers(final Class<T> handlerType, final String description,
      final Map<Class<?>, Map<String, Class<?>>> formatHandlers,
      final Map<Class<?>, Map<String, Class<?>>> mimeHandlers)
  {
    final Map<String, Class<?>> mime = new HashMap<String, Class<?>>();
    final Map<String, Class<?>> format = new HashMap<String, Class<?>>();
    mimeHandlers.put(handlerType, mime);
    formatHandlers.put(handlerType, format);

    final Reflections reflections = new Reflections(ClassUtils.getPackageName(handlerType));
    final Set<Class<? extends T>> handlerClasses = reflections.getSubTypesOf(handlerType);
    for (final Class<? extends T> clazz : handlerClasses)
    {
      log.info("Registering {}: {}", description, clazz.getCanonicalName());
      log.info(" Mime Types");
      addMimeHandlers(mime, clazz);
      log.info(" Format Strings");
      addFormatHandlers(format, clazz);
    }
  }
}
/* @test @bug 6431076 @summary Mouse cursor must remain DEFAULT over scrollbar when text is typed @author Andrei Dmitriev: area=TextArea @run main/manual Test */ import java.awt.*; import java.awt.event.*; public class Test { private static void init() { Frame f = new Frame("Test for cursor"); final int dim = 100; String line = ""; for( int i=0; i<dim; ++i ) { line += "a"; } String text = ""; for( int i=0; i<dim; ++i ) { text += line; if( i < dim-1 ) { text += "\n"; } } f.setLayout( new BorderLayout () ); f.add( new TextArea( text ) ); f.setSize(400, 300); f.setVisible(true); String[] instructions = { "1. Place keyboard cursor inside TextArea.", "2. Repeat steps 2.* for each of two TextArea's scrollbars.", "2.1. Place mouse cursor over TextArea's scrollbar.", "2.2. If mouse cursor is not DEFAULT_CURSOR (arrow), test failed.", "2.3. Type any symbol into TextArea.", "2.4. Type ENTER symbol into TextArea.", "2.5. If mouse cursor changes to TEXT_CURSOR (beam), test failed", "(if cursor disappears on Windows, it's OK).", "3. Test passed.", }; Sysout.createDialogWithInstructions( instructions ); } /***************************************************** * Standard Test Machinery Section * DO NOT modify anything in this section -- it's a * standard chunk of code which has all of the * synchronisation necessary for the test harness. * By keeping it the same in all tests, it is easier * to read and understand someone else's test, as * well as insuring that all tests behave correctly * with the test harness. 
* There is a section following this for test-defined * classes ******************************************************/ private static boolean theTestPassed = false; private static boolean testGeneratedInterrupt = false; private static String failureMessage = ""; private static Thread mainThread = null; private static int sleepTime = 300000; public static void main( String args[] ) throws InterruptedException { mainThread = Thread.currentThread(); try { init(); } catch( TestPassedException e ) { //The test passed, so just return from main and harness will // interepret this return as a pass return; } //At this point, neither test passed nor test failed has been // called -- either would have thrown an exception and ended the // test, so we know we have multiple threads. //Test involves other threads, so sleep and wait for them to // called pass() or fail() try { Thread.sleep( sleepTime ); //Timed out, so fail the test throw new RuntimeException( "Timed out after " + sleepTime/1000 + " seconds" ); } catch (InterruptedException e) { if( ! testGeneratedInterrupt ) throw e; //reset flag in case hit this code more than once for some reason (just safety) testGeneratedInterrupt = false; if ( theTestPassed == false ) { throw new RuntimeException( failureMessage ); } } }//main public static synchronized void setTimeoutTo( int seconds ) { sleepTime = seconds * 1000; } public static synchronized void pass() { Sysout.println( "The test passed." ); Sysout.println( "The test is over, hit Ctl-C to stop Java VM" ); //first check if this is executing in main thread if ( mainThread == Thread.currentThread() ) { //Still in the main thread, so set the flag just for kicks, // and throw a test passed exception which will be caught // and end the test. theTestPassed = true; throw new TestPassedException(); } //pass was called from a different thread, so set the flag and interrupt // the main thead. 
theTestPassed = true; testGeneratedInterrupt = true; if (mainThread != null){ mainThread.interrupt(); } }//pass() public static synchronized void fail() { //test writer didn't specify why test failed, so give generic fail( "it just plain failed! :-)" ); } public static synchronized void fail( String whyFailed ) { Sysout.println( "The test failed: " + whyFailed ); Sysout.println( "The test is over, hit Ctl-C to stop Java VM" ); //check if this called from main thread if ( mainThread == Thread.currentThread() ) { //If main thread, fail now 'cause not sleeping throw new RuntimeException( whyFailed ); } theTestPassed = false; testGeneratedInterrupt = true; failureMessage = whyFailed; mainThread.interrupt(); }//fail() }// class //This exception is used to exit from any level of call nesting // when it's determined that the test has passed, and immediately // end the test. class TestPassedException extends RuntimeException { } //*********** End Standard Test Machinery Section ********** //************ Begin classes defined for the test **************** // make listeners in a class defined here, and instantiate them in init() /* Example of a class which may be written as part of a test class NewClass implements anInterface { static int newVar = 0; public void eventDispatched(AWTEvent e) { //Counting events to see if we get enough eventCount++; if( eventCount == 20 ) { //got enough events, so pass ManualMainTest.pass(); } else if( tries == 20 ) { //tried too many times without getting enough events so fail ManualMainTest.fail(); } }// eventDispatched() }// NewClass class */ //************** End classes defined for the test ******************* /**************************************************** Standard Test Machinery DO NOT modify anything below -- it's a standard chunk of code whose purpose is to make user interaction uniform, and thereby make it simpler to read and understand someone else's test. 
****************************************************/ /** This is part of the standard test machinery. It creates a dialog (with the instructions), and is the interface for sending text messages to the user. To print the instructions, send an array of strings to Sysout.createDialog WithInstructions method. Put one line of instructions per array entry. To display a message for the tester to see, simply call Sysout.println with the string to be displayed. This mimics System.out.println but works within the test harness as well as standalone. */ class Sysout { private static TestDialog dialog; private static boolean numbering = false; private static int messageNumber = 0; public static void createDialogWithInstructions( String[] instructions ) { dialog = new TestDialog( new Frame(), "Instructions" ); dialog.printInstructions( instructions ); dialog.setVisible(true); println( "Any messages for the tester will display here." ); } public static void createDialog( ) { dialog = new TestDialog( new Frame(), "Instructions" ); String[] defInstr = { "Instructions will appear here. ", "" } ; dialog.printInstructions( defInstr ); dialog.setVisible(true); println( "Any messages for the tester will display here." ); } /* Enables message counting for the tester. */ public static void enableNumbering(boolean enable){ numbering = enable; } public static void printInstructions( String[] instructions ) { dialog.printInstructions( instructions ); } public static void println( String messageIn ) { if (numbering) { messageIn = "" + messageNumber + " " + messageIn; messageNumber++; } dialog.displayMessage( messageIn ); } }// Sysout class /** This is part of the standard test machinery. It provides a place for the test instructions to be displayed, and a place for interactive messages to the user to be displayed. To have the test instructions displayed, see Sysout. To have a message to the user be displayed, see Sysout. Do not call anything in this dialog directly. 
*/ class TestDialog extends Dialog implements ActionListener { TextArea instructionsText; TextArea messageText; int maxStringLength = 80; Panel buttonP = new Panel(); Button passB = new Button( "pass" ); Button failB = new Button( "fail" ); //DO NOT call this directly, go through Sysout public TestDialog( Frame frame, String name ) { super( frame, name ); int scrollBoth = TextArea.SCROLLBARS_BOTH; instructionsText = new TextArea( "", 15, maxStringLength, scrollBoth ); add( "North", instructionsText ); messageText = new TextArea( "", 5, maxStringLength, scrollBoth ); add("Center", messageText); passB = new Button( "pass" ); passB.setActionCommand( "pass" ); passB.addActionListener( this ); buttonP.add( "East", passB ); failB = new Button( "fail" ); failB.setActionCommand( "fail" ); failB.addActionListener( this ); buttonP.add( "West", failB ); add( "South", buttonP ); pack(); setVisible(true); }// TestDialog() //DO NOT call this directly, go through Sysout public void printInstructions( String[] instructions ) { //Clear out any current instructions instructionsText.setText( "" ); //Go down array of instruction strings String printStr, remainingStr; for( int i=0; i < instructions.length; i++ ) { //chop up each into pieces maxSringLength long remainingStr = instructions[ i ]; while( remainingStr.length() > 0 ) { //if longer than max then chop off first max chars to print if( remainingStr.length() >= maxStringLength ) { //Try to chop on a word boundary int posOfSpace = remainingStr. 
lastIndexOf( ' ', maxStringLength - 1 ); if( posOfSpace <= 0 ) posOfSpace = maxStringLength - 1; printStr = remainingStr.substring( 0, posOfSpace + 1 ); remainingStr = remainingStr.substring( posOfSpace + 1 ); } //else just print else { printStr = remainingStr; remainingStr = ""; } instructionsText.append( printStr + "\n" ); }// while }// for }//printInstructions() //DO NOT call this directly, go through Sysout public void displayMessage( String messageIn ) { messageText.append( messageIn + "\n" ); System.out.println(messageIn); } //catch presses of the passed and failed buttons. //simply call the standard pass() or fail() static methods of //ManualMainTest public void actionPerformed( ActionEvent e ) { if( e.getActionCommand() == "pass" ) { Test.pass(); } else { Test.fail(); } } }// TestDialog class
/* * Copyright (C) 2015-2016 Willi Ye <williye97@gmail.com> * * This file is part of Kernel Adiutor. * * Kernel Adiutor is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Kernel Adiutor is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Kernel Adiutor. If not, see <http://www.gnu.org/licenses/>. * */ package com.grarak.kerneladiutor.views.recyclerview; import android.animation.ValueAnimator; import android.support.v7.widget.AppCompatImageView; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.LinearLayout; import android.widget.TextView; import com.grarak.kerneladiutor.R; import java.util.ArrayList; import java.util.List; /** * Created by willi on 04.06.16. 
 */
public class DropDownView extends RecyclerViewItem {

    // Views bound in onCreateView(); null until the row view has been inflated.
    private TextView mTitle;
    private TextView mSummary;
    private AppCompatImageView mArrow;
    private LinearLayout mParent;

    // Pending state, applied to the views by refresh().
    private CharSequence mTitleText;
    private CharSequence mSummaryText;
    private List<String> mItems;
    private int mSelection = -1;       // index of the checked item; -1 = nothing selected
    private boolean mExpanded;         // whether the item list is currently shown
    private List<View> mDoneViews = new ArrayList<>();   // per-item checkmark views, parallel to mItems
    private float mItemHeight;         // fixed per-row height, read from resources
    private ValueAnimator mAnimator;   // in-flight expand/collapse animation, if any

    private OnDropDownListener mOnDropDownListener;

    /** Callback invoked when the user taps one of the drop-down items. */
    public interface OnDropDownListener {
        void onSelect(DropDownView dropDownView, int position, String value);
    }

    @Override
    public int getLayoutRes() {
        return R.layout.rv_drop_down_view;
    }

    @Override
    public void onCreateView(View view) {
        mTitle = (TextView) view.findViewById(R.id.title);
        mSummary = (TextView) view.findViewById(R.id.summary);
        mArrow = (AppCompatImageView) view.findViewById(R.id.arrow_image);
        mParent = (LinearLayout) view.findViewById(R.id.parent_layout);
        mItemHeight = view.getResources().getDimension(R.dimen.rv_drop_down_item_height);

        // Restore the expanded/collapsed presentation without animating
        // (e.g. when the row view is re-created/rebound).
        mArrow.setRotationX(mExpanded ? 0 : 180);
        setHeight(mExpanded && mItems != null ? Math.round(mItemHeight * mItems.size()) : 0);
        view.findViewById(R.id.title_parent).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Tapping the header toggles the list open/closed.
                if (mExpanded) {
                    collapse();
                } else {
                    expand();
                }
            }
        });

        super.onCreateView(view);
    }

    public void setTitle(CharSequence title) {
        mTitleText = title;
        refresh();
    }

    public void setSummary(CharSequence summary) {
        mSummaryText = summary;
        refresh();
    }

    public void setItems(List<String> items) {
        mItems = items;
        refresh();
    }

    public void setSelection(int selection) {
        mSelection = selection;
        refresh();
    }

    public void setOnDropDownListener(OnDropDownListener onDropDownListener) {
        mOnDropDownListener = onDropDownListener;
    }

    // Pushes the pending title/summary/items state into the bound views.
    // Each guard tolerates refresh() being called before onCreateView().
    @Override
    protected void refresh() {
        super.refresh();
        if (mTitle != null) {
            if (mTitleText != null) {
                mTitle.setText(mTitleText);
                mTitle.setVisibility(View.VISIBLE);
            } else {
                mTitle.setVisibility(View.GONE);
            }
        }
        if (mSummary != null) {
            if (mSummaryText != null) {
                mSummary.setText(mSummaryText);
                mSummary.setVisibility(View.VISIBLE);
            } else {
                mSummary.setVisibility(View.GONE);
            }
        }
        if (mParent != null && mItems != null) {
            // Rebuild one row per item; each row carries a "done" checkmark view.
            mParent.removeAllViews();
            mDoneViews.clear();
            for (int i = 0; i < mItems.size(); i++) {
                View item = LayoutInflater.from(mParent.getContext()).inflate(R.layout.rv_drop_down_item_view,
                        mParent, false);
                ((TextView) item.findViewById(R.id.title)).setText(mItems.get(i));
                mDoneViews.add(item.findViewById(R.id.done_image));
                item.findViewById(R.id.done_image).setVisibility(View.GONE);
                final int position = i;
                item.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        mSelection = position;
                        // NOTE(review): checkmarks start out GONE above but are toggled
                        // VISIBLE/INVISIBLE here -- confirm the mismatch is intentional.
                        for (int i = 0; i < mDoneViews.size(); i++) {
                            mDoneViews.get(i).setVisibility(position == i ?
                                    View.VISIBLE : View.INVISIBLE);
                        }
                        if (mOnDropDownListener != null) {
                            mOnDropDownListener.onSelect(DropDownView.this, position, mItems.get(position));
                        }
                    }
                });
                mParent.addView(item);
            }
            if (mSelection >= 0 && mSelection < mDoneViews.size()) {
                mDoneViews.get(mSelection).setVisibility(View.VISIBLE);
            }
        }
    }

    // Flips the arrow and animates the item container from 0 to its full height.
    // Cancels any animation already running so rapid taps do not fight each other.
    private void expand() {
        mExpanded = true;
        if (mArrow != null) {
            mArrow.animate().rotationX(0).setDuration(500).start();
            if (mAnimator != null) {
                mAnimator.cancel();
            }
            if (mItems == null) return;
            mAnimator = ValueAnimator.ofFloat(0, mItemHeight * mItems.size());
            mAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
                @Override
                public void onAnimationUpdate(ValueAnimator animation) {
                    setHeight(Math.round((float) animation.getAnimatedValue()));
                }
            });
            mAnimator.setDuration(500);
            mAnimator.start();
        }
    }

    // Flips the arrow back and animates the item container down to zero height.
    private void collapse() {
        mExpanded = false;
        if (mArrow != null) {
            mArrow.animate().rotationX(180).setDuration(500).start();
            if (mAnimator != null) {
                mAnimator.cancel();
            }
            if (mItems == null) return;
            mAnimator = ValueAnimator.ofFloat(mItemHeight * mItems.size(), 0);
            mAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
                @Override
                public void onAnimationUpdate(ValueAnimator animation) {
                    setHeight(Math.round((float) animation.getAnimatedValue()));
                }
            });
            mAnimator.setDuration(500);
            mAnimator.start();
        }
    }

    // Applies an absolute pixel height to the item container and notifies the
    // adapter that this row's layout changed.
    private void setHeight(int height) {
        if (mParent != null) {
            ViewGroup.LayoutParams params = mParent.getLayoutParams();
            params.height = height;
            mParent.requestLayout();
            viewChanged();
        }
    }
}
/* * Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You * may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. See accompanying * LICENSE file. */ package com.gemstone.gemfire.management.internal.web.http; import java.net.URI; import java.util.Collections; import java.util.List; import java.util.Map; import com.gemstone.gemfire.internal.lang.Filter; import com.gemstone.gemfire.internal.lang.ObjectUtils; import com.gemstone.gemfire.internal.util.CollectionUtils; import com.gemstone.gemfire.management.internal.web.domain.Link; import com.gemstone.gemfire.management.internal.web.util.UriUtils; import org.springframework.http.HttpEntity; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpMethod; import org.springframework.http.HttpRequest; import org.springframework.http.MediaType; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; import org.springframework.web.util.UriComponentsBuilder; import org.springframework.web.util.UriTemplate; /** * The ClientHttpRequest class is an abstraction modeling an HTTP request sent by a client and serves as the envelop * encapsulating all the necessary information (headers, request parameters, body, etc) to send the client's request * using HTTP. 
* <p/> * The required information for an HTTP request comes from a combination of the Link class containing the reference * uniquely identifying the resource or location of where the request will be sent, along with the HttpHeaders class * capturing the headers for the request as well as the generic container, HttpEntity to write the body of the request. * <p/> * This implementation of HttpRequest should not be confused with Spring's * org.springframework.http.client.ClientHttpRequest interface, which is often created by factory using a specific * HTTP client technology, like the Java HttpURLConnection or Apache's HTTP components, and so on. * <p/> * @author John Blum * @see java.net.URI * @see com.gemstone.gemfire.management.internal.web.http.HttpHeader * @see com.gemstone.gemfire.management.internal.web.http.HttpMethod * @see com.gemstone.gemfire.management.internal.web.domain.Link * @see org.springframework.http.HttpEntity * @see org.springframework.http.HttpHeaders * @see org.springframework.http.HttpMethod * @see org.springframework.http.HttpRequest * @see org.springframework.http.MediaType * @see org.springframework.util.MultiValueMap * @see org.springframework.web.util.UriComponentsBuilder * @since 7.5 */ @SuppressWarnings("unused") public class ClientHttpRequest implements HttpRequest { // the HTTP headers to be sent with the client's request message private final HttpHeaders requestHeaders = new HttpHeaders(); // the Link referencing the URI and method used with HTTP for the client's request private final Link link; // the mapping of request parameter name and values encoded for HTTP and sent with/in the client's request message private final MultiValueMap<String, Object> requestParameters = new LinkedMultiValueMap<String, Object>(); // the content/media or payload for the body of the client's HTTP request private Object content; /** * Constructs an instance of the ClientHttpRequest class initialized with the specified Link containing the URI * and method 
 * for the client's HTTP request.
 * <p/>
 * @param link the Link encapsulating the URI and method for the client's HTTP request.
 * @see com.gemstone.gemfire.management.internal.web.domain.Link
 */
public ClientHttpRequest(final Link link) {
  // NOTE a plain assert (not an explicit throw): this precondition is only
  // enforced when the JVM runs with assertions enabled (-ea).
  assert link != null : "The Link containing the URI and method for the client's HTTP request cannot be null!";
  this.link = link;
}

/**
 * Gets the HTTP headers that will be sent in the client's HTTP request message.
 * <p/>
 * @return the HTTP headers that will be sent in the client's HTTP request message.
 * @see org.springframework.http.HttpHeaders
 * @see org.springframework.http.HttpMessage#getHeaders()
 */
@Override
public HttpHeaders getHeaders() {
  return requestHeaders;
}

/**
 * Gets the Link containing the URI and method used to send the client's HTTP request.
 * <p/>
 * @return the Link encapsulating the URI and method for the client's HTTP request.
 * @see com.gemstone.gemfire.management.internal.web.domain.Link
 */
public final Link getLink() {
  return link;
}

/**
 * Gets the HTTP method indicating the operation to perform on the resource identified in the client's HTTP request.
 * This method converts GemFire's HttpMethod enumerated value from the Link into a corresponding Spring HttpMethod
 * enumerated value.
 * <p/>
 * @return a Spring HttpMethod enumerated value indicating the operation to perform on the resource identified in the
 * client's HTTP request.
 * @see com.gemstone.gemfire.management.internal.web.http.HttpMethod
 * @see com.gemstone.gemfire.management.internal.web.domain.Link#getMethod()
 * @see org.springframework.http.HttpMethod
 * @see org.springframework.http.HttpRequest#getMethod()
 */
@Override
public HttpMethod getMethod() {
  switch (getLink().getMethod()) {
    case DELETE:
      return HttpMethod.DELETE;
    case HEAD:
      return HttpMethod.HEAD;
    case OPTIONS:
      return HttpMethod.OPTIONS;
    case POST:
      return HttpMethod.POST;
    case PUT:
      return HttpMethod.PUT;
    case TRACE:
      return HttpMethod.TRACE;
    case GET:
    default:
      // Unrecognized methods deliberately fall back to GET.
      return HttpMethod.GET;
  }
}

/**
 * Determines whether this is an HTTP DELETE request.
 * <p/>
 * @return a boolean value indicating if the HTTP method is DELETE.
 * @see #getMethod()
 * @see org.springframework.http.HttpMethod#DELETE
 */
public boolean isDelete() {
  return HttpMethod.DELETE.equals(getMethod());
}

/**
 * Determines whether this is an HTTP GET request.
 * <p/>
 * @return a boolean value indicating if the HTTP method is GET.
 * @see #getMethod()
 * @see org.springframework.http.HttpMethod#GET
 */
public boolean isGet() {
  return HttpMethod.GET.equals(getMethod());
}

/**
 * Determines whether this is an HTTP POST request.
 * <p/>
 * @return a boolean value indicating if the HTTP method is POST.
 * @see #getMethod()
 * @see org.springframework.http.HttpMethod#POST
 */
public boolean isPost() {
  return HttpMethod.POST.equals(getMethod());
}

/**
 * Determines whether this is an HTTP PUT request.
 * <p/>
 * @return a boolean value indicating if the HTTP method is PUT.
 * @see #getMethod()
 * @see org.springframework.http.HttpMethod#PUT
 */
public boolean isPut() {
  return HttpMethod.PUT.equals(getMethod());
}

/**
 * Gets the request parameters that will be sent in the client's HTTP request message.
 * <p/>
 * @return a MultiValueMap of request parameters and values that will be sent in the client's HTTP request message.
 * @see org.springframework.util.MultiValueMap
 */
public MultiValueMap<String, Object> getParameters() {
  return requestParameters;
}

/**
 * Gets the path variables in the URI template. Note, this would be better placed in the Link class, but Link cannot
 * contain any Spring dependencies!
 * <p/>
 * @return a List of Strings for each path variable in the URI template.
 * @see #getURI()
 * @see org.springframework.web.util.UriTemplate
 */
protected List<String> getPathVariables() {
  // Decode the URI first so template placeholders ({var}) survive intact, then
  // let Spring's UriTemplate extract the variable names.
  return Collections.unmodifiableList(new UriTemplate(UriUtils.decode(getURI().toString())).getVariableNames());
}

/**
 * Gets the URI for the client's HTTP request. The URI may actually be an encoded URI template containing
 * path variables requiring expansion.
 * <p/>
 * @return the URI of the resource targeted in the request by the client using HTTP.
 * @see java.net.URI
 * @see org.springframework.http.HttpRequest#getURI()
 */
@Override
public URI getURI() {
  return getLink().getHref();
}

/**
 * Gets the URL for the client's HTTP request.
 * <p/>
 * @return a URL as a URI referring to the location of the resource requested by the client via HTTP.
 * @see #getURL(java.util.Map)
 * @see java.net.URI
 */
public URI getURL() {
  return getURL(Collections.<String, Object>emptyMap());
}

/**
 * Gets the URL for the client's HTTP request.
 * <p/>
 * @param uriVariables a Map of URI path variables to values in order to expand the URI template into a URI.
 * @return a URL as a URI referring to the location of the resource requested by the client via HTTP.
 * @see #getURI()
 * @see java.net.URI
 * @see org.springframework.web.util.UriComponents
 * @see org.springframework.web.util.UriComponentsBuilder
 */
public URI getURL(final Map<String, ?> uriVariables) {
  final UriComponentsBuilder uriBuilder = UriComponentsBuilder.fromUriString(UriUtils.decode(getURI().toString()));

  if (isGet() || isDelete()) {
    // Only GET/DELETE requests carry their parameters in the URL; POST/PUT
    // carry them in the request body instead (see createRequestEntity).
    final List<String> pathVariables = getPathVariables();

    // get query parameters to append to the URI/URL based on the request parameters that are not path variables...
    final Map<String, List<Object>> queryParameters = CollectionUtils.removeKeys(
      new LinkedMultiValueMap<String, Object>(getParameters()),
      new Filter<Map.Entry<String, List<Object>>>() {
        @Override
        public boolean accept(final Map.Entry<String, List<Object>> entry) {
          return !pathVariables.contains(entry.getKey());
        }
      });

    for (final String queryParameterName : queryParameters.keySet()) {
      uriBuilder.queryParam(queryParameterName, getParameters().get(queryParameterName).toArray());
    }
  }

  return uriBuilder.build().expand(uriVariables).encode().toUri();
}

/**
 * Gets the HTTP request entity encapsulating the headers and body of the HTTP message. The body of the HTTP request
 * message will consist of an URL encoded application form (a mapping of key-value pairs) for POST/PUT HTTP requests.
 * <p/>
 * @return an HttpEntity with the headers and body for the HTTP request message.
* @see #getParameters() * @see org.springframework.http.HttpEntity * @see org.springframework.http.HttpHeaders */ public HttpEntity<?> createRequestEntity() { if (isPost() || isPut()) { // NOTE HTTP request parameters take precedence over HTTP message body content/media if (!getParameters().isEmpty()) { getHeaders().setContentType(determineContentType(MediaType.APPLICATION_FORM_URLENCODED)); return new HttpEntity<MultiValueMap<String, Object>>(getParameters(), getHeaders()); } else { // NOTE the HTTP "Content-Type" header will be determined and set by the appropriate HttpMessageConverter // based on the Class type of the "content". return new HttpEntity<Object>(getContent(), getHeaders()); } } else { return new HttpEntity<Object>(getHeaders()); } } /** * Tries to determine the content/media type of this HTTP request iff the HTTP "Content-Type" header was not * explicitly set by the user, otherwise the user provided value is used. If the "Content-Type" HTTP header value * is null, then the content/media/payload of this HTTP request is inspected to determine the content type. * <p/> * The simplest evaluation sets the content type to "application/x-www-form-urlencoded" if this is a POST or PUT * HTTP request, unless any request parameter value is determined to have multiple parts, the the content type will be * "multipart/form-data". * <p/> * @param defaultContentType the default content/media type to use when the content type cannot be determined from * this HTTP request. * @return a MediaType for the value of the HTTP Content-Type header as determined from this HTTP request. 
* @see #getHeaders() * @see org.springframework.http.HttpHeaders#getContentType() * @see org.springframework.http.MediaType */ protected MediaType determineContentType(final MediaType defaultContentType) { MediaType contentType = getHeaders().getContentType(); // if the content type HTTP header was not explicitly set, try to determine the media type from the content body // of the HTTP request if (contentType == null) { if (isPost() || isPut()) { OUT : for (final String name : getParameters().keySet()) { for (final Object value : getParameters().get(name)) { if (value != null && !(value instanceof String)) { contentType = MediaType.MULTIPART_FORM_DATA; break OUT; } } } // since this is a POST/PUT HTTP request, default the content/media type to "application/x-www-form-urlencoded" contentType = ObjectUtils.defaultIfNull(contentType, MediaType.APPLICATION_FORM_URLENCODED); } else { // NOTE the "Content-Type" HTTP header is not applicable to GET/DELETE and other methods of HTTP requests // since there is typically no content (media/payload/request body/etc) to send. Any request parameters // are encoded in the URL as query parameters. } } return ObjectUtils.defaultIfNull(contentType, defaultContentType); } public Object getContent() { return content; } public void setContent(final Object content) { this.content = content; } /** * Adds 1 or more values for the specified HTTP header. * <p/> * @param headerName a String specifying the name of the HTTP header. * @param headerValues the array of values to set for the HTTP header. * @see org.springframework.http.HttpHeaders#add(String, String) */ public void addHeaderValues(final String headerName, final String... headerValues) { if (headerValues != null) { for (final String headerValue : headerValues) { getHeaders().add(headerName, headerValue); } } } /** * Gets the first value for the specified HTTP header or null if the HTTP header is not set. * <p/> * @param headerName a String specifying the name of the HTTP header. 
* @return the first value in the list of values for the HTTP header, or null if the HTTP header is not set. * @see org.springframework.http.HttpHeaders#getFirst(String) */ public String getHeaderValue(final String headerName) { return getHeaders().getFirst(headerName); } /** * Gets all values for the specified HTTP header or an empty List if the HTTP header is not set. * <p/> * @param headerName a String specifying the name of the HTTP header. * @return a list of String values for the specified HTTP header. * @see org.springframework.http.HttpHeaders#get(Object) */ public List<String> getHeaderValues(final String headerName) { return Collections.unmodifiableList(getHeaders().get(headerName)); } /** * Sets the specified HTTP header to the given value, overriding any previously set values for the HTTP header. * <p/> * @param headerName a String specifying the name of the HTTP header. * @param headerValue a String containing the value of the HTTP header. * @see org.springframework.http.HttpHeaders#set(Object, Object) */ public void setHeader(final String headerName, final String headerValue) { getHeaders().set(headerName, headerValue); } /** * Adds 1 or more parameter values to the HTTP request. * <p/> * @param requestParameterName a String specifying the name of the HTTP request parameter. * @param requestParameterValues the array of values to set for the HTTP request parameter. * @see org.springframework.util.MultiValueMap#add(Object, Object) */ public void addRequestParameterValues(final String requestParameterName, final Object... requestParameterValues) { if (requestParameterValues != null) { for (final Object requestParameterValue : requestParameterValues) { getParameters().add(requestParameterName, requestParameterValue); } } } /** * Gets the first value for the specified HTTP request parameter or null if the HTTP request parameter is not set. * <p/> * @param requestParameterName a String specifying the name of the HTTP request parameter. 
* @return the first value in the list of values for the HTTP request parameter, or null if the HTTP request parameter * is not set. * @see org.springframework.util.MultiValueMap#getFirst(Object) */ public Object getRequestParameterValue(final String requestParameterName) { return getParameters().getFirst(requestParameterName); } /** * Gets all values for the specified HTTP request parameter or an empty List if the HTTP request parameter is not set. * <p/> * @param requestParameterName a String specifying the name of the HTTP request parameter. * @return a list of String values for the specified HTTP request parameter. * @see org.springframework.util.MultiValueMap#get(Object) */ public List<Object> getRequestParameterValues(final String requestParameterName) { return Collections.unmodifiableList(getParameters().get(requestParameterName)); } /** * Sets the specified HTTP request parameter to the given value, overriding any previously set values for * the HTTP request parameter. * <p/> * @param name a String specifying the name of the HTTP request parameter. * @param value a String containing the value of the HTTP request parameter. * @see org.springframework.util.MultiValueMap#set(Object, Object) */ public void setRequestParameter(final String name, final Object value) { getParameters().set(name, value); } }
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/example/library/v1/library.proto
// NOTE(review): generated code — comments below are review annotations only; do not hand-edit logic.

package com.google.example.library.v1;

/**
 * <pre>
 * Request message for LibraryService.DeleteShelf.
 * </pre>
 *
 * Protobuf type {@code google.example.library.v1.DeleteShelfRequest}
 */
public final class DeleteShelfRequest extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.example.library.v1.DeleteShelfRequest)
    DeleteShelfRequestOrBuilder {
  // Use DeleteShelfRequest.newBuilder() to construct.
  private DeleteShelfRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private DeleteShelfRequest() {
    name_ = "";
  }

  // NOTE(review): unknown fields are discarded — this always returns the default (empty) set.
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }

  // Wire-format parsing constructor: reads tags until EOF (tag 0), keeping only field 1 (name).
  private DeleteShelfRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    int mutable_bitField0_ = 0;
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!input.skipField(tag)) {
              done = true;
            }
            break;
          }
          // NOTE(review): `case 10` appears after `default` — legal Java and behaviorally
          // equivalent to the usual ordering, since case dispatch is order-independent.
          case 10: {
            java.lang.String s = input.readStringRequireUtf8();

            name_ = s;
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.example.library.v1.LibraryProto.internal_static_google_example_library_v1_DeleteShelfRequest_descriptor;
  }

  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.example.library.v1.LibraryProto.internal_static_google_example_library_v1_DeleteShelfRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.example.library.v1.DeleteShelfRequest.class, com.google.example.library.v1.DeleteShelfRequest.Builder.class);
  }

  public static final int NAME_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; lazily converted and cached on access.
  private volatile java.lang.Object name_;
  /**
   * <pre>
   * The name of the shelf to delete.
   * </pre>
   *
   * <code>optional string name = 1;</code>
   */
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // cache the decoded String so subsequent calls skip the UTF-8 decode
      name_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * The name of the shelf to delete.
   * </pre>
   *
   * <code>optional string name = 1;</code>
   */
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      // cache the encoded ByteString for future calls
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    if (!getNameBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
  }

  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!getNameBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    memoizedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.example.library.v1.DeleteShelfRequest)) {
      return super.equals(obj);
    }
    com.google.example.library.v1.DeleteShelfRequest other = (com.google.example.library.v1.DeleteShelfRequest) obj;

    boolean result = true;
    result = result && getName()
        .equals(other.getName());
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptorForType().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.example.library.v1.DeleteShelfRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.example.library.v1.DeleteShelfRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.example.library.v1.DeleteShelfRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.example.library.v1.DeleteShelfRequest parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.example.library.v1.DeleteShelfRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.example.library.v1.DeleteShelfRequest parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.example.library.v1.DeleteShelfRequest parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.example.library.v1.DeleteShelfRequest parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.example.library.v1.DeleteShelfRequest parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.example.library.v1.DeleteShelfRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.example.library.v1.DeleteShelfRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Request message for LibraryService.DeleteShelf.
   * </pre>
   *
   * Protobuf type {@code google.example.library.v1.DeleteShelfRequest}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.example.library.v1.DeleteShelfRequest)
      com.google.example.library.v1.DeleteShelfRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.example.library.v1.LibraryProto.internal_static_google_example_library_v1_DeleteShelfRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.example.library.v1.LibraryProto.internal_static_google_example_library_v1_DeleteShelfRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.example.library.v1.DeleteShelfRequest.class, com.google.example.library.v1.DeleteShelfRequest.Builder.class);
    }

    // Construct using com.google.example.library.v1.DeleteShelfRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    public Builder clear() {
      super.clear();
      name_ = "";

      return this;
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.example.library.v1.LibraryProto.internal_static_google_example_library_v1_DeleteShelfRequest_descriptor;
    }

    public com.google.example.library.v1.DeleteShelfRequest getDefaultInstanceForType() {
      return com.google.example.library.v1.DeleteShelfRequest.getDefaultInstance();
    }

    public com.google.example.library.v1.DeleteShelfRequest build() {
      com.google.example.library.v1.DeleteShelfRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    public com.google.example.library.v1.DeleteShelfRequest buildPartial() {
      com.google.example.library.v1.DeleteShelfRequest result = new com.google.example.library.v1.DeleteShelfRequest(this);
      result.name_ = name_;
      onBuilt();
      return result;
    }

    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.example.library.v1.DeleteShelfRequest) {
        return mergeFrom((com.google.example.library.v1.DeleteShelfRequest)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.example.library.v1.DeleteShelfRequest other) {
      if (other == com.google.example.library.v1.DeleteShelfRequest.getDefaultInstance()) return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        onChanged();
      }
      // NOTE(review): this second onChanged() appears redundant (already invoked above when
      // the name changed) but is harmless; preserved as emitted by the generator.
      onChanged();
      return this;
    }

    public final boolean isInitialized() {
      return true;
    }

    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.example.library.v1.DeleteShelfRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.example.library.v1.DeleteShelfRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        // merge whatever was successfully parsed even when an exception is propagated
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    // Holds either a String or a ByteString, same lazy-conversion scheme as the message field.
    private java.lang.Object name_ = "";
    /**
     * <pre>
     * The name of the shelf to delete.
     * </pre>
     *
     * <code>optional string name = 1;</code>
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * The name of the shelf to delete.
     * </pre>
     *
     * <code>optional string name = 1;</code>
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * The name of the shelf to delete.
     * </pre>
     *
     * <code>optional string name = 1;</code>
     */
    public Builder setName(
        java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      name_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The name of the shelf to delete.
     * </pre>
     *
     * <code>optional string name = 1;</code>
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The name of the shelf to delete.
     * </pre>
     *
     * <code>optional string name = 1;</code>
     */
    public Builder setNameBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      name_ = value;
      onChanged();
      return this;
    }

    // Unknown fields are intentionally dropped by this builder (no-op setter/merger).
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }

    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }

    // @@protoc_insertion_point(builder_scope:google.example.library.v1.DeleteShelfRequest)
  }

  // @@protoc_insertion_point(class_scope:google.example.library.v1.DeleteShelfRequest)
  private static final com.google.example.library.v1.DeleteShelfRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.example.library.v1.DeleteShelfRequest();
  }

  public static com.google.example.library.v1.DeleteShelfRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<DeleteShelfRequest>
      PARSER = new com.google.protobuf.AbstractParser<DeleteShelfRequest>() {
    public DeleteShelfRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
        return new DeleteShelfRequest(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<DeleteShelfRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<DeleteShelfRequest> getParserForType() {
    return PARSER;
  }

  public com.google.example.library.v1.DeleteShelfRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
/*
 * Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you
 * may not use this file except in compliance with the License. You
 * may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License. See accompanying
 * LICENSE file.
 */

package com.pivotal.gemfirexd.internal.engine.hadoop.mapreduce;

import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

import com.gemstone.gemfire.internal.AvailablePort;
import com.pivotal.gemfirexd.FabricServer;
import com.pivotal.gemfirexd.FabricServiceManager;
import com.pivotal.gemfirexd.hadoop.mapreduce.Key;
import com.pivotal.gemfirexd.hadoop.mapreduce.RowOutputFormat;
import com.pivotal.gemfirexd.jdbc.JdbcTestBase;

/**
 * Integration tests for the GemFireXD Hadoop {@code RowOutputFormat} (both the
 * MR2 {@code mapreduce} and MR1 {@code mapred} variants): each test boots an
 * embedded fabric server, writes rows through the output format and verifies
 * them over JDBC.
 */
public class GfxdOutputFormatTest extends JdbcTestBase {

  /**
   * Tests whether writing data through the MR2 output format creates data in a
   * gemfirexd instance.
   */
  public void testMR2OutputWriter() throws Exception {
    FabricServer server = FabricServiceManager.getFabricServerInstance();
    Properties props = new Properties();
    int mcastPort = AvailablePort.getRandomAvailablePort(AvailablePort.JGROUPS);
    props.setProperty("mcast-port", String.valueOf(mcastPort));
    server.start(props);
    int clientPort = AvailablePort.getRandomAvailablePort(AvailablePort.JGROUPS);
    server.startNetworkServer("localhost", clientPort, props);

    Connection conn;
    Statement st;
    conn = DriverManager.getConnection("jdbc:gemfirexd://localhost:" + clientPort + "/");
    st = conn.createStatement();
    st.execute("create schema emp");
    st.execute("set schema emp");
    st.execute("create table emp.usrtable (col1 int primary key, col2 varchar(100))");

    // Value object whose setCol* methods bind columns into the generated PUT statement.
    class DataObject {
      int col1;
      String col2;

      public DataObject(int col1, String col2) {
        this.col1 = col1;
        this.col2 = col2;
      }

      public void setCol1(int index, PreparedStatement ps) throws SQLException {
        ps.setInt(index, col1);
      }

      public void setCol2(int i, PreparedStatement ps) throws SQLException {
        ps.setString(i, col2);
      }

      public String toString() {
        return col1 + "-" + col2;
      }
    }

    Configuration conf = new Configuration();
    conf.set(RowOutputFormat.OUTPUT_TABLE, "emp.usrtable");
    conf.set(RowOutputFormat.OUTPUT_URL, "jdbc:gemfirexd://localhost:" + clientPort + "/");

    RowOutputFormat<DataObject> format = new RowOutputFormat<DataObject>();
    TaskAttemptContextImpl task = new TaskAttemptContextImpl(conf, new TaskAttemptID());
    format.checkOutputSpecs(task);
    RecordWriter<Key, DataObject> writer = format.getRecordWriter(task);
    writer.write(new Key(), new DataObject(1, "1"));
    writer.write(new Key(), new DataObject(2, "2"));
    writer.close(task);

    // verify both rows landed in the table
    ResultSet rs = st.executeQuery("select * from emp.usrtable");
    ArrayList<String> rows = new ArrayList<String>();
    while (rs.next()) {
      rows.add(rs.getInt(1) + "-" + rs.getString(2));
    }
    assertEquals(2, rows.size());
    assertTrue(rows.contains("1-1"));
    assertTrue(rows.contains("2-2"));

    st.close();
    conn.close();
    server.stop(props);
  }

  /**
   * Tests whether writing data through the MR1 output format creates data in a
   * gemfirexd instance.
   */
  public void testMR1OutputWriter() throws Exception {
    FabricServer server = FabricServiceManager.getFabricServerInstance();
    Properties props = new Properties();
    int mcastPort = AvailablePort.getRandomAvailablePort(AvailablePort.JGROUPS);
    props.setProperty("mcast-port", String.valueOf(mcastPort));
    server.start(props);
    int clientPort = AvailablePort.getRandomAvailablePort(AvailablePort.JGROUPS);
    server.startNetworkServer("localhost", clientPort, props);

    Connection conn;
    Statement st;
    conn = DriverManager.getConnection("jdbc:gemfirexd://localhost:" + clientPort + "/");
    st = conn.createStatement();
    st.execute("create schema emp");
    st.execute("set schema emp");
    st.execute("create table emp.usrtable (col1 int, col2 varchar(100))");

    class DataObject {
      int col1;
      String col2;

      public DataObject(int col1, String col2) {
        this.col1 = col1;
        this.col2 = col2;
      }

      public void setCol1(int index, PreparedStatement ps) throws SQLException {
        ps.setInt(index, col1);
      }

      public void setCol2(int i, PreparedStatement ps) throws SQLException {
        ps.setString(i, col2);
      }

      public String toString() {
        return col1 + "-" + col2;
      }
    }

    com.pivotal.gemfirexd.hadoop.mapred.RowOutputFormat<DataObject> mr1Instance;
    mr1Instance = new com.pivotal.gemfirexd.hadoop.mapred.RowOutputFormat<DataObject>();
    JobConf conf = new JobConf();
    FileSystem fs = FileSystem.get(conf);
    // FIX: access static configuration constants via the class, not an instance
    conf.set(com.pivotal.gemfirexd.hadoop.mapred.RowOutputFormat.OUTPUT_TABLE, "emp.usrtable");
    conf.set(com.pivotal.gemfirexd.hadoop.mapred.RowOutputFormat.OUTPUT_URL,
        "jdbc:gemfirexd://localhost:" + clientPort + "/");

    mr1Instance.checkOutputSpecs(FileSystem.get(conf), conf);
    org.apache.hadoop.mapred.RecordWriter<Key, DataObject> writer;
    writer = mr1Instance.getRecordWriter(fs, conf, "name", null);
    writer.write(new Key(), new DataObject(1, "1"));
    writer.write(new Key(), new DataObject(2, "2"));
    writer.close(null);

    ResultSet rs = st.executeQuery("select * from emp.usrtable");
    ArrayList<String> rows = new ArrayList<String>();
    while (rs.next()) {
      rows.add(rs.getInt(1) + "-" + rs.getString(2));
    }
    assertEquals(2, rows.size());
    assertTrue(rows.contains("1-1"));
    assertTrue(rows.contains("2-2"));

    st.close();
    conn.close();
    server.stop(props);
  }

  /**
   * Test for batched execution: rows are flushed in batches (default 10000,
   * then an explicit batch size of 10), with the remainder flushed on close.
   */
  public void testMR1BatchWriter() throws Exception {
    FabricServer server = FabricServiceManager.getFabricServerInstance();
    Properties props = new Properties();
    int mcastPort = AvailablePort.getRandomAvailablePort(AvailablePort.JGROUPS);
    props.setProperty("mcast-port", String.valueOf(mcastPort));
    server.start(props);
    int clientPort = AvailablePort.getRandomAvailablePort(AvailablePort.JGROUPS);
    server.startNetworkServer("localhost", clientPort, props);

    Connection conn;
    Statement st;
    conn = DriverManager.getConnection("jdbc:gemfirexd://localhost:" + clientPort + "/");
    st = conn.createStatement();
    st.execute("create schema emp");
    st.execute("set schema emp");
    st.execute("create table emp.usrtable (col1 int, col2 varchar(100))");

    class DataObject {
      int col1;
      String col2;

      public DataObject(int col1, String col2) {
        this.col1 = col1;
        this.col2 = col2;
      }

      public void setCol1(int index, PreparedStatement ps) throws SQLException {
        ps.setInt(index, col1);
      }

      public void setCol2(int i, PreparedStatement ps) throws SQLException {
        ps.setString(i, col2);
      }

      public String toString() {
        return col1 + "-" + col2;
      }
    }

    com.pivotal.gemfirexd.hadoop.mapred.RowOutputFormat<DataObject> mr1Instance;
    mr1Instance = new com.pivotal.gemfirexd.hadoop.mapred.RowOutputFormat<DataObject>();
    JobConf conf = new JobConf();
    FileSystem fs = FileSystem.get(conf);
    // FIX: access static configuration constants via the class, not an instance
    conf.set(com.pivotal.gemfirexd.hadoop.mapred.RowOutputFormat.OUTPUT_TABLE, "emp.usrtable");
    conf.set(com.pivotal.gemfirexd.hadoop.mapred.RowOutputFormat.OUTPUT_URL,
        "jdbc:gemfirexd://localhost:" + clientPort + "/");

    mr1Instance.checkOutputSpecs(FileSystem.get(conf), conf);
    org.apache.hadoop.mapred.RecordWriter<Key, DataObject> writer;
    writer = mr1Instance.getRecordWriter(fs, conf, "name", null);

    // default batch size: a flush of 10000 rows happens exactly at each multiple of 10000
    for (int i = 1; i <= 20005; i++) {
      writer.write(new Key(), new DataObject(i, "" + i));
      if (i % 10000 == 0) {
        assertEquals(10000, OutputFormatUtil.resultCountTest);
      } else {
        assertEquals(0, OutputFormatUtil.resultCountTest);
      }
    }
    // close flushes the 5 leftover rows
    writer.close(null);
    assertEquals(5, OutputFormatUtil.resultCountTest);

    ResultSet rs = st.executeQuery("select * from emp.usrtable");
    ArrayList<String> rows = new ArrayList<String>();
    while (rs.next()) {
      rows.add(rs.getInt(1) + "-" + rs.getString(2));
    }
    assertEquals(20005, rows.size());

    // explicit batch size of 10
    conf.set(com.pivotal.gemfirexd.hadoop.mapred.RowOutputFormat.OUTPUT_BATCH_SIZE, "10");
    writer = mr1Instance.getRecordWriter(fs, conf, "name", null);
    for (int i = 50001; i <= 50025; i++) {
      writer.write(new Key(), new DataObject(i, "" + i));
      if (i % 10 == 0) {
        assertEquals(10, OutputFormatUtil.resultCountTest);
      } else {
        assertEquals(0, OutputFormatUtil.resultCountTest);
      }
    }
    writer.close(null);
    assertEquals(5, OutputFormatUtil.resultCountTest);

    rs = st.executeQuery("select * from emp.usrtable");
    rows = new ArrayList<String>();
    while (rs.next()) {
      rows.add(rs.getInt(1) + "-" + rs.getString(2));
    }
    assertEquals(20030, rows.size());

    st.close();
    conn.close();
    server.stop(props);
  }

  /**
   * Tests user's class parsing and query creation: only public {@code setXxx(int,
   * PreparedStatement)} methods should be recognized as column setters.
   */
  public void testCreateQueryFromObj() throws Exception {
    class DataObject {
      public void setCol1(int index, PreparedStatement ps) {
      }

      public void setA(int i, PreparedStatement ps) {
      }

      // none of the following should be picked up as column setters
      private void setPrivate(int i, PreparedStatement ps) {
      }

      private void SetCol2(int i, PreparedStatement ps) {
      }

      public void set(int i, PreparedStatement ps) {
      }

      public void no(int i, PreparedStatement ps) {
      }

      public void setNotThis1(PreparedStatement ps, int i) {
      }

      public void setNotThis1(int i, PreparedStatement ps, Object o) {
      }
    }

    // NOTE: the Configuration previously built here was never used by the assertions below,
    // so it has been removed.
    DataObject obj = new DataObject();
    OutputFormatUtil util = new OutputFormatUtil();
    List<Method> columns = util.spotTableColumnSetters(obj);
    assertEquals(2, columns.size());
    assertTrue(columns.contains(DataObject.class.getMethod("setCol1", int.class, PreparedStatement.class)));
    assertTrue(columns.contains(DataObject.class.getMethod("setA", int.class, PreparedStatement.class)));

    String query = util.createQuery("table", columns);
    assertEquals("PUT INTO table(col1, a) VALUES (?, ?);", query);
  }

  public GfxdOutputFormatTest(String name) {
    super(name);
  }
}
/* * Copyright 2017 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing permissions and * limitations under the License. */ package com.firebase.ui.auth.ui.phone; import android.app.PendingIntent; import android.content.Context; import android.content.Intent; import android.content.IntentSender; import android.os.Bundle; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.annotation.RestrictTo; import android.support.v4.app.FragmentActivity; import android.text.TextUtils; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.Button; import android.widget.EditText; import android.widget.TextView; import com.firebase.ui.auth.AuthUI; import com.firebase.ui.auth.R; import com.firebase.ui.auth.ui.ExtraConstants; import com.firebase.ui.auth.ui.FlowParameters; import com.firebase.ui.auth.ui.FragmentBase; import com.firebase.ui.auth.ui.ImeHelper; import com.firebase.ui.auth.util.GoogleApiHelper; import com.google.android.gms.auth.api.Auth; import com.google.android.gms.auth.api.credentials.Credential; import com.google.android.gms.auth.api.credentials.CredentialPickerConfig; import com.google.android.gms.auth.api.credentials.HintRequest; import com.google.android.gms.common.ConnectionResult; import com.google.android.gms.common.api.GoogleApiClient; import java.util.Locale; /** * Displays country selector and phone number input form for users */ 
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
public class VerifyPhoneNumberFragment extends FragmentBase implements View.OnClickListener {
    public static final String TAG = "VerifyPhoneFragment";

    // Request code used when launching the SmartLock phone-number hint picker.
    private static final int RC_PHONE_HINT = 22;

    // Application context captured in onAttach(); used for phone-number formatting
    // so we do not hold a reference to the Activity.
    private Context mAppContext;

    private CountryListSpinner mCountryListSpinner;
    private EditText mPhoneEditText;
    private TextView mErrorEditText;
    private Button mSendCodeButton;
    // The hosting activity, which performs the actual phone verification.
    // Assigned in onActivityCreated() after an instanceof check.
    private PhoneVerificationActivity mVerifier;
    private TextView mSmsTermsText;

    /**
     * Creates a new instance of this fragment with the flow parameters and
     * optional defaults (phone number / country code) packed into its arguments.
     *
     * @param flowParameters the sign-in flow configuration
     * @param params optional defaults bundle (may carry AuthUI.EXTRA_DEFAULT_* keys)
     * @return a configured fragment instance
     */
    public static VerifyPhoneNumberFragment newInstance(
            FlowParameters flowParameters, Bundle params) {
        VerifyPhoneNumberFragment fragment = new VerifyPhoneNumberFragment();

        Bundle args = new Bundle();
        args.putParcelable(ExtraConstants.EXTRA_FLOW_PARAMS, flowParameters);
        args.putBundle(ExtraConstants.EXTRA_PARAMS, params);
        fragment.setArguments(args);
        return fragment;
    }

    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        mAppContext = context.getApplicationContext();
    }

    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater,
                             @Nullable ViewGroup container,
                             @Nullable Bundle savedInstanceState) {
        View v = inflater.inflate(R.layout.fui_phone_layout, container, false);

        mCountryListSpinner = v.findViewById(R.id.country_list);
        mPhoneEditText = v.findViewById(R.id.phone_number);
        mErrorEditText = v.findViewById(R.id.phone_number_error);
        mSendCodeButton = v.findViewById(R.id.send_code);
        mSmsTermsText = v.findViewById(R.id.send_sms_tos);

        // Pressing "done" on the keyboard behaves like tapping the send-code button.
        ImeHelper.setImeOnDoneListener(mPhoneEditText, new ImeHelper.DonePressedListener() {
            @Override
            public void onDonePressed() {
                onNext();
            }
        });

        FragmentActivity parentActivity = getActivity();
        parentActivity.setTitle(getString(R.string.fui_verify_phone_number_title));
        setupCountrySpinner();
        setupSendCodeButton();
        setupTerms();
        return v;
    }

    // Formats the SMS terms-of-service text with the verify-button label.
    private void setupTerms() {
        final String verifyPhoneButtonText = getString(R.string.fui_verify_phone_number);
        final String terms = getString(R.string.fui_sms_terms_of_service,
                verifyPhoneButtonText);
        mSmsTermsText.setText(terms);
    }

    @Override
    public void onActivityCreated(@Nullable Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // Set listener
        if (!(getActivity() instanceof PhoneVerificationActivity)) {
            throw new IllegalStateException("Activity must implement PhoneVerificationHandler");
        }
        mVerifier = (PhoneVerificationActivity) getActivity();

        // Only pre-fill on first creation; on restore the views keep their state.
        if (savedInstanceState != null) {
            return;
        }

        // Check for phone
        // It is assumed that the phone number that are being wired in via Credential Selector
        // are e164 since we store it.
        Bundle params = getArguments().getBundle(ExtraConstants.EXTRA_PARAMS);
        String phone = null;
        String countryCode = null;
        String nationalNumber = null;
        if (params != null) {
            phone = params.getString(AuthUI.EXTRA_DEFAULT_PHONE_NUMBER);
            countryCode = params.getString(AuthUI.EXTRA_DEFAULT_COUNTRY_CODE);
            nationalNumber = params.getString(AuthUI.EXTRA_DEFAULT_NATIONAL_NUMBER);
        }

        // Preference order: (country code + national number) > full phone > SmartLock hint.
        if (!TextUtils.isEmpty(countryCode) && !TextUtils.isEmpty(nationalNumber)) {
            // User supplied country code & national number
            PhoneNumber phoneNumber = PhoneNumberUtils.getPhoneNumber(countryCode, nationalNumber);
            setPhoneNumber(phoneNumber);
            setCountryCode(phoneNumber);
        } else if (!TextUtils.isEmpty(phone)) {
            // User supplied full phone number
            PhoneNumber phoneNumber = PhoneNumberUtils.getPhoneNumber(phone);
            setPhoneNumber(phoneNumber);
            setCountryCode(phoneNumber);
        } else if (getFlowParams().enableHints) {
            // Try SmartLock phone autocomplete hint
            showPhoneAutoCompleteHint();
        }
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == RC_PHONE_HINT) {
            if (data != null) {
                Credential cred = data.getParcelableExtra(Credential.EXTRA_KEY);
                if (cred != null) {
                    // Hint selector does not always return phone numbers in e164 format.
                    // To accommodate either case, we normalize to e164 with best effort
                    final String unformattedPhone = cred.getId();
                    final String formattedPhone =
                            PhoneNumberUtils.formatPhoneNumberUsingCurrentCountry(unformattedPhone,
                                    mAppContext);
                    if (formattedPhone == null) {
                        // Normalization failed; leave the form untouched rather than
                        // populating it with an unusable number.
                        Log.e(TAG, "Unable to normalize phone number from hint selector:"
                                + unformattedPhone);
                        return;
                    }
                    final PhoneNumber phoneNumberObj =
                            PhoneNumberUtils.getPhoneNumber(formattedPhone);
                    setPhoneNumber(phoneNumberObj);
                    setCountryCode(phoneNumberObj);
                    // A hint was explicitly chosen, so advance immediately.
                    onNext();
                }
            }
        }
    }

    @Override
    public void onClick(View v) {
        onNext();
    }

    // Validates the current input and either shows an inline error or hands the
    // number to the hosting activity for verification.
    private void onNext() {
        String phoneNumber = getPseudoValidPhoneNumber();
        if (phoneNumber == null) {
            mErrorEditText.setText(R.string.fui_invalid_phone_number);
        } else {
            mVerifier.verifyPhoneNumber(phoneNumber, false);
        }
    }

    /**
     * Combines the selected country and the typed digits into a formatted number.
     *
     * @return the formatted phone number, or null when the input field is empty
     */
    @Nullable
    private String getPseudoValidPhoneNumber() {
        final CountryInfo countryInfo = (CountryInfo) mCountryListSpinner.getTag();
        final String everythingElse = mPhoneEditText.getText().toString();

        if (TextUtils.isEmpty(everythingElse)) {
            return null;
        }

        return PhoneNumberUtils.formatPhoneNumber(everythingElse, countryInfo);
    }

    private void setupCountrySpinner() {
        //clear error when spinner is clicked on
        mCountryListSpinner.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                mErrorEditText.setText("");
            }
        });
    }

    private void setupSendCodeButton() {
        mSendCodeButton.setOnClickListener(this);
    }

    // Launches the SmartLock phone-number hint picker; failures are logged, not fatal.
    private void showPhoneAutoCompleteHint() {
        try {
            startIntentSenderForResult(getPhoneHintIntent().getIntentSender(), RC_PHONE_HINT);
        } catch (IntentSender.SendIntentException e) {
            Log.e(TAG, "Unable to start hint intent", e);
        }
    }

    // Builds the PendingIntent for the Credentials API hint picker, restricted to
    // phone-number identifiers (email identifiers disabled).
    private PendingIntent getPhoneHintIntent() {
        GoogleApiClient client = new GoogleApiClient.Builder(getContext())
                .addApi(Auth.CREDENTIALS_API)
                .enableAutoManage(
                        getActivity(),
                        GoogleApiHelper.getSafeAutoManageId(),
                        new GoogleApiClient.OnConnectionFailedListener() {
                            @Override
                            public void onConnectionFailed(@NonNull ConnectionResult
                                                                   connectionResult) {
                                Log.e(TAG, "Client connection failed: "
                                        + connectionResult.getErrorMessage());
                            }
                        })
                .build();

        HintRequest hintRequest = new HintRequest.Builder()
                .setHintPickerConfig(
                        new CredentialPickerConfig.Builder().setShowCancelButton(true).build())
                .setPhoneNumberIdentifierSupported(true)
                .setEmailAddressIdentifierSupported(false)
                .build();

        return Auth.CredentialsApi.getHintPickerIntent(client, hintRequest);
    }

    // Fills the number field (and moves the cursor to the end) when the number is valid.
    private void setPhoneNumber(PhoneNumber phoneNumber) {
        if (PhoneNumber.isValid(phoneNumber)) {
            mPhoneEditText.setText(phoneNumber.getPhoneNumber());
            mPhoneEditText.setSelection(phoneNumber.getPhoneNumber().length());
        }
    }

    // Selects the matching country in the spinner when the country info is valid.
    private void setCountryCode(PhoneNumber phoneNumber) {
        if (PhoneNumber.isCountryValid(phoneNumber)) {
            mCountryListSpinner.setSelectedForCountry(new Locale("", phoneNumber.getCountryIso()),
                    phoneNumber.getCountryCode());
        }
    }

    // Package-private so the hosting activity can surface verification errors inline.
    void showError(String e) {
        mErrorEditText.setText(e);
    }
}
package org.docksidestage.hangar.dbflute.cbean.cq.bs;

import java.util.*;

import org.dbflute.cbean.*;
import org.dbflute.cbean.chelper.*;
import org.dbflute.cbean.ckey.*;
import org.dbflute.cbean.coption.*;
import org.dbflute.cbean.cvalue.ConditionValue;
import org.dbflute.cbean.ordering.*;
import org.dbflute.cbean.scoping.*;
import org.dbflute.cbean.sqlclause.SqlClause;
import org.dbflute.dbmeta.DBMetaProvider;
import org.docksidestage.hangar.dbflute.allcommon.*;
import org.docksidestage.hangar.dbflute.cbean.*;
import org.docksidestage.hangar.dbflute.cbean.cq.*;

/**
 * The abstract condition-query of WHITE_BASE_ONE01_SEA_BROADWAY.
 * <p>NOTE(review): generated by DBFlute — regenerate rather than hand-editing.</p>
 * @author DBFlute(AutoGenerator)
 */
public abstract class AbstractBsWhiteBaseOne01SeaBroadwayCQ extends AbstractConditionQuery {

    // ===================================================================================
    //                                                                         Constructor
    //                                                                         ===========
    public AbstractBsWhiteBaseOne01SeaBroadwayCQ(ConditionQuery referrerQuery, SqlClause sqlClause, String aliasName, int nestLevel) {
        super(referrerQuery, sqlClause, aliasName, nestLevel);
    }

    // ===================================================================================
    //                                                                             DB Meta
    //                                                                             =======
    @Override
    protected DBMetaProvider xgetDBMetaProvider() {
        return DBMetaInstanceHandler.getProvider();
    }

    public String asTableDbName() {
        return "WHITE_BASE_ONE01_SEA_BROADWAY";
    }

    // ===================================================================================
    //                                                                               Query
    //                                                                               =====
    /**
     * Equal(=). And NullIgnored, OnlyOnceRegistered. <br>
     * BROADWAY_ID: {PK, NotNull, INTEGER(10)}
     * @param broadwayId The value of broadwayId as equal. (basically NotNull: error as default, or no condition as option)
     */
    public void setBroadwayId_Equal(Integer broadwayId) {
        doSetBroadwayId_Equal(broadwayId);
    }

    protected void doSetBroadwayId_Equal(Integer broadwayId) {
        regBroadwayId(CK_EQ, broadwayId);
    }

    /**
     * NotEqual(&lt;&gt;). And NullIgnored, OnlyOnceRegistered. <br>
     * BROADWAY_ID: {PK, NotNull, INTEGER(10)}
     * @param broadwayId The value of broadwayId as notEqual. (basically NotNull: error as default, or no condition as option)
     */
    public void setBroadwayId_NotEqual(Integer broadwayId) {
        doSetBroadwayId_NotEqual(broadwayId);
    }

    protected void doSetBroadwayId_NotEqual(Integer broadwayId) {
        regBroadwayId(CK_NES, broadwayId);
    }

    /**
     * GreaterThan(&gt;). And NullIgnored, OnlyOnceRegistered. <br>
     * BROADWAY_ID: {PK, NotNull, INTEGER(10)}
     * @param broadwayId The value of broadwayId as greaterThan. (basically NotNull: error as default, or no condition as option)
     */
    public void setBroadwayId_GreaterThan(Integer broadwayId) {
        regBroadwayId(CK_GT, broadwayId);
    }

    /**
     * LessThan(&lt;). And NullIgnored, OnlyOnceRegistered. <br>
     * BROADWAY_ID: {PK, NotNull, INTEGER(10)}
     * @param broadwayId The value of broadwayId as lessThan. (basically NotNull: error as default, or no condition as option)
     */
    public void setBroadwayId_LessThan(Integer broadwayId) {
        regBroadwayId(CK_LT, broadwayId);
    }

    /**
     * GreaterEqual(&gt;=). And NullIgnored, OnlyOnceRegistered. <br>
     * BROADWAY_ID: {PK, NotNull, INTEGER(10)}
     * @param broadwayId The value of broadwayId as greaterEqual. (basically NotNull: error as default, or no condition as option)
     */
    public void setBroadwayId_GreaterEqual(Integer broadwayId) {
        regBroadwayId(CK_GE, broadwayId);
    }

    /**
     * LessEqual(&lt;=). And NullIgnored, OnlyOnceRegistered. <br>
     * BROADWAY_ID: {PK, NotNull, INTEGER(10)}
     * @param broadwayId The value of broadwayId as lessEqual. (basically NotNull: error as default, or no condition as option)
     */
    public void setBroadwayId_LessEqual(Integer broadwayId) {
        regBroadwayId(CK_LE, broadwayId);
    }

    /**
     * RangeOf with various options. (versatile) <br>
     * {(default) minNumber &lt;= column &lt;= maxNumber} <br>
     * And NullIgnored, OnlyOnceRegistered. <br>
     * BROADWAY_ID: {PK, NotNull, INTEGER(10)}
     * @param minNumber The min number of broadwayId. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param maxNumber The max number of broadwayId. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param opLambda The callback for option of range-of. (NotNull)
     */
    public void setBroadwayId_RangeOf(Integer minNumber, Integer maxNumber, ConditionOptionCall<RangeOfOption> opLambda) {
        setBroadwayId_RangeOf(minNumber, maxNumber, xcROOP(opLambda));
    }

    /**
     * RangeOf with various options. (versatile) <br>
     * {(default) minNumber &lt;= column &lt;= maxNumber} <br>
     * And NullIgnored, OnlyOnceRegistered. <br>
     * BROADWAY_ID: {PK, NotNull, INTEGER(10)}
     * @param minNumber The min number of broadwayId. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param maxNumber The max number of broadwayId. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param rangeOfOption The option of range-of. (NotNull)
     */
    protected void setBroadwayId_RangeOf(Integer minNumber, Integer maxNumber, RangeOfOption rangeOfOption) {
        regROO(minNumber, maxNumber, xgetCValueBroadwayId(), "BROADWAY_ID", rangeOfOption);
    }

    /**
     * InScope {in (1, 2)}. And NullIgnored, NullElementIgnored, SeveralRegistered. <br>
     * BROADWAY_ID: {PK, NotNull, INTEGER(10)}
     * @param broadwayIdList The collection of broadwayId as inScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setBroadwayId_InScope(Collection<Integer> broadwayIdList) {
        doSetBroadwayId_InScope(broadwayIdList);
    }

    protected void doSetBroadwayId_InScope(Collection<Integer> broadwayIdList) {
        regINS(CK_INS, cTL(broadwayIdList), xgetCValueBroadwayId(), "BROADWAY_ID");
    }

    /**
     * NotInScope {not in (1, 2)}. And NullIgnored, NullElementIgnored, SeveralRegistered. <br>
     * BROADWAY_ID: {PK, NotNull, INTEGER(10)}
     * @param broadwayIdList The collection of broadwayId as notInScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setBroadwayId_NotInScope(Collection<Integer> broadwayIdList) {
        doSetBroadwayId_NotInScope(broadwayIdList);
    }

    protected void doSetBroadwayId_NotInScope(Collection<Integer> broadwayIdList) {
        regINS(CK_NINS, cTL(broadwayIdList), xgetCValueBroadwayId(), "BROADWAY_ID");
    }

    /**
     * Set up ExistsReferrer (correlated sub-query). <br>
     * {exists (select BROADWAY_ID from WHITE_BASE_ONE01_SEA where ...)} <br>
     * WHITE_BASE_ONE01_SEA by BROADWAY_ID, named 'whiteBaseOne01SeaAsOne'.
     * <pre>
     * cb.query().existsWhiteBaseOne01Sea(seaCB -&gt; {
     *     seaCB.query().set...
     * });
     * </pre>
     * @param subCBLambda The callback for sub-query of WhiteBaseOne01SeaList for 'exists'. (NotNull)
     */
    public void existsWhiteBaseOne01Sea(SubQuery<WhiteBaseOne01SeaCB> subCBLambda) {
        assertObjectNotNull("subCBLambda", subCBLambda);
        WhiteBaseOne01SeaCB cb = new WhiteBaseOne01SeaCB(); cb.xsetupForExistsReferrer(this);
        lockCall(() -> subCBLambda.query(cb)); String pp = keepBroadwayId_ExistsReferrer_WhiteBaseOne01SeaList(cb.query());
        registerExistsReferrer(cb.query(), "BROADWAY_ID", "BROADWAY_ID", pp, "whiteBaseOne01SeaList");
    }

    public abstract String keepBroadwayId_ExistsReferrer_WhiteBaseOne01SeaList(WhiteBaseOne01SeaCQ sq);

    /**
     * Set up NotExistsReferrer (correlated sub-query). <br>
     * {not exists (select BROADWAY_ID from WHITE_BASE_ONE01_SEA where ...)} <br>
     * WHITE_BASE_ONE01_SEA by BROADWAY_ID, named 'whiteBaseOne01SeaAsOne'.
     * <pre>
     * cb.query().notExistsWhiteBaseOne01Sea(seaCB -&gt; {
     *     seaCB.query().set...
     * });
     * </pre>
     * @param subCBLambda The callback for sub-query of BroadwayId_NotExistsReferrer_WhiteBaseOne01SeaList for 'not exists'. (NotNull)
     */
    public void notExistsWhiteBaseOne01Sea(SubQuery<WhiteBaseOne01SeaCB> subCBLambda) {
        assertObjectNotNull("subCBLambda", subCBLambda);
        WhiteBaseOne01SeaCB cb = new WhiteBaseOne01SeaCB(); cb.xsetupForExistsReferrer(this);
        lockCall(() -> subCBLambda.query(cb)); String pp = keepBroadwayId_NotExistsReferrer_WhiteBaseOne01SeaList(cb.query());
        registerNotExistsReferrer(cb.query(), "BROADWAY_ID", "BROADWAY_ID", pp, "whiteBaseOne01SeaList");
    }

    public abstract String keepBroadwayId_NotExistsReferrer_WhiteBaseOne01SeaList(WhiteBaseOne01SeaCQ sq);

    // internal entry point for (Specify)DerivedReferrer registration
    public void xsderiveWhiteBaseOne01SeaList(String fn, SubQuery<WhiteBaseOne01SeaCB> sq, String al, DerivedReferrerOption op) {
        assertObjectNotNull("subQuery", sq);
        WhiteBaseOne01SeaCB cb = new WhiteBaseOne01SeaCB(); cb.xsetupForDerivedReferrer(this);
        lockCall(() -> sq.query(cb)); String pp = keepBroadwayId_SpecifyDerivedReferrer_WhiteBaseOne01SeaList(cb.query());
        registerSpecifyDerivedReferrer(fn, cb.query(), "BROADWAY_ID", "BROADWAY_ID", pp, "whiteBaseOne01SeaList", al, op);
    }

    public abstract String keepBroadwayId_SpecifyDerivedReferrer_WhiteBaseOne01SeaList(WhiteBaseOne01SeaCQ sq);

    /**
     * Prepare for (Query)DerivedReferrer (correlated sub-query). <br>
     * {FOO &lt;= (select max(BAR) from WHITE_BASE_ONE01_SEA where ...)} <br>
     * WHITE_BASE_ONE01_SEA by BROADWAY_ID, named 'whiteBaseOne01SeaAsOne'.
     * <pre>
     * cb.query().derivedWhiteBaseOne01Sea().max(seaCB -&gt; {
     *     seaCB.specify().columnFoo...  // derived column by function
     *     seaCB.query().setBar...      // referrer condition
     * }).greaterEqual(123);            // condition to derived column
     * </pre>
     * @return The object to set up a function for referrer table. (NotNull)
     */
    public HpQDRFunction<WhiteBaseOne01SeaCB> derivedWhiteBaseOne01Sea() {
        return xcreateQDRFunctionWhiteBaseOne01SeaList();
    }

    protected HpQDRFunction<WhiteBaseOne01SeaCB> xcreateQDRFunctionWhiteBaseOne01SeaList() {
        return xcQDRFunc((fn, sq, rd, vl, op) -> xqderiveWhiteBaseOne01SeaList(fn, sq, rd, vl, op));
    }

    // internal entry point for (Query)DerivedReferrer registration
    public void xqderiveWhiteBaseOne01SeaList(String fn, SubQuery<WhiteBaseOne01SeaCB> sq, String rd, Object vl, DerivedReferrerOption op) {
        assertObjectNotNull("subQuery", sq);
        WhiteBaseOne01SeaCB cb = new WhiteBaseOne01SeaCB(); cb.xsetupForDerivedReferrer(this);
        lockCall(() -> sq.query(cb));
        String sqpp = keepBroadwayId_QueryDerivedReferrer_WhiteBaseOne01SeaList(cb.query()); String prpp = keepBroadwayId_QueryDerivedReferrer_WhiteBaseOne01SeaListParameter(vl);
        registerQueryDerivedReferrer(fn, cb.query(), "BROADWAY_ID", "BROADWAY_ID", sqpp, "whiteBaseOne01SeaList", rd, vl, prpp, op);
    }

    public abstract String keepBroadwayId_QueryDerivedReferrer_WhiteBaseOne01SeaList(WhiteBaseOne01SeaCQ sq);

    public abstract String keepBroadwayId_QueryDerivedReferrer_WhiteBaseOne01SeaListParameter(Object vl);

    /**
     * IsNull {is null}. And OnlyOnceRegistered. <br>
     * BROADWAY_ID: {PK, NotNull, INTEGER(10)}
     */
    public void setBroadwayId_IsNull() { regBroadwayId(CK_ISN, DOBJ); }

    /**
     * IsNotNull {is not null}. And OnlyOnceRegistered. <br>
     * BROADWAY_ID: {PK, NotNull, INTEGER(10)}
     */
    public void setBroadwayId_IsNotNull() { regBroadwayId(CK_ISNN, DOBJ); }

    protected void regBroadwayId(ConditionKey ky, Object vl) { regQ(ky, vl, xgetCValueBroadwayId(), "BROADWAY_ID"); }
    protected abstract ConditionValue xgetCValueBroadwayId();

    /**
     * Equal(=). And NullOrEmptyIgnored, OnlyOnceRegistered. <br>
     * BROADWAY_NAME: {NotNull, VARCHAR(200)}
     * @param broadwayName The value of broadwayName as equal. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setBroadwayName_Equal(String broadwayName) {
        doSetBroadwayName_Equal(fRES(broadwayName));
    }

    protected void doSetBroadwayName_Equal(String broadwayName) {
        regBroadwayName(CK_EQ, broadwayName);
    }

    /**
     * LikeSearch with various options. (versatile) {like '%xxx%' escape ...}. And NullOrEmptyIgnored, SeveralRegistered. <br>
     * BROADWAY_NAME: {NotNull, VARCHAR(200)} <br>
     * <pre>e.g. setBroadwayName_LikeSearch("xxx", op -&gt; op.likeContain());</pre>
     * @param broadwayName The value of broadwayName as likeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
     * @param opLambda The callback for option of like-search. (NotNull)
     */
    public void setBroadwayName_LikeSearch(String broadwayName, ConditionOptionCall<LikeSearchOption> opLambda) {
        setBroadwayName_LikeSearch(broadwayName, xcLSOP(opLambda));
    }

    /**
     * LikeSearch with various options. (versatile) {like '%xxx%' escape ...}. And NullOrEmptyIgnored, SeveralRegistered. <br>
     * BROADWAY_NAME: {NotNull, VARCHAR(200)} <br>
     * <pre>e.g. setBroadwayName_LikeSearch("xxx", new LikeSearchOption().likeContain());</pre>
     * @param broadwayName The value of broadwayName as likeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
     * @param likeSearchOption The option of like-search. (NotNull)
     */
    protected void setBroadwayName_LikeSearch(String broadwayName, LikeSearchOption likeSearchOption) {
        regLSQ(CK_LS, fRES(broadwayName), xgetCValueBroadwayName(), "BROADWAY_NAME", likeSearchOption);
    }

    /**
     * NotLikeSearch with various options. (versatile) {not like 'xxx%' escape ...} <br>
     * And NullOrEmptyIgnored, SeveralRegistered. <br>
     * BROADWAY_NAME: {NotNull, VARCHAR(200)}
     * @param broadwayName The value of broadwayName as notLikeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
     * @param opLambda The callback for option of like-search. (NotNull)
     */
    public void setBroadwayName_NotLikeSearch(String broadwayName, ConditionOptionCall<LikeSearchOption> opLambda) {
        setBroadwayName_NotLikeSearch(broadwayName, xcLSOP(opLambda));
    }

    /**
     * NotLikeSearch with various options. (versatile) {not like 'xxx%' escape ...} <br>
     * And NullOrEmptyIgnored, SeveralRegistered. <br>
     * BROADWAY_NAME: {NotNull, VARCHAR(200)}
     * @param broadwayName The value of broadwayName as notLikeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
     * @param likeSearchOption The option of not-like-search. (NotNull)
     */
    protected void setBroadwayName_NotLikeSearch(String broadwayName, LikeSearchOption likeSearchOption) {
        regLSQ(CK_NLS, fRES(broadwayName), xgetCValueBroadwayName(), "BROADWAY_NAME", likeSearchOption);
    }

    protected void regBroadwayName(ConditionKey ky, Object vl) { regQ(ky, vl, xgetCValueBroadwayName(), "BROADWAY_NAME"); }
    protected abstract ConditionValue xgetCValueBroadwayName();

    // ===================================================================================
    //                                                                     ScalarCondition
    //                                                                     ===============
    /**
     * Prepare ScalarCondition as equal. <br>
     * {where FOO = (select max(BAR) from ...)}
     * @return The object to set up a function. (NotNull)
     */
    public HpSLCFunction<WhiteBaseOne01SeaBroadwayCB> scalar_Equal() {
        return xcreateSLCFunction(CK_EQ, WhiteBaseOne01SeaBroadwayCB.class);
    }

    /**
     * Prepare ScalarCondition as notEqual. <br>
     * {where FOO &lt;&gt; (select max(BAR) from ...)}
     * @return The object to set up a function. (NotNull)
     */
    public HpSLCFunction<WhiteBaseOne01SeaBroadwayCB> scalar_NotEqual() {
        return xcreateSLCFunction(CK_NES, WhiteBaseOne01SeaBroadwayCB.class);
    }

    /**
     * Prepare ScalarCondition as greaterThan. <br>
     * {where FOO &gt; (select max(BAR) from ...)}
     * @return The object to set up a function. (NotNull)
     */
    public HpSLCFunction<WhiteBaseOne01SeaBroadwayCB> scalar_GreaterThan() {
        return xcreateSLCFunction(CK_GT, WhiteBaseOne01SeaBroadwayCB.class);
    }

    /**
     * Prepare ScalarCondition as lessThan. <br>
     * {where FOO &lt; (select max(BAR) from ...)}
     * @return The object to set up a function. (NotNull)
     */
    public HpSLCFunction<WhiteBaseOne01SeaBroadwayCB> scalar_LessThan() {
        return xcreateSLCFunction(CK_LT, WhiteBaseOne01SeaBroadwayCB.class);
    }

    /**
     * Prepare ScalarCondition as greaterEqual. <br>
     * {where FOO &gt;= (select max(BAR) from ...)}
     * @return The object to set up a function. (NotNull)
     */
    public HpSLCFunction<WhiteBaseOne01SeaBroadwayCB> scalar_GreaterEqual() {
        return xcreateSLCFunction(CK_GE, WhiteBaseOne01SeaBroadwayCB.class);
    }

    /**
     * Prepare ScalarCondition as lessEqual. <br>
     * {where FOO &lt;= (select max(BAR) from ...)}
     * @return The object to set up a function. (NotNull)
     */
    public HpSLCFunction<WhiteBaseOne01SeaBroadwayCB> scalar_LessEqual() {
        return xcreateSLCFunction(CK_LE, WhiteBaseOne01SeaBroadwayCB.class);
    }

    @SuppressWarnings("unchecked")
    protected <CB extends ConditionBean> void xscalarCondition(String fn, SubQuery<CB> sq, String rd, HpSLCCustomized<CB> cs, ScalarConditionOption op) {
        assertObjectNotNull("subQuery", sq);
        WhiteBaseOne01SeaBroadwayCB cb = xcreateScalarConditionCB(); sq.query((CB)cb);
        String pp = keepScalarCondition(cb.query()); // for saving query-value
        cs.setPartitionByCBean((CB)xcreateScalarConditionPartitionByCB()); // for using partition-by
        registerScalarCondition(fn, cb.query(), pp, rd, cs, op);
    }

    public abstract String keepScalarCondition(WhiteBaseOne01SeaBroadwayCQ sq);

    protected WhiteBaseOne01SeaBroadwayCB xcreateScalarConditionCB() {
        WhiteBaseOne01SeaBroadwayCB cb = newMyCB(); cb.xsetupForScalarCondition(this); return cb;
    }

    protected WhiteBaseOne01SeaBroadwayCB xcreateScalarConditionPartitionByCB() {
        WhiteBaseOne01SeaBroadwayCB cb = newMyCB(); cb.xsetupForScalarConditionPartitionBy(this); return cb;
    }

    // ===================================================================================
    //                                                                       MyselfDerived
    //                                                                       =============
    // internal entry point for (Specify)MyselfDerived registration
    public void xsmyselfDerive(String fn, SubQuery<WhiteBaseOne01SeaBroadwayCB> sq, String al, DerivedReferrerOption op) {
        assertObjectNotNull("subQuery", sq);
        WhiteBaseOne01SeaBroadwayCB cb = new WhiteBaseOne01SeaBroadwayCB(); cb.xsetupForDerivedReferrer(this);
        lockCall(() -> sq.query(cb)); String pp = keepSpecifyMyselfDerived(cb.query()); String pk = "BROADWAY_ID";
        registerSpecifyMyselfDerived(fn, cb.query(), pk, pk, pp, "myselfDerived", al, op);
    }

    public abstract String keepSpecifyMyselfDerived(WhiteBaseOne01SeaBroadwayCQ sq);

    /**
     * Prepare for (Query)MyselfDerived (correlated sub-query).
     * @return The object to set up a function for myself table. (NotNull)
     */
    public HpQDRFunction<WhiteBaseOne01SeaBroadwayCB> myselfDerived() {
        return xcreateQDRFunctionMyselfDerived(WhiteBaseOne01SeaBroadwayCB.class);
    }

    @SuppressWarnings("unchecked")
    protected <CB extends ConditionBean> void xqderiveMyselfDerived(String fn, SubQuery<CB> sq, String rd, Object vl, DerivedReferrerOption op) {
        assertObjectNotNull("subQuery", sq);
        WhiteBaseOne01SeaBroadwayCB cb = new WhiteBaseOne01SeaBroadwayCB(); cb.xsetupForDerivedReferrer(this); sq.query((CB)cb);
        String pk = "BROADWAY_ID";
        String sqpp = keepQueryMyselfDerived(cb.query()); // for saving query-value.
        String prpp = keepQueryMyselfDerivedParameter(vl);
        registerQueryMyselfDerived(fn, cb.query(), pk, pk, sqpp, "myselfDerived", rd, vl, prpp, op);
    }

    public abstract String keepQueryMyselfDerived(WhiteBaseOne01SeaBroadwayCQ sq);
    public abstract String keepQueryMyselfDerivedParameter(Object vl);

    // ===================================================================================
    //                                                                        MyselfExists
    //                                                                        ============
    /**
     * Prepare for MyselfExists (correlated sub-query).
     * @param subCBLambda The implementation of sub-query. (NotNull)
     */
    public void myselfExists(SubQuery<WhiteBaseOne01SeaBroadwayCB> subCBLambda) {
        assertObjectNotNull("subCBLambda", subCBLambda);
        WhiteBaseOne01SeaBroadwayCB cb = new WhiteBaseOne01SeaBroadwayCB(); cb.xsetupForMyselfExists(this);
        lockCall(() -> subCBLambda.query(cb)); String pp = keepMyselfExists(cb.query());
        registerMyselfExists(cb.query(), pp);
    }

    public abstract String keepMyselfExists(WhiteBaseOne01SeaBroadwayCQ sq);

    // ===================================================================================
    //                                                                        Manual Order
    //                                                                        ============
    /**
     * Order along manual ordering information.
     * <pre>
     * cb.query().addOrderBy_Birthdate_Asc().withManualOrder(op -&gt; {
     *     op.when_GreaterEqual(priorityDate); // e.g. 2000/01/01
     * });
     * // order by case when BIRTHDATE &gt;= '2000/01/01' then 0 else 1 end asc, ...
     *
     * cb.query().addOrderBy_MemberStatusCode_Asc().withManualOrder(op -&gt; {
     *     op.when_Equal(CDef.MemberStatus.Withdrawal);
     *     op.when_Equal(CDef.MemberStatus.Formalized);
     *     op.when_Equal(CDef.MemberStatus.Provisional);
     * });
     * // order by case when MEMBER_STATUS_CODE = 'WDL' then 0
     * //               when MEMBER_STATUS_CODE = 'FML' then 1
     * //               when MEMBER_STATUS_CODE = 'PRV' then 2 else 3 end asc, ...
     * </pre>
     * <p>This function with Union is unsupported!</p>
     * <p>The order values are bound (treated as bind parameter).</p>
     * @param opLambda The callback for option of manual-order containing order values. (NotNull)
     */
    public void withManualOrder(ManualOrderOptionCall opLambda) { // is user public!
        xdoWithManualOrder(cMOO(opLambda));
    }

    // ===================================================================================
    //                                                                    Small Adjustment
    //                                                                    ================
    // ===================================================================================
    //                                                                       Very Internal
    //                                                                       =============
    protected WhiteBaseOne01SeaBroadwayCB newMyCB() {
        return new WhiteBaseOne01SeaBroadwayCB();
    }

    // very internal (for suppressing warn about 'Not Use Import')
    protected String xabUDT() { return Date.class.getName(); }
    protected String xabCQ() { return WhiteBaseOne01SeaBroadwayCQ.class.getName(); }
    protected String xabLSO() { return LikeSearchOption.class.getName(); }
    protected String xabSLCS() { return HpSLCSetupper.class.getName(); }
    protected String xabSCP() { return SubQuery.class.getName(); }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.frauddetector.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * Result object for the {@code GetEventPredictionMetadata} operation: a snapshot of the inputs (event, entity,
 * variables) and of the evaluation details (rules, model versions, external models, outcomes) behind a single
 * fraud prediction.
 *
 * <p>NOTE: this class is code-generated (see the {@code @Generated} annotation); do not hand-edit the logic —
 * changes belong in the code generator's service model.</p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/frauddetector-2019-11-15/GetEventPredictionMetadata"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetEventPredictionMetadataResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** The event ID. */
    private String eventId;
    /** The event type associated with the detector specified for this prediction. */
    private String eventTypeName;
    /** The entity ID. */
    private String entityId;
    /** The entity type. */
    private String entityType;
    /** The timestamp for when the prediction was generated for the associated event ID. */
    private String eventTimestamp;
    /** The detector ID. */
    private String detectorId;
    /** The detector version ID. */
    private String detectorVersionId;
    /** The status of the detector version. */
    private String detectorVersionStatus;
    /** A list of event variables that influenced the prediction scores. */
    private java.util.List<EventVariableSummary> eventVariables;
    /** List of rules associated with the detector version that were used for evaluating variable values. */
    private java.util.List<EvaluatedRule> rules;
    /** The execution mode of the rule used for evaluating variable values (see {@link RuleExecutionMode}). */
    private String ruleExecutionMode;
    /** The outcomes of the matched rule, based on the rule execution mode. */
    private java.util.List<String> outcomes;
    /** Model versions that were evaluated for generating predictions. */
    private java.util.List<EvaluatedModelVersion> evaluatedModelVersions;
    /** External (Amazon SageMaker) models that were evaluated for generating predictions. */
    private java.util.List<EvaluatedExternalModel> evaluatedExternalModels;
    /** The timestamp that defines when the prediction was generated. */
    private String predictionTimestamp;

    /** Sets the event ID. */
    public void setEventId(String eventId) {
        this.eventId = eventId;
    }

    /** Returns the event ID. */
    public String getEventId() {
        return this.eventId;
    }

    /** Fluent setter for the event ID; returns {@code this} so calls can be chained. */
    public GetEventPredictionMetadataResult withEventId(String eventId) {
        setEventId(eventId);
        return this;
    }

    /** Sets the event type associated with the detector specified for this prediction. */
    public void setEventTypeName(String eventTypeName) {
        this.eventTypeName = eventTypeName;
    }

    /** Returns the event type associated with the detector specified for this prediction. */
    public String getEventTypeName() {
        return this.eventTypeName;
    }

    /** Fluent setter for the event type name; returns {@code this} so calls can be chained. */
    public GetEventPredictionMetadataResult withEventTypeName(String eventTypeName) {
        setEventTypeName(eventTypeName);
        return this;
    }

    /** Sets the entity ID. */
    public void setEntityId(String entityId) {
        this.entityId = entityId;
    }

    /** Returns the entity ID. */
    public String getEntityId() {
        return this.entityId;
    }

    /** Fluent setter for the entity ID; returns {@code this} so calls can be chained. */
    public GetEventPredictionMetadataResult withEntityId(String entityId) {
        setEntityId(entityId);
        return this;
    }

    /** Sets the entity type. */
    public void setEntityType(String entityType) {
        this.entityType = entityType;
    }

    /** Returns the entity type. */
    public String getEntityType() {
        return this.entityType;
    }

    /** Fluent setter for the entity type; returns {@code this} so calls can be chained. */
    public GetEventPredictionMetadataResult withEntityType(String entityType) {
        setEntityType(entityType);
        return this;
    }

    /** Sets the timestamp for when the prediction was generated for the associated event ID. */
    public void setEventTimestamp(String eventTimestamp) {
        this.eventTimestamp = eventTimestamp;
    }

    /** Returns the timestamp for when the prediction was generated for the associated event ID. */
    public String getEventTimestamp() {
        return this.eventTimestamp;
    }

    /** Fluent setter for the event timestamp; returns {@code this} so calls can be chained. */
    public GetEventPredictionMetadataResult withEventTimestamp(String eventTimestamp) {
        setEventTimestamp(eventTimestamp);
        return this;
    }

    /** Sets the detector ID. */
    public void setDetectorId(String detectorId) {
        this.detectorId = detectorId;
    }

    /** Returns the detector ID. */
    public String getDetectorId() {
        return this.detectorId;
    }

    /** Fluent setter for the detector ID; returns {@code this} so calls can be chained. */
    public GetEventPredictionMetadataResult withDetectorId(String detectorId) {
        setDetectorId(detectorId);
        return this;
    }

    /** Sets the detector version ID. */
    public void setDetectorVersionId(String detectorVersionId) {
        this.detectorVersionId = detectorVersionId;
    }

    /** Returns the detector version ID. */
    public String getDetectorVersionId() {
        return this.detectorVersionId;
    }

    /** Fluent setter for the detector version ID; returns {@code this} so calls can be chained. */
    public GetEventPredictionMetadataResult withDetectorVersionId(String detectorVersionId) {
        setDetectorVersionId(detectorVersionId);
        return this;
    }

    /** Sets the status of the detector version. */
    public void setDetectorVersionStatus(String detectorVersionStatus) {
        this.detectorVersionStatus = detectorVersionStatus;
    }

    /** Returns the status of the detector version. */
    public String getDetectorVersionStatus() {
        return this.detectorVersionStatus;
    }

    /** Fluent setter for the detector version status; returns {@code this} so calls can be chained. */
    public GetEventPredictionMetadataResult withDetectorVersionStatus(String detectorVersionStatus) {
        setDetectorVersionStatus(detectorVersionStatus);
        return this;
    }

    /**
     * Returns the list of event variables that influenced the prediction scores.
     * Note: exposes the internal list (no copy), matching generator conventions.
     */
    public java.util.List<EventVariableSummary> getEventVariables() {
        return eventVariables;
    }

    /**
     * Sets the event variables that influenced the prediction scores. A {@code null} collection clears the field;
     * otherwise a defensive copy is stored so later caller-side mutation does not leak into this result.
     */
    public void setEventVariables(java.util.Collection<EventVariableSummary> eventVariables) {
        if (eventVariables == null) {
            this.eventVariables = null;
            return;
        }

        this.eventVariables = new java.util.ArrayList<EventVariableSummary>(eventVariables);
    }

    /**
     * Fluent varargs setter for the event variables. <b>NOTE:</b> this APPENDS to the existing list (if any); use
     * {@link #setEventVariables(java.util.Collection)} or {@link #withEventVariables(java.util.Collection)} to
     * replace the existing values.
     */
    public GetEventPredictionMetadataResult withEventVariables(EventVariableSummary... eventVariables) {
        if (this.eventVariables == null) {
            // presize to the varargs length; elements are added in the loop below
            setEventVariables(new java.util.ArrayList<EventVariableSummary>(eventVariables.length));
        }
        for (EventVariableSummary ele : eventVariables) {
            this.eventVariables.add(ele);
        }
        return this;
    }

    /** Fluent collection setter for the event variables (replaces existing values); returns {@code this}. */
    public GetEventPredictionMetadataResult withEventVariables(java.util.Collection<EventVariableSummary> eventVariables) {
        setEventVariables(eventVariables);
        return this;
    }

    /** Returns the rules used for evaluating variable values. Exposes the internal list (no copy). */
    public java.util.List<EvaluatedRule> getRules() {
        return rules;
    }

    /**
     * Sets the rules used for evaluating variable values. A {@code null} collection clears the field; otherwise a
     * defensive copy is stored.
     */
    public void setRules(java.util.Collection<EvaluatedRule> rules) {
        if (rules == null) {
            this.rules = null;
            return;
        }

        this.rules = new java.util.ArrayList<EvaluatedRule>(rules);
    }

    /**
     * Fluent varargs setter for the rules. <b>NOTE:</b> this APPENDS to the existing list (if any); use
     * {@link #setRules(java.util.Collection)} or {@link #withRules(java.util.Collection)} to replace the values.
     */
    public GetEventPredictionMetadataResult withRules(EvaluatedRule... rules) {
        if (this.rules == null) {
            setRules(new java.util.ArrayList<EvaluatedRule>(rules.length));
        }
        for (EvaluatedRule ele : rules) {
            this.rules.add(ele);
        }
        return this;
    }

    /** Fluent collection setter for the rules (replaces existing values); returns {@code this}. */
    public GetEventPredictionMetadataResult withRules(java.util.Collection<EvaluatedRule> rules) {
        setRules(rules);
        return this;
    }

    /**
     * Sets the execution mode of the rule used for evaluating variable values.
     *
     * @see RuleExecutionMode
     */
    public void setRuleExecutionMode(String ruleExecutionMode) {
        this.ruleExecutionMode = ruleExecutionMode;
    }

    /**
     * Returns the execution mode of the rule used for evaluating variable values.
     *
     * @see RuleExecutionMode
     */
    public String getRuleExecutionMode() {
        return this.ruleExecutionMode;
    }

    /**
     * Fluent setter for the rule execution mode; returns {@code this} so calls can be chained.
     *
     * @see RuleExecutionMode
     */
    public GetEventPredictionMetadataResult withRuleExecutionMode(String ruleExecutionMode) {
        setRuleExecutionMode(ruleExecutionMode);
        return this;
    }

    /**
     * Fluent enum overload for the rule execution mode; stores the enum's string form. Throws
     * {@code NullPointerException} if {@code ruleExecutionMode} is {@code null}.
     *
     * @see RuleExecutionMode
     */
    public GetEventPredictionMetadataResult withRuleExecutionMode(RuleExecutionMode ruleExecutionMode) {
        this.ruleExecutionMode = ruleExecutionMode.toString();
        return this;
    }

    /** Returns the outcomes of the matched rule, based on the rule execution mode. Exposes the internal list. */
    public java.util.List<String> getOutcomes() {
        return outcomes;
    }

    /**
     * Sets the outcomes of the matched rule. A {@code null} collection clears the field; otherwise a defensive
     * copy is stored.
     */
    public void setOutcomes(java.util.Collection<String> outcomes) {
        if (outcomes == null) {
            this.outcomes = null;
            return;
        }

        this.outcomes = new java.util.ArrayList<String>(outcomes);
    }

    /**
     * Fluent varargs setter for the outcomes. <b>NOTE:</b> this APPENDS to the existing list (if any); use
     * {@link #setOutcomes(java.util.Collection)} or {@link #withOutcomes(java.util.Collection)} to replace the
     * values.
     */
    public GetEventPredictionMetadataResult withOutcomes(String... outcomes) {
        if (this.outcomes == null) {
            setOutcomes(new java.util.ArrayList<String>(outcomes.length));
        }
        for (String ele : outcomes) {
            this.outcomes.add(ele);
        }
        return this;
    }

    /** Fluent collection setter for the outcomes (replaces existing values); returns {@code this}. */
    public GetEventPredictionMetadataResult withOutcomes(java.util.Collection<String> outcomes) {
        setOutcomes(outcomes);
        return this;
    }

    /** Returns the model versions that were evaluated for generating predictions. Exposes the internal list. */
    public java.util.List<EvaluatedModelVersion> getEvaluatedModelVersions() {
        return evaluatedModelVersions;
    }

    /**
     * Sets the evaluated model versions. A {@code null} collection clears the field; otherwise a defensive copy
     * is stored.
     */
    public void setEvaluatedModelVersions(java.util.Collection<EvaluatedModelVersion> evaluatedModelVersions) {
        if (evaluatedModelVersions == null) {
            this.evaluatedModelVersions = null;
            return;
        }

        this.evaluatedModelVersions = new java.util.ArrayList<EvaluatedModelVersion>(evaluatedModelVersions);
    }

    /**
     * Fluent varargs setter for the evaluated model versions. <b>NOTE:</b> this APPENDS to the existing list (if
     * any); use {@link #setEvaluatedModelVersions(java.util.Collection)} or
     * {@link #withEvaluatedModelVersions(java.util.Collection)} to replace the existing values.
     */
    public GetEventPredictionMetadataResult withEvaluatedModelVersions(EvaluatedModelVersion... evaluatedModelVersions) {
        if (this.evaluatedModelVersions == null) {
            setEvaluatedModelVersions(new java.util.ArrayList<EvaluatedModelVersion>(evaluatedModelVersions.length));
        }
        for (EvaluatedModelVersion ele : evaluatedModelVersions) {
            this.evaluatedModelVersions.add(ele);
        }
        return this;
    }

    /** Fluent collection setter for the evaluated model versions (replaces existing values); returns {@code this}. */
    public GetEventPredictionMetadataResult withEvaluatedModelVersions(java.util.Collection<EvaluatedModelVersion> evaluatedModelVersions) {
        setEvaluatedModelVersions(evaluatedModelVersions);
        return this;
    }

    /**
     * Returns the external (Amazon SageMaker) models that were evaluated for generating predictions. Exposes the
     * internal list.
     */
    public java.util.List<EvaluatedExternalModel> getEvaluatedExternalModels() {
        return evaluatedExternalModels;
    }

    /**
     * Sets the evaluated external (Amazon SageMaker) models. A {@code null} collection clears the field;
     * otherwise a defensive copy is stored.
     */
    public void setEvaluatedExternalModels(java.util.Collection<EvaluatedExternalModel> evaluatedExternalModels) {
        if (evaluatedExternalModels == null) {
            this.evaluatedExternalModels = null;
            return;
        }

        this.evaluatedExternalModels = new java.util.ArrayList<EvaluatedExternalModel>(evaluatedExternalModels);
    }

    /**
     * Fluent varargs setter for the evaluated external models. <b>NOTE:</b> this APPENDS to the existing list (if
     * any); use {@link #setEvaluatedExternalModels(java.util.Collection)} or
     * {@link #withEvaluatedExternalModels(java.util.Collection)} to replace the existing values.
     */
    public GetEventPredictionMetadataResult withEvaluatedExternalModels(EvaluatedExternalModel... evaluatedExternalModels) {
        if (this.evaluatedExternalModels == null) {
            setEvaluatedExternalModels(new java.util.ArrayList<EvaluatedExternalModel>(evaluatedExternalModels.length));
        }
        for (EvaluatedExternalModel ele : evaluatedExternalModels) {
            this.evaluatedExternalModels.add(ele);
        }
        return this;
    }

    /** Fluent collection setter for the evaluated external models (replaces existing values); returns {@code this}. */
    public GetEventPredictionMetadataResult withEvaluatedExternalModels(java.util.Collection<EvaluatedExternalModel> evaluatedExternalModels) {
        setEvaluatedExternalModels(evaluatedExternalModels);
        return this;
    }

    /** Sets the timestamp that defines when the prediction was generated. */
    public void setPredictionTimestamp(String predictionTimestamp) {
        this.predictionTimestamp = predictionTimestamp;
    }

    /** Returns the timestamp that defines when the prediction was generated. */
    public String getPredictionTimestamp() {
        return this.predictionTimestamp;
    }

    /** Fluent setter for the prediction timestamp; returns {@code this} so calls can be chained. */
    public GetEventPredictionMetadataResult withPredictionTimestamp(String predictionTimestamp) {
        setPredictionTimestamp(predictionTimestamp);
        return this;
    }

    /**
     * Returns a string representation of this object, useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value (none of the fields here are marked sensitive, so all
     * non-null fields are printed).
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getEventId() != null)
            sb.append("EventId: ").append(getEventId()).append(",");
        if (getEventTypeName() != null)
            sb.append("EventTypeName: ").append(getEventTypeName()).append(",");
        if (getEntityId() != null)
            sb.append("EntityId: ").append(getEntityId()).append(",");
        if (getEntityType() != null)
            sb.append("EntityType: ").append(getEntityType()).append(",");
        if (getEventTimestamp() != null)
            sb.append("EventTimestamp: ").append(getEventTimestamp()).append(",");
        if (getDetectorId() != null)
            sb.append("DetectorId: ").append(getDetectorId()).append(",");
        if (getDetectorVersionId() != null)
            sb.append("DetectorVersionId: ").append(getDetectorVersionId()).append(",");
        if (getDetectorVersionStatus() != null)
            sb.append("DetectorVersionStatus: ").append(getDetectorVersionStatus()).append(",");
        if (getEventVariables() != null)
            sb.append("EventVariables: ").append(getEventVariables()).append(",");
        if (getRules() != null)
            sb.append("Rules: ").append(getRules()).append(",");
        if (getRuleExecutionMode() != null)
            sb.append("RuleExecutionMode: ").append(getRuleExecutionMode()).append(",");
        if (getOutcomes() != null)
            sb.append("Outcomes: ").append(getOutcomes()).append(",");
        if (getEvaluatedModelVersions() != null)
            sb.append("EvaluatedModelVersions: ").append(getEvaluatedModelVersions()).append(",");
        if (getEvaluatedExternalModels() != null)
            sb.append("EvaluatedExternalModels: ").append(getEvaluatedExternalModels()).append(",");
        if (getPredictionTimestamp() != null)
            sb.append("PredictionTimestamp: ").append(getPredictionTimestamp());
        sb.append("}");
        return sb.toString();
    }

    /**
     * Field-by-field equality. Generated pattern: for each field, {@code a == null ^ b == null} catches the
     * exactly-one-null case, then {@code equals} handles the both-non-null case.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof GetEventPredictionMetadataResult == false)
            return false;
        GetEventPredictionMetadataResult other = (GetEventPredictionMetadataResult) obj;
        if (other.getEventId() == null ^ this.getEventId() == null)
            return false;
        if (other.getEventId() != null && other.getEventId().equals(this.getEventId()) == false)
            return false;
        if (other.getEventTypeName() == null ^ this.getEventTypeName() == null)
            return false;
        if (other.getEventTypeName() != null && other.getEventTypeName().equals(this.getEventTypeName()) == false)
            return false;
        if (other.getEntityId() == null ^ this.getEntityId() == null)
            return false;
        if (other.getEntityId() != null && other.getEntityId().equals(this.getEntityId()) == false)
            return false;
        if (other.getEntityType() == null ^ this.getEntityType() == null)
            return false;
        if (other.getEntityType() != null && other.getEntityType().equals(this.getEntityType()) == false)
            return false;
        if (other.getEventTimestamp() == null ^ this.getEventTimestamp() == null)
            return false;
        if (other.getEventTimestamp() != null && other.getEventTimestamp().equals(this.getEventTimestamp()) == false)
            return false;
        if (other.getDetectorId() == null ^ this.getDetectorId() == null)
            return false;
        if (other.getDetectorId() != null && other.getDetectorId().equals(this.getDetectorId()) == false)
            return false;
        if (other.getDetectorVersionId() == null ^ this.getDetectorVersionId() == null)
            return false;
        if (other.getDetectorVersionId() != null && other.getDetectorVersionId().equals(this.getDetectorVersionId()) == false)
            return false;
        if (other.getDetectorVersionStatus() == null ^ this.getDetectorVersionStatus() == null)
            return false;
        if (other.getDetectorVersionStatus() != null && other.getDetectorVersionStatus().equals(this.getDetectorVersionStatus()) == false)
            return false;
        if (other.getEventVariables() == null ^ this.getEventVariables() == null)
            return false;
        if (other.getEventVariables() != null && other.getEventVariables().equals(this.getEventVariables()) == false)
            return false;
        if (other.getRules() == null ^ this.getRules() == null)
            return false;
        if (other.getRules() != null && other.getRules().equals(this.getRules()) == false)
            return false;
        if (other.getRuleExecutionMode() == null ^ this.getRuleExecutionMode() == null)
            return false;
        if (other.getRuleExecutionMode() != null && other.getRuleExecutionMode().equals(this.getRuleExecutionMode()) == false)
            return false;
        if (other.getOutcomes() == null ^ this.getOutcomes() == null)
            return false;
        if (other.getOutcomes() != null && other.getOutcomes().equals(this.getOutcomes()) == false)
            return false;
        if (other.getEvaluatedModelVersions() == null ^ this.getEvaluatedModelVersions() == null)
            return false;
        if (other.getEvaluatedModelVersions() != null && other.getEvaluatedModelVersions().equals(this.getEvaluatedModelVersions()) == false)
            return false;
        if (other.getEvaluatedExternalModels() == null ^ this.getEvaluatedExternalModels() == null)
            return false;
        if (other.getEvaluatedExternalModels() != null && other.getEvaluatedExternalModels().equals(this.getEvaluatedExternalModels()) == false)
            return false;
        if (other.getPredictionTimestamp() == null ^ this.getPredictionTimestamp() == null)
            return false;
        if (other.getPredictionTimestamp() != null && other.getPredictionTimestamp().equals(this.getPredictionTimestamp()) == false)
            return false;
        return true;
    }

    /** Hash over the same fields as {@link #equals(Object)}, keeping the equals/hashCode contract. */
    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getEventId() == null) ? 0 : getEventId().hashCode());
        hashCode = prime * hashCode + ((getEventTypeName() == null) ? 0 : getEventTypeName().hashCode());
        hashCode = prime * hashCode + ((getEntityId() == null) ? 0 : getEntityId().hashCode());
        hashCode = prime * hashCode + ((getEntityType() == null) ? 0 : getEntityType().hashCode());
        hashCode = prime * hashCode + ((getEventTimestamp() == null) ? 0 : getEventTimestamp().hashCode());
        hashCode = prime * hashCode + ((getDetectorId() == null) ? 0 : getDetectorId().hashCode());
        hashCode = prime * hashCode + ((getDetectorVersionId() == null) ? 0 : getDetectorVersionId().hashCode());
        hashCode = prime * hashCode + ((getDetectorVersionStatus() == null) ? 0 : getDetectorVersionStatus().hashCode());
        hashCode = prime * hashCode + ((getEventVariables() == null) ? 0 : getEventVariables().hashCode());
        hashCode = prime * hashCode + ((getRules() == null) ? 0 : getRules().hashCode());
        hashCode = prime * hashCode + ((getRuleExecutionMode() == null) ? 0 : getRuleExecutionMode().hashCode());
        hashCode = prime * hashCode + ((getOutcomes() == null) ? 0 : getOutcomes().hashCode());
        hashCode = prime * hashCode + ((getEvaluatedModelVersions() == null) ? 0 : getEvaluatedModelVersions().hashCode());
        hashCode = prime * hashCode + ((getEvaluatedExternalModels() == null) ? 0 : getEvaluatedExternalModels().hashCode());
        hashCode = prime * hashCode + ((getPredictionTimestamp() == null) ? 0 : getPredictionTimestamp().hashCode());
        return hashCode;
    }

    /**
     * Shallow copy via {@link Object#clone()}; list fields are shared with the clone, not copied.
     * CloneNotSupportedException cannot actually occur (the class implements Cloneable), so it is rewrapped.
     */
    @Override
    public GetEventPredictionMetadataResult clone() {
        try {
            return (GetEventPredictionMetadataResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

}
/**
 * Licensed to the Austrian Association for Software Tool Integration (AASTI)
 * under one or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information regarding copyright
 * ownership. The AASTI licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.openengsb.itests.exam;

import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;

import java.util.Dictionary;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;

import javax.inject.Inject;

import org.apache.karaf.features.FeaturesService;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.openengsb.core.api.AliveState;
import org.openengsb.core.api.Event;
import org.openengsb.core.api.EventSupport;
import org.openengsb.core.api.context.ContextHolder;
import org.openengsb.core.api.model.ConnectorDescription;
import org.openengsb.core.common.AbstractOpenEngSBService;
import org.openengsb.core.workflow.api.RuleManager;
import org.openengsb.core.workflow.api.WorkflowService;
import org.openengsb.core.workflow.api.model.RuleBaseElementId;
import org.openengsb.core.workflow.api.model.RuleBaseElementType;
import org.openengsb.domain.example.ExampleDomain;
import org.openengsb.domain.example.event.LogEvent;
import org.openengsb.domain.example.model.ExampleRequestModel;
import org.openengsb.domain.example.model.ExampleResponseModel;
import org.openengsb.itests.remoteclient.ExampleConnector;
import org.openengsb.itests.remoteclient.SecureSampleConnector;
import org.openengsb.itests.util.AbstractPreConfiguredExamTestHelper;
import org.ops4j.pax.exam.junit.PaxExam;
import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy;
import org.ops4j.pax.exam.spi.reactors.PerMethod;

/**
 * Integration tests for the OpenEngSB workflow engine: registers an in-JVM dummy connector for the
 * "example" domain, installs Drools rules at runtime, fires events through {@link WorkflowService},
 * and asserts the rules invoked the connector.
 */
@RunWith(PaxExam.class)
// This one will run each test in it's own container (slower speed)
@ExamReactorStrategy(PerMethod.class)
public class WorkflowIT extends AbstractPreConfiguredExamTestHelper {

    // Dummy connector registered in setUp(); the rules under test call into it.
    private DummyLogDomain exampleMock;

    @Inject
    private FeaturesService featuresService;

    /**
     * In-JVM stand-in for an ExampleDomain connector. Records whether any domain method was called
     * ({@code wasCalled}) and the last event delivered via {@link EventSupport} ({@code lastEvent}).
     */
    public static class DummyLogDomain extends AbstractOpenEngSBService implements ExampleDomain, EventSupport {
        // flipped to true by every domain method; read directly by the tests below
        private boolean wasCalled = false;
        private Event lastEvent;

        @Override
        public String doSomethingWithMessage(String message) {
            wasCalled = true;
            return "something";
        }

        @Override
        public AliveState getAliveState() {
            return AliveState.OFFLINE;
        }

        @Override
        public String doSomethingWithLogEvent(LogEvent event) {
            wasCalled = true;
            return "something";
        }

        public boolean isWasCalled() {
            return wasCalled;
        }

        @Override
        public ExampleResponseModel doSomethingWithModel(ExampleRequestModel model) {
            wasCalled = true;
            return new ExampleResponseModel();
        }

        @Override
        public void onEvent(Event event) {
            lastEvent = event;
        }
    }

    /**
     * Registers the dummy connector as an OSGi service for the "example" domain under service PID / location
     * "example2", so rules can reference it via the global named "example2".
     */
    @Before
    public void setUp() throws Exception {
        exampleMock = new DummyLogDomain();
        Dictionary<String, Object> properties = new Hashtable<String, Object>();
        properties.put("domain", "example");
        properties.put("connector", "example");
        properties.put("location.foo", "example2");
        properties.put(org.osgi.framework.Constants.SERVICE_PID, "example2");
        getBundleContext().registerService(new String[]{ ExampleDomain.class.getName(), EventSupport.class.getName() },
            exampleMock, properties);
    }

    /**
     * Installs a rule that calls the "example2" global on every LogEvent, fires a LogEvent, and verifies
     * the dummy connector was invoked.
     */
    @Test
    public void testCreateRuleAndTriggerDomain_shouldTriggerDomain() throws Exception {
        RuleManager ruleManager = getOsgiService(RuleManager.class);
        ruleManager.addImport(ExampleDomain.class.getName());
        ruleManager.addImport(LogEvent.class.getName());
        ruleManager.addGlobal(ExampleDomain.class.getName(), "example2");
        // Drools rule source; "example2" resolves to the connector registered in setUp()
        ruleManager.add(new RuleBaseElementId(RuleBaseElementType.Rule, "example-trigger"), ""
            + "when\n"
            + "  l : LogEvent()\n"
            + "then\n"
            + "  example2.doSomethingWithMessage(\"42\");\n"
        );
        ContextHolder.get().setCurrentContextId("foo");
        WorkflowService workflowService = getOsgiService(WorkflowService.class);
        // NOTE(review): this test authenticates with explicit credentials while the others use
        // authenticateAsAdmin() — presumably equivalent; consider unifying.
        authenticate("admin", "password");
        workflowService.processEvent(new LogEvent());
        assertThat(exampleMock.wasCalled, is(true));
    }

    /**
     * Installs a rule that looks up a response proxy for the event's origin and calls back on it; verifies
     * the origin connector ("example2") was invoked.
     */
    @Test
    public void testCreateAndTriggerResponseRule_shouldCallOrigin() throws Exception {
        RuleManager ruleManager = getOsgiService(RuleManager.class);
        ruleManager.addImport(ExampleDomain.class.getName());
        ruleManager.addImport(LogEvent.class.getName());
        ruleManager.addGlobal(ExampleDomain.class.getName(), "example2");
        ruleManager.add(new RuleBaseElementId(RuleBaseElementType.Rule, "example-response"), ""
            + "when\n"
            + "  l : LogEvent()\n"
            + "then\n"
            + "  ExampleDomain origin = (ExampleDomain) OsgiHelper.getResponseProxy(l, ExampleDomain.class);"
            + "  origin.doSomethingWithMessage(\"42\");"
        );
        ContextHolder.get().setCurrentContextId("foo");
        WorkflowService workflowService = getOsgiService(WorkflowService.class);
        LogEvent event = new LogEvent();
        // routes the response back to the connector registered under location "example2"
        event.setOrigin("example2");
        authenticateAsAdmin();
        workflowService.processEvent(event);
        assertThat(exampleMock.wasCalled, is(true));
    }

    /**
     * Verifies a processed event is forwarded to EventSupport connectors.
     * NOTE(review): currently {@code @Ignore}d with no reason recorded — document why or re-enable.
     */
    @Test
    @Ignore
    public void testRaiseEvent_shouldForwardToConnector() throws Exception {
        ContextHolder.get().setCurrentContextId("foo");
        WorkflowService workflowService = getOsgiService(WorkflowService.class);
        authenticateAsAdmin();
        Event event = new Event();
        workflowService.processEvent(event);
        assertThat(exampleMock.lastEvent, equalTo(event));
    }

    /**
     * Starts a JMS-based remote connector and verifies a processed event reaches it.
     * NOTE(review): the remote connector is started but never stopped/cleaned up afterwards — relies on
     * the PerMethod reactor tearing the container down; consider an explicit stop in a finally block.
     */
    @Test
    public void testRaiseEvent_shouldForwardToRemoteConnector() throws Exception {
        // JMS port feature is required for the openwire transport used by the remote connector
        featuresService.installFeature("openengsb-ports-jms");
        String openwirePort = getConfigProperty("org.openengsb.infrastructure.jms", "openwire");
        SecureSampleConnector remoteConnector = new SecureSampleConnector(openwirePort);
        final AtomicReference<Event> eventRef = new AtomicReference<Event>();
        Map<String, String> attributes = new HashMap<String, String>();
        Map<String, Object> properties = new HashMap<String, Object>();
        // advertise EventSupport so events get forwarded to the proxy
        attributes.put("mixin.1", EventSupport.class.getName());
        remoteConnector.start(new MyExampleConnector(eventRef),
            new ConnectorDescription("example", "external-connector-proxy", attributes, properties));
        WorkflowService workflowService = getOsgiService(WorkflowService.class);
        Event event = new Event("test");
        ContextHolder.get().setCurrentContextId("foo");
        authenticateAsAdmin();
        workflowService.processEvent(event);
        assertThat(eventRef.get().getName(), equalTo("test"));
    }

    /** Remote-side connector stub that captures the last received event into the shared reference. */
    public static class MyExampleConnector extends ExampleConnector {
        private final AtomicReference<Event> eventRef;

        public MyExampleConnector(AtomicReference<Event> eventRef) {
            this.eventRef = eventRef;
        }

        @Override
        public void onEvent(Event event) {
            eventRef.set(event);
        }
    }
}
package com.kotcrab.vis.usl;

import com.kotcrab.vis.usl.Token.Type;

import java.io.File;
import java.io.InputStream;
import java.util.List;
import java.util.Scanner;
import java.util.regex.Pattern;

/** USL Lexer that turns a USL file into a stream of tokens. */
public class Lexer {
	private static final String INCLUDE = "include";
	private static final String EXTENDS = "extends";
	private static final String INHERITS = "inherits";
	private static final String PACKAGE = "package";

	//matches ".name:" at the start of the remaining input (global style block declaration)
	private static final Pattern globalStyleRegex = Pattern.compile("^\\.[a-zA-Z0-9-_]+:.*$", Pattern.DOTALL);
	//matches "-name:" at the start of the remaining input (meta style declaration)
	private static final Pattern metaStyleRegex = Pattern.compile("^-[a-zA-Z0-9-_ ]+:.*$", Pattern.DOTALL);

	/**
	 * Tokenizes {@code usl}, appending tokens to {@code ctx.tokens}. Include directives are resolved
	 * eagerly, so this method may recurse into included files via {@link #parseAndLexInclude}.
	 */
	static void lexUsl (LexerContext ctx, String usl) {
		for (int i = 0; i < usl.length(); ) {
			char ch = usl.charAt(i);

			if (Character.isWhitespace(ch)) { //white space
				i++;
			} else if (usl.startsWith("//", i)) { //line comment
				i = skipLineComment(usl, i);
			} else if (usl.startsWith(INCLUDE + " ", i)) { //include <type> <path> directive
				i = parseAndLexInclude(ctx, usl, i + INCLUDE.length() + 1);
			} else if (usl.startsWith(PACKAGE + " ", i)) { //package <path> directive
				i = lexPackage(ctx, usl, i + PACKAGE.length() + 1);
			} else if (ch == '#') { //block style definition
				i = lexStyleBlock(ctx, usl, i + 1);
			} else if (ch == '^') { //block style override block definition
				i = lexStyleBlockOverride(ctx, usl, i + 1);
			} else if (ch == '.') { //global block style definition
				if (globalStyleRegex.matcher(usl.substring(i)).matches() == false)
					Utils.throwException("Unexpected '.' or invalid global style block declaration", usl, i);
				i = lexGlobalStyleDeclaration(ctx, usl, i + 1);
			} else if (ch == '-') { //meta style definition
				if (metaStyleRegex.matcher(usl.substring(i)).matches() == false) Utils.throwException("Unexpected '-'", usl, i);
				//put meta token and continue, lexIdentifier will be called in next loop
				ctx.tokens.add(new Token(usl, i, Type.META_STYLE));
				i++;
			} else if (ch == '{') {
				ctx.curliesLevel++;
				ctx.tokens.add(new Token(usl, i, Type.LCURL));
				i++;
			} else if (ch == '}') {
				ctx.curliesLevel--;
				if (ctx.curliesLevel < 0) Utils.throwException("Unexpected '}'", usl, i);
				ctx.tokens.add(new Token(usl, i, Type.RCURL));
				i++;
			} else if (ch == ',') {
				Utils.throwException("Unexpected ','", usl, i);
			} else if (peek(ctx.tokens) != null && peek(ctx.tokens).type == Type.IDENTIFIER) {
				//FIX: null-guard — peek() returns null while the token list is still empty, which previously NPEd here
				//identifier content: someName: content
				i = lexIdentifierContent(ctx, usl, i);
			} else if (checkIdentifierDef(usl, i)) { // identifier: someName: content
				i = lexIdentifier(ctx, usl, i);
			} else {
				//FIX: when no space follows the symbol, indexOf returns -1 and substring previously threw
				//StringIndexOutOfBoundsException instead of the intended lexer error
				int symbolEnd = usl.indexOf(" ", i);
				if (symbolEnd == -1) symbolEnd = usl.length();
				Utils.throwException("Unrecognized symbol '" + usl.substring(i, symbolEnd) + "'", usl, i);
			}
		}
	}

	/**
	 * Lexes a package directive ("package <name> {"). Returns the index just before the opening '{'
	 * so the main loop tokenizes the brace itself.
	 */
	private static int lexPackage (LexerContext ctx, String usl, int i) {
		int curlyIndex = usl.indexOf('{', i);
		//FIX: report a proper lexer error instead of StringIndexOutOfBoundsException when '{' is missing
		if (curlyIndex == -1) Utils.throwException("Expected '{' after package declaration", usl, i);
		String packageName = usl.substring(i, curlyIndex);
		ctx.tokens.add(new Token(usl, i, Type.PACKAGE, packageName.replace(" ", "")));
		return curlyIndex - 1;
	}

	/** Lexes an identifier definition, optionally with an "inherits a, b" clause before the ':'. */
	private static int lexIdentifier (LexerContext ctx, String usl, int i) {
		int idDefEnd = usl.indexOf(":", i);
		String idDef = usl.substring(i, idDefEnd);

		if (idDef.contains(" ")) { //blocks definition contains inherits
			if (idDef.contains(INHERITS) == false) Utils.throwException("Expected inherits", usl, i);
			String[] parts = idDef.split(" ", 3);
			if (parts.length != 3) Utils.throwException("Invalid inherits format", usl, i);

			ctx.tokens.add(new Token(usl, i, Type.IDENTIFIER, parts[0]));
			ctx.tokens.add(new Token(usl, i, Type.INHERITS));
			lexInherits(usl, i, ctx, parts[2]);
		} else {
			ctx.tokens.add(new Token(usl, i, Type.IDENTIFIER, idDef));
		}

		return idDefEnd + 1; //+1 for : at the end of id definition
	}

	/** Lexes identifier content, terminated by the nearest ',' or '}'. */
	private static int lexIdentifierContent (LexerContext ctx, String usl, int i) {
		int commaIndex = usl.indexOf(',', i);
		int curlyIndex = usl.indexOf('}', i);
		if (commaIndex == -1) commaIndex = Integer.MAX_VALUE;
		if (curlyIndex == -1) curlyIndex = Integer.MAX_VALUE;
		int end = Math.min(commaIndex, curlyIndex);
		//FIX: the original compared 'end' to -1, which could never match after the MAX_VALUE substitution
		//above — the "content end could not be found" error was unreachable
		if (end == Integer.MAX_VALUE) Utils.throwException("Identifier content end could not be found", usl, i);

		String content = usl.substring(i, end);
		int origLength = content.length(); //advance by the untrimmed length so 'i' stays aligned with the input

		if (content.endsWith(" ")) content = content.substring(0, content.length() - 1);
		ctx.tokens.add(new Token(usl, i, Type.IDENTIFIER_CONTENT, content));

		i = i + origLength;
		if (usl.charAt(i) == ',') i++;
		return i;
	}

	/** Lexes a global style declaration (".name:" with optional inherits clause). Renamed from 'leaGlobalStyleDeclaration' (typo). */
	private static int lexGlobalStyleDeclaration (LexerContext ctx, String usl, int i) {
		int end = usl.indexOf(':', i);
		if (end == -1) Utils.throwException("Global style definition end could not be found", usl, i);
		String declaration = usl.substring(i, end);

		if (declaration.contains(" ")) { //global style definition contains inherits
			if (declaration.contains(INHERITS) == false) Utils.throwException("Expected inherits", usl, i);
			String[] parts = declaration.split(" ", 3);
			if (parts.length != 3) Utils.throwException("Invalid inherits format", usl, i);

			ctx.tokens.add(new Token(usl, i, Type.GLOBAL_STYLE, parts[0]));
			ctx.tokens.add(new Token(usl, i, Type.INHERITS));
			lexInherits(usl, i, ctx, parts[2]);
		} else {
			ctx.tokens.add(new Token(usl, i, Type.GLOBAL_STYLE, declaration));
		}

		return end + 1;
	}

	/** Emits one INHERITS_NAME token per comma-separated name in {@code inheritString}. */
	private static void lexInherits (String usl, int i, LexerContext ctx, String inheritString) {
		inheritString = inheritString.replace(" ", "");
		String[] inherits = inheritString.split(",");
		for (String inherit : inherits)
			ctx.tokens.add(new Token(usl, i, Type.INHERITS_NAME, inherit));
	}

	/** Checks whether the input at {@code i} looks like an identifier definition ("name:" or "name inherits x:"). */
	private static boolean checkIdentifierDef (String usl, int i) {
		int colonIndex = usl.indexOf(':', i);
		int spaceIndex = usl.indexOf(' ', i);
		if (colonIndex == -1) return false;
		if (colonIndex < spaceIndex)
			return true;
		else {
			if (usl.substring(spaceIndex + 1, colonIndex).startsWith(INHERITS)) return true;
		}
		return false;
	}

	/** Lexes a '#' style block definition, optionally with an "extends" clause. */
	private static int lexStyleBlock (LexerContext ctx, String usl, int i) {
		int blockDefEnd = usl.indexOf(":", i);
		if (blockDefEnd == -1) Utils.throwException("Expected block definition end", usl, i);
		String blockDef = usl.substring(i, blockDefEnd);

		if (blockDef.contains(" ")) { //blocks definition contains extends
			if (blockDef.contains(EXTENDS) == false) Utils.throwException("Expected extends", usl, i);
			String[] parts = blockDef.split(" ");
			if (parts.length != 3) Utils.throwException("Invalid extends format", usl, i);

			ctx.tokens.add(new Token(usl, i, Type.STYLE_BLOCK, parts[0]));
			ctx.tokens.add(new Token(usl, i, Type.STYLE_BLOCK_EXTENDS, parts[2]));
		} else
			ctx.tokens.add(new Token(usl, i, Type.STYLE_BLOCK, blockDef));

		return blockDefEnd + 1; //+1 for : at the end of style definition
	}

	/** Lexes a '^' style block override definition; overrides must not extend other styles. */
	private static int lexStyleBlockOverride (LexerContext ctx, String usl, int i) {
		int blockDefEnd = usl.indexOf(":", i);
		if (blockDefEnd == -1) Utils.throwException("Expected block definition end", usl, i);
		String blockDef = usl.substring(i, blockDefEnd);

		if (blockDef.contains(" ")) {
			if (blockDef.contains("extends"))
				Utils.throwException("Override style block cannot extend other style", usl, i);
			else
				Utils.throwException("Invalid block definition", usl, i);
		} else
			ctx.tokens.add(new Token(usl, i, Type.STYLE_BLOCK_OVERRIDE, blockDef));

		return blockDefEnd + 1; //+1 for : at the end of style definition
	}

	/**
	 * Resolves an include directive: {@code include <name>} loads a bundled resource,
	 * {@code include "path"} loads a file from disk. The included content is lexed immediately.
	 */
	private static int parseAndLexInclude (LexerContext ctx, String usl, int i) {
		if (usl.startsWith("<", i)) { //internal (bundled) include
			int includeEnd = usl.indexOf(">", i);
			if (includeEnd == -1) Utils.throwException("Invalid include format", usl, i);
			String content = null;
			String includeName = usl.substring(i + 1, includeEnd);

			if (includeName.equals("gdx"))
				content = streamToString(USL.class.getResourceAsStream("gdx.usl"));
			else if (includeName.equals("visui-0.7.7"))
				content = streamToString(USL.class.getResourceAsStream("visui-0.7.7.usl"));
			else if (includeName.equals("visui-0.8.0"))
				content = streamToString(USL.class.getResourceAsStream("visui-0.8.0.usl"));
			else if (includeName.equals("visui-0.8.1"))
				content = streamToString(USL.class.getResourceAsStream("visui-0.8.1.usl"));
			else if (includeName.equals("visui-0.8.2"))
				content = streamToString(USL.class.getResourceAsStream("visui-0.8.2.usl"));
			else if (includeName.equals("visui-0.9.0"))
				content = streamToString(USL.class.getResourceAsStream("visui-0.9.0.usl"));
			else if (includeName.equals("visui-0.9.1") || includeName.equals("visui")) //"visui" aliases the latest release
				content = streamToString(USL.class.getResourceAsStream("visui-0.9.1.usl"));
			else if (includeName.equals("visui-0.9.2")) //snapshot
				content = streamToString(USL.class.getResourceAsStream("visui-0.9.2.usl"));
			else
				Utils.throwException("Invalid internal include file: " + includeName, usl, i);

			lexUsl(ctx, content);
			return includeEnd + 1;
		} else if (usl.startsWith("\"", i)) { //external (file) include
			int includeEnd = usl.indexOf("\"", i + 1);
			if (includeEnd == -1) Utils.throwException("Invalid include format", usl, i);
			String path = usl.substring(i + 1, includeEnd);

			File file = new File(path);
			if (file.exists() == false)
				Utils.throwException("Include file does not exist, file: " + file.getAbsolutePath(), usl, i);
			lexUsl(ctx, Utils.readFile(file));
			return includeEnd + 1;
		} else
			Utils.throwException("Invalid include format", usl, i);

		return -1; //unreachable — Utils.throwException always throws
	}

	/** Advances past a '//' comment, returning the index of the terminating newline (or end of input). */
	private static int skipLineComment (String usl, int i) {
		for (; i < usl.length(); i++)
			if (usl.charAt(i) == '\n') break;
		return i;
	}

	/** Returns the last element of {@code list}, or null when the list is null or empty. */
	private static <T> T peek (List<T> list) {
		if (list != null && !list.isEmpty()) {
			return list.get(list.size() - 1);
		}
		return null;
	}

	/** Reads the whole stream into a String. */
	private static String streamToString (InputStream is) {
		//"\\A" delimiter makes the scanner return the entire input as one token
		Scanner s = new Scanner(is).useDelimiter("\\A");
		try {
			return s.hasNext() ? s.next() : "";
		} finally {
			s.close(); //FIX: the scanner (and underlying stream) was previously leaked
		}
	}
}
package libmemcached.wrapper;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

import libmemcached.exception.LibMemcachedException;
import libmemcached.wrapper.type.BehaviorType;
import libmemcached.wrapper.type.ReturnType;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;

/**
 * Integration tests for {@link MemcachedStorage}.
 * NOTE(review): these tests require a live memcached instance on localhost:11211.
 */
public class MemcachedStorageTest {

    protected MemcachedClient client;
    protected MemcachedServerList servers;

    @Before
    public void setup() {
        client = new MemcachedClient();
    }

    @After
    public void destroy() {
        if (null != servers) {
            client.getStorage().flush(0); // wipe everything the test wrote
            servers = null;
        }
        client.getBehavior().set(BehaviorType.SUPPORT_CAS, 0); // reset CAS behavior for the next test
        client.free();
        client = null;
    }

    /** Connects the client to the local memcached server. */
    private void connectServer() throws LibMemcachedException {
        servers = client.getServerList();
        servers.append("localhost", 11211);
        servers.push();
    }

    /** Enables CAS support on the client. */
    private void behaviorCas() {
        client.getBehavior().set(BehaviorType.SUPPORT_CAS, 1);
    }

    @Test
    public void get_no_set() throws LibMemcachedException {
        connectServer();
        MemcachedStorage storage = client.getStorage();
        Assert.assertNull(storage.get("key-1"));
    }

    @Test
    public void get() throws LibMemcachedException {
        connectServer();
        MemcachedStorage storage = client.getStorage();
        Assert.assertEquals(storage.set("key-1", "value-1", 10, 0), ReturnType.SUCCESS);
        Assert.assertEquals(storage.get("key-1"), "value-1");
    }

    @Test
    public void getResult() throws LibMemcachedException {
        connectServer();
        MemcachedStorage storage = client.getStorage();
        Assert.assertEquals(storage.set("key-1", "value-1", 10, 0), ReturnType.SUCCESS);
        SimpleResult result = storage.getResult("key-1");
        Assert.assertEquals(result.getKey(), "key-1");
        Assert.assertEquals(result.getValue(), "value-1");
        Assert.assertEquals(result.getFlags(), 0);
    }

    @Test
    public void getMulti() {
        // TODO(review): empty test body — implement or remove
    }

    @Test
    public void getMulti_fetcher() throws LibMemcachedException {
        connectServer();

        int expiration = 10;
        int flags = 0;

        MemcachedStorage storage = client.getStorage();
        Assert.assertEquals(storage.set("key-1", "value-1", expiration, flags), ReturnType.SUCCESS);
        Assert.assertEquals(storage.set("key-2", "value-2", expiration, flags), ReturnType.SUCCESS);
        Assert.assertEquals(storage.set("key-3", "value-3", expiration, flags), ReturnType.SUCCESS);
        Assert.assertEquals(storage.set("key-4", "value-4", expiration, flags), ReturnType.SUCCESS);
        Assert.assertEquals(storage.set("key-5", "value-5", expiration, flags), ReturnType.SUCCESS);

        final HashMap<String, String> map = new HashMap<String, String>() {
            private static final long serialVersionUID = 1L;
            {
                put("key-1", "value-1");
                put("key-2", "value-2");
                put("key-3", "value-3");
                put("key-4", "value-4");
                put("key-5", "value-5");
            }
        };

        storage.getMulti(new Fetcher() {
            public void fetch(SimpleResult result) {
                Assert.assertEquals(map.get(result.key), result.value);
                map.remove(result.key); // each key must be delivered exactly once
            }
        }, map.keySet().toArray(new String[5]));

        // every expected key was fetched
        Assert.assertEquals(map.size(), 0);
    }

    @Test
    public void gets_no_store() throws LibMemcachedException {
        connectServer();
        MemcachedStorage storage = client.getStorage();
        Assert.assertNull(storage.gets("hoge"));
    }

    @Test
    public void gets_no_behavior_set() throws LibMemcachedException {
        connectServer();
        MemcachedStorage storage = client.getStorage();
        storage.set("hoge", "1234", 10, 0);
        MemcachedResult result = storage.gets("hoge");
        System.out.println("cas => " + result.getCAS());
        Assert.assertNotNull(result);
        // without SUPPORT_CAS enabled the CAS value stays 0
        Assert.assertEquals(result.getCAS(), 0);
    }

    @Test
    public void gets_cas() throws LibMemcachedException {
        connectServer();
        behaviorCas();
        MemcachedStorage storage = client.getStorage();
        storage.set("hoge", "1234", 10, 0);
        MemcachedResult result = storage.gets("hoge");
        System.out.println("cas => " + result.getCAS());
        Assert.assertNotNull(result);
        Assert.assertTrue(-1 < result.getCAS());
    }

    @Test
    public void gets_1() throws LibMemcachedException {
        connectServer();
        MemcachedStorage storage = client.getStorage();
        storage.set("key-1", "value-1", 10, 0);
        {
            MemcachedResult result = storage.gets("key-1");
            Assert.assertEquals(result.getKey(), "key-1");
            Assert.assertEquals(result.getValue(), "value-1");
            Assert.assertNotNull(result.getCAS());
        }
        {
            // unknown key yields null
            MemcachedResult result = storage.gets("key-2");
            Assert.assertNull(result);
        }
    }

    @Test
    public void gets_2() throws LibMemcachedException {
        connectServer();
        MemcachedStorage storage = client.getStorage();
        storage.set("key-1", "value-1", 10, 0);
        storage.set("key-2", "value-2", 10, 0);
        {
            MemcachedResult result = storage.gets("key-1");
            Assert.assertEquals(result.getKey(), "key-1");
            Assert.assertEquals(result.getValue(), "value-1");
            Assert.assertNotNull(result.getCAS());
        }
        {
            MemcachedResult result = storage.gets("key-2");
            Assert.assertEquals(result.getKey(), "key-2");
            Assert.assertEquals(result.getValue(), "value-2");
            Assert.assertNotNull(result.getCAS());
        }
        {
            MemcachedResult result = storage.gets("key-3");
            Assert.assertNull(result);
        }
    }

    @Test
    public void gets_set() throws LibMemcachedException {
        connectServer();
        MemcachedStorage storage = client.getStorage();
        storage.gets("test"); // gets before set must not interfere with the subsequent set
        storage.set("test", "test-value", 10, 0);
        Assert.assertEquals(storage.get("test"), "test-value");
    }

    @Test
    public void gets_cas_2() throws LibMemcachedException {
        connectServer();
        behaviorCas();
        MemcachedStorage storage = client.getStorage();
        storage.gets("test");
        storage.set("test", "test-value", 10, 0);
        {
            MemcachedResult result = storage.gets("test");
            ReturnType rt = storage.cas("test", "test-value2", 10, 0, result.getCAS());
            if (!ReturnType.SUCCESS.equals(rt)) {
                Assert.fail();
            }
        }
        {
            MemcachedResult result = storage.gets("test");
            ReturnType rt = storage.cas("test", "test-value3", 10, 0, result.getCAS());
            if (!ReturnType.SUCCESS.equals(rt)) {
                Assert.fail();
            }
        }
        Assert.assertEquals(storage.get("test"), "test-value3");
    }

    @Test
    public void cas() throws LibMemcachedException {
        connectServer();
        behaviorCas();
        MemcachedStorage storage = client.getStorage();
        {
            ReturnType rt = storage.cas("test", "value-1", 10, 0, 0); // cas key dnt accept: 0
            Assert.assertEquals(rt, ReturnType.PROTOCOL_ERROR);
        }
        {
            ReturnType rt = storage.cas("test", "value-1", 10, 0, 1);
            Assert.assertEquals(rt, ReturnType.NOTFOUND);
        }
        // neither failed cas call must have stored anything
        MemcachedResult result = storage.gets("test");
        Assert.assertNull(result);
    }

    @Test
    @Ignore("BUFFER_REQUESTS gets might not work well without NO_BLOCK also enabled")
    public void gets_cas_with_buffer_request() throws LibMemcachedException {
        connectServer();
        behaviorCas();
        client.getBehavior().set(BehaviorType.BUFFER_REQUESTS, true);
        MemcachedStorage storage = client.getStorage();
        storage.gets("test");
        storage.set("test", "test-value", 10, 0);
        {
            MemcachedResult result = storage.gets("test");
            ReturnType rt = storage.cas("test", "test-value2", 10, 0, result.getCAS());
            if (!ReturnType.SUCCESS.equals(rt)) {
                Assert.fail();
            }
        }
        {
            MemcachedResult result = storage.gets("test");
            ReturnType rt = storage.cas("test", "test-value3", 10, 0, result.getCAS());
            if (!ReturnType.SUCCESS.equals(rt)) {
                Assert.fail();
            }
        }
        Assert.assertEquals(storage.get("test"), "test-value3");
    }

    @Test
    public void gets_cas_with_buffer_request__and__no_block() throws LibMemcachedException {
        connectServer();
        behaviorCas();
        client.getBehavior().set(BehaviorType.BUFFER_REQUESTS, true);
        client.getBehavior().set(BehaviorType.NO_BLOCK, true);
        MemcachedStorage storage = client.getStorage();
        storage.gets("test");
        storage.set("test", "test-value", 10, 0);
        {
            MemcachedResult result = storage.gets("test");
            ReturnType rt = storage.cas("test", "test-value2", 10, 0, result.getCAS());
            if (!ReturnType.SUCCESS.equals(rt)) {
                Assert.fail();
            }
        }
        {
            MemcachedResult result = storage.gets("test");
            ReturnType rt = storage.cas("test", "test-value3", 10, 0, result.getCAS());
            if (!ReturnType.SUCCESS.equals(rt)) {
                Assert.fail();
            }
        }
        Assert.assertEquals(storage.get("test"), "test-value3");
    }

    /** Runs 50 CAS-increment tasks and verifies every produced value 0..49 appears exactly once. */
    @Test
    public void cas_MT() throws LibMemcachedException, InterruptedException, ExecutionException {
        connectServer();
        behaviorCas();

        List<String> accepts = new ArrayList<String>();
        List<Callable<String>> tasks = new ArrayList<Callable<String>>();
        for (int i = 0; i < 50; i++) {
            tasks.add(new TestCAS_increment(client.getStorage()));
            accepts.add(Integer.toString(i));
        }

        ExecutorService exec = Executors.newSingleThreadExecutor();
        try {
            List<Future<String>> futures = exec.invokeAll(tasks);
            for (Future<String> f : futures) {
                String v = f.get();
                System.out.println(v);
                Assert.assertTrue(accepts.contains(v));
                accepts.remove(v);
            }
            Assert.assertTrue(accepts.isEmpty());
        } finally {
            // FIX: shut the executor down so its non-daemon worker thread does not outlive the test
            exec.shutdown();
        }
    }

    /** Increments the "cas-test" counter via gets+cas, retrying on CAS conflicts (max 10 attempts). */
    private static class TestCAS_increment implements Callable<String> {
        MemcachedStorage s;

        TestCAS_increment(MemcachedStorage s) {
            this.s = s;
        }

        public String call() throws Exception {
            int i = 0;
            while (true) {
                if (10 < i++) {
                    throw new RuntimeException("max execution");
                }

                MemcachedResult r = s.gets("cas-test");
                if (r == null) {
                    // first writer initializes the counter
                    s.set("cas-test", "0", 10, 0);
                    return "0";
                }

                int val = Integer.parseInt(r.getValue());
                String valStr = Integer.toString(val + 1);
                ReturnType rt = s.cas("cas-test", valStr, 10, 0, r.getCAS());
                if (!ReturnType.SUCCESS.equals(rt)) {
                    continue; // lost the CAS race — retry with a fresh gets
                }
                return valStr;
            }
        }
    }
}
package obt.completablefuture;

import org.junit.Test;

import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static java.lang.Runtime.getRuntime;
import static java.lang.System.out;
import static java.util.concurrent.CompletableFuture.supplyAsync;
import static java.util.concurrent.Executors.newFixedThreadPool;
import static obt.completablefuture.Support.*;
import static obt.completablefuture.WebCralwer.download;
import static obt.parallelstreams.Utils.println;
import static obt.parallelstreams.Utils.printlnCurrentThread;

/** Demonstration tests for the {@link CompletableFuture} API. */
public class CompletableFutureTest {

    // shared pool for the pageRank demo; sized to the machine, lives for the whole test run
    public static final ExecutorService EXECUTOR_SERVICE = newFixedThreadPool(getRuntime().availableProcessors());

    @Test
    public void completeFuture() throws Exception {
        CompletableFuture<Long> f = new CompletableFuture<>();

        // when 'f' will be available print it!
        CompletableFuture<Void> f2 = f.thenAccept(out::println);

        // somewhere else in the system
        f.complete(42L);
        f2.get();
    }

    @Test
    public void completeFutureWithExecutor() throws Exception {
        // FIX: keep a handle on the pool so it can be shut down — it was previously leaked
        ExecutorService pool = newFixedThreadPool(777);
        try {
            CompletableFuture<Long> f = new CompletableFuture<>();
            CompletableFuture<Void> f2 = f.thenAcceptAsync(out::println, pool);
            f.complete(42L);
            f2.get();
        } finally {
            pool.shutdown();
        }
    }

    @Test
    public void supplyIt() throws Exception {
        printlnCurrentThread();
        CompletableFuture<Long> f = CompletableFuture
                .supplyAsync(() -> {
                    printlnCurrentThread();
                    return 42L;
                });
        f.get();
    }

    @Test
    public void acceptIt() throws Exception {
        printlnCurrentThread();
        CompletableFuture<Void> f = supplyAsync(() -> {
            printlnCurrentThread();
            return 42L; // was '42l' — uppercase L avoids confusion with '1'
        })
                .thenAccept(v -> printlnCurrentThread());
        f.get();
    }

    @Test
    public void acceptItAsynchronously() throws Exception {
        printlnCurrentThread();
        CompletableFuture<Void> f = supplyAsync(() -> {
            printlnCurrentThread();
            return 42L; // was '42l'
        })
                .thenAcceptAsync(v -> printlnCurrentThread());
        f.get();
    }

    @Test
    public void chaining() throws Exception {
        CompletableFuture<Long> f = new CompletableFuture<>();
        // FIX: keep the tail of the chain and wait on it — previously the async stages
        // could still be running (or not run at all) when the test method returned
        CompletableFuture<Void> done = f
                .thenApplyAsync(v -> v * 2)
                .thenApplyAsync(v -> v - 42)
                .thenAccept(out::println);
        f.complete(42L);
        done.get();
    }

    @Test
    public void combining() throws Exception {
        CompletableFuture<Pizza> bakePizza = bakePizza();
        CompletableFuture<Coffee> brewCoffee = brewCoffee();

        // thenCombine joins two independent futures once both complete
        CompletableFuture<String> code = bakePizza
                .thenCombineAsync(brewCoffee, (pizza, coffee) -> "Piece of code");
        println(code.get());
    }

    @Test
    public void composingUgly() throws Exception {
        CompletableFuture<Coffee> drinkCoffee = starbucksCoffee();
        CompletableFuture<Pizza> eatPizza = bakePizza();

        // thenApply on a future-returning function nests futures — this is the anti-pattern
        CompletableFuture<CompletableFuture<Coffee>> fed = eatPizza.thenApply(p -> drinkCoffee);

        fed.thenAccept(pizzaDone -> pizzaDone
                .thenAccept(
                        coffeeDone -> println("Some ugly piece of code")));
        fed.get();
    }

    @Test
    public void composingNice() throws Exception {
        CompletableFuture<Coffee> drinkCoffee = starbucksCoffee();
        CompletableFuture<Pizza> eatPizza = bakePizza();

        // thenCompose flattens the nested future from composingUgly
        CompletableFuture<Coffee> fed = eatPizza.thenCompose(p -> drinkCoffee);
        fed.thenAccept(allDone -> println("Some ugly piece of code"));
        fed.get();
    }

    @Test
    public void horseRacing() throws Exception {
        CompletableFuture<Horse> blackHorse = blackHorse();
        CompletableFuture<Horse> prettyGoodHorse = prettyGoodHorse();

        // applyToEither resolves with whichever future completes first
        CompletableFuture<Horse> winner = blackHorse.applyToEither(prettyGoodHorse, ignoreMe());
        println(winner.get());
    }

    @Test
    public void pageRank() throws Exception {
        List<String> websites = notYetIndexedSites();

        // download on the shared pool, parse and rank as follow-up async stages
        Stream<CompletableFuture<Long>> $$$ = websites.stream()
                .map(website -> supplyAsync(() -> download(website), EXECUTOR_SERVICE))
                .map(rawContentFuture -> rawContentFuture.thenApplyAsync(PageBuilder::parse))
                .map(documentFuture -> documentFuture.thenComposeAsync(Ranker::rank));

        List<CompletableFuture<Long>> ranks = $$$.collect(Collectors.<CompletableFuture<Long>>toList());

        // allOf waits for every rank to be computed before reading the results
        CompletableFuture.allOf(ranks.toArray(new CompletableFuture[ranks.size()])).join();
        println("All ranks are ready!");
        ranks.stream().forEach(r -> println(r.join()));
    }
}
package org.broadinstitute.hellbender.tools.spark.sv.evidence; import com.esotericsoftware.kryo.DefaultSerializer; import com.esotericsoftware.kryo.Kryo; import com.esotericsoftware.kryo.io.Input; import com.esotericsoftware.kryo.io.Output; import com.google.common.annotations.VisibleForTesting; import htsjdk.samtools.SAMFileHeader; import htsjdk.samtools.SAMReadGroupRecord; import htsjdk.samtools.SAMSequenceDictionary; import htsjdk.samtools.SAMSequenceRecord; import org.apache.logging.log4j.Logger; import org.apache.spark.api.java.JavaRDD; import org.broadinstitute.hellbender.exceptions.GATKException; import org.broadinstitute.hellbender.tools.spark.sv.utils.SVUtils; import org.broadinstitute.hellbender.tools.spark.utils.IntHistogram; import org.broadinstitute.hellbender.utils.gcs.BucketUtils; import org.broadinstitute.hellbender.utils.read.CigarUtils; import org.broadinstitute.hellbender.utils.read.GATKRead; import java.io.BufferedWriter; import java.io.IOException; import java.io.OutputStreamWriter; import java.io.Writer; import java.util.*; /** * A bag of data about reads: contig name to id mapping, fragment length statistics by read group, mean length. * The fragment length statistics pertain to a library, but they're accessed by read group. (I.e., in the * readGroupToFragmentStatistics map, all read groups that are derived from a given library point to the same set of * statistics.) 
*/ @DefaultSerializer(ReadMetadata.Serializer.class) public class ReadMetadata { private final Set<Integer> crossContigIgnoreSet; private final Map<String, Integer> contigNameToID; private final String[] contigIDToName; private final Map<String, String> readGroupToLibrary; private final long nReads; private final int avgReadLen; private final long nRefBases; private final long maxReadsInPartition; private final float coverage; private final float meanBaseQuality; private final PartitionBounds[] partitionBounds; private final Map<String, LibraryStatistics> libraryToFragmentStatistics; private static final String NO_GROUP = "NoGroup"; private static final float MIN_COVERAGE = 10.f; private static final float DEFAULT_MEAN_BASE_QUALITY_FOR_TESTING = 30.f; public ReadMetadata( final Set<Integer> crossContigIgnoreSet, final SAMFileHeader header, final int maxTrackedFragmentLength, final JavaRDD<GATKRead> unfilteredReads, final SVReadFilter filter, final Logger logger ) { this.crossContigIgnoreSet = crossContigIgnoreSet; contigNameToID = buildContigNameToIDMap(header.getSequenceDictionary()); contigIDToName = buildContigIDToNameArray(contigNameToID); readGroupToLibrary = buildGroupToLibMap(header); final Map<String, String> grpToLib = readGroupToLibrary; final List<PartitionStatistics> perPartitionStatistics = unfilteredReads .mapPartitions(readItr -> SVUtils.singletonIterator( new PartitionStatistics(readItr, filter, maxTrackedFragmentLength, grpToLib))) .collect(); maxReadsInPartition = perPartitionStatistics.stream().mapToLong(PartitionStatistics::getNReads).max().orElse(0L); final int nPartitions = perPartitionStatistics.size(); partitionBounds = new PartitionBounds[nPartitions]; for ( int idx = 0; idx != nPartitions; ++idx ) { final PartitionStatistics stats = perPartitionStatistics.get(idx); final Integer firstContigID = contigNameToID.get(stats.getFirstContig()); final Integer lastContigID = contigNameToID.get(stats.getLastContig()); partitionBounds[idx] = new 
PartitionBounds( firstContigID==null ? PartitionBounds.UNMAPPED : firstContigID, stats.getFirstLocation(), lastContigID==null ? PartitionBounds.UNMAPPED : lastContigID, stats.getLastLocation(), stats.getSpan() ); } final Map<String, LibraryRawStatistics> combinedMaps = perPartitionStatistics.stream() .map(PartitionStatistics::getLibraryNameToStatisticsMap) .reduce(new HashMap<>(), ReadMetadata::combineMaps); nReads = combinedMaps.values().stream().mapToLong(LibraryRawStatistics::getNReads).sum(); final long nReadBases = combinedMaps.values().stream().mapToLong(LibraryRawStatistics::getNBases).sum(); avgReadLen = (int)(nReadBases / nReads); nRefBases = header.getSequenceDictionary().getSequences() .stream().mapToLong(SAMSequenceRecord::getSequenceLength).sum(); final float cov = (float)nReadBases / nRefBases; if ( cov >= MIN_COVERAGE ) coverage = cov; else { logger.warn("Apparent coverage (" + cov + ") too low. Pretending it's 10x."); coverage = MIN_COVERAGE; } final long totalBaseQuals = combinedMaps.values().stream().mapToLong(LibraryRawStatistics::getTotalBaseQuality).sum(); meanBaseQuality = (float)totalBaseQuals / nReadBases; libraryToFragmentStatistics = new HashMap<>(SVUtils.hashMapCapacity(combinedMaps.size())); combinedMaps.forEach( (libName, rawStats) -> libraryToFragmentStatistics.put(libName,rawStats.createLibraryStatistics(nRefBases))); } /** This constructor is for testing only. It applies a single LibraryStatistics object to all libraries. 
*/ @VisibleForTesting ReadMetadata( final Set<Integer> crossContigIgnoreSet, final SAMFileHeader header, final LibraryStatistics stats, final PartitionBounds[] partitionBounds, final long nReads, final long maxReadsInPartition, final float coverage ) { this.crossContigIgnoreSet = crossContigIgnoreSet; contigNameToID = buildContigNameToIDMap(header.getSequenceDictionary()); contigIDToName = buildContigIDToNameArray(contigNameToID); readGroupToLibrary = buildGroupToLibMap(header); this.nReads = nReads; nRefBases = header.getSequenceDictionary().getSequences() .stream().mapToLong(SAMSequenceRecord::getSequenceLength).sum(); avgReadLen = (int)(coverage * nRefBases / nReads); this.maxReadsInPartition = maxReadsInPartition; this.coverage = coverage; meanBaseQuality = DEFAULT_MEAN_BASE_QUALITY_FOR_TESTING; this.partitionBounds = partitionBounds; libraryToFragmentStatistics = new HashMap<>(6); libraryToFragmentStatistics.put(null, stats); for ( final SAMReadGroupRecord readGroupRecord : header.getReadGroups() ) { libraryToFragmentStatistics.put(readGroupRecord.getLibrary(), stats); } } private ReadMetadata( final Kryo kryo, final Input input ) { final int crossContigIgnoreSetSize = input.readInt(); this.crossContigIgnoreSet = new HashSet<>(SVUtils.hashMapCapacity(crossContigIgnoreSetSize)); for ( int idx = 0; idx != crossContigIgnoreSetSize; ++idx ) { crossContigIgnoreSet.add(input.readInt()); } final int groupMapSize = input.readInt(); readGroupToLibrary = new HashMap<>(SVUtils.hashMapCapacity(groupMapSize)); for ( int idx = 0; idx != groupMapSize; ++idx ) { final String groupName = input.readString(); final String libName = input.readString(); readGroupToLibrary.put(groupName, libName); } final int contigMapSize = input.readInt(); contigNameToID = new HashMap<>(SVUtils.hashMapCapacity(contigMapSize)); for ( int idx = 0; idx != contigMapSize; ++idx ) { final String contigName = input.readString(); final int contigId = input.readInt(); contigNameToID.put(contigName, 
contigId); } contigIDToName = buildContigIDToNameArray(contigNameToID); nReads = input.readLong(); avgReadLen = input.readInt(); nRefBases = input.readLong(); maxReadsInPartition = input.readLong(); coverage = input.readFloat(); meanBaseQuality = input.readFloat(); final int nPartitions = input.readInt(); partitionBounds = new PartitionBounds[nPartitions]; final PartitionBounds.Serializer boundsSerializer = new PartitionBounds.Serializer(); for ( int idx = 0; idx != nPartitions; ++idx ) { partitionBounds[idx] = boundsSerializer.read(kryo, input, PartitionBounds.class); } final int libMapSize = input.readInt(); final LibraryStatistics.Serializer statsSerializer = new LibraryStatistics.Serializer(); libraryToFragmentStatistics = new HashMap<>(SVUtils.hashMapCapacity(libMapSize)); for ( int idx = 0; idx != libMapSize; ++idx ) { final String libraryName = input.readString(); final LibraryStatistics stats = statsSerializer.read(kryo, input, LibraryStatistics.class); libraryToFragmentStatistics.put(libraryName, stats); } } private void serialize( final Kryo kryo, final Output output ) { output.writeInt(crossContigIgnoreSet.size()); for ( final Integer tigId : crossContigIgnoreSet ) { output.writeInt(tigId); } output.writeInt(readGroupToLibrary.size()); for ( final Map.Entry<String, String> entry : readGroupToLibrary.entrySet() ) { output.writeString(entry.getKey()); output.writeString(entry.getValue()); } output.writeInt(contigNameToID.size()); for ( final Map.Entry<String, Integer> entry : contigNameToID.entrySet() ) { output.writeString(entry.getKey()); output.writeInt(entry.getValue()); } output.writeLong(nReads); output.writeInt(avgReadLen); output.writeLong(nRefBases); output.writeLong(maxReadsInPartition); output.writeFloat(coverage); output.writeFloat(meanBaseQuality); output.writeInt(partitionBounds.length); final PartitionBounds.Serializer boundsSerializer = new PartitionBounds.Serializer(); for ( final PartitionBounds bounds : partitionBounds ) { 
boundsSerializer.write(kryo, output, bounds); } output.writeInt(libraryToFragmentStatistics.size()); final LibraryStatistics.Serializer statsSerializer = new LibraryStatistics.Serializer(); for ( final Map.Entry<String, LibraryStatistics> entry : libraryToFragmentStatistics.entrySet() ) { output.writeString(entry.getKey()); statsSerializer.write(kryo, output, entry.getValue()); } } public boolean ignoreCrossContigID( final int contigID ) { return crossContigIgnoreSet.contains(contigID); } @VisibleForTesting Set<Integer> getCrossContigIgnoreSet() { return crossContigIgnoreSet; } public Map<String, Integer> getContigNameMap() { return Collections.unmodifiableMap(contigNameToID); } public int getContigID( final String contigName ) { final Integer result = contigNameToID.get(contigName); if ( result == null ) { throw new GATKException("No such contig name: " + contigName); } return result; } public String getContigName( final int contigID ) { return contigIDToName[contigID]; } public String getLibraryName( final String readGroupName ) { if ( readGroupName == null ) return null; if ( !readGroupToLibrary.containsKey(readGroupName) ) { throw new GATKException("No such read group in header: "+readGroupName); } return readGroupToLibrary.get(readGroupName); } @VisibleForTesting Map<String, String> getReadGroupToLibraryMap() { return readGroupToLibrary; } public float getZishScore( final String readGroup, final int fragmentSize ) { return getFragmentLengthStatistics(readGroup).getZishScore(fragmentSize); } @VisibleForTesting LibraryStatistics getFragmentLengthStatistics( final String readGroup ) { return libraryToFragmentStatistics.get(getLibraryName(readGroup)); } public long getNReads() { return nReads; } public int getAvgReadLen() { return avgReadLen; } public long getNRefBases() { return nRefBases; } public int getNPartitions() { return partitionBounds.length; } public PartitionBounds getPartitionBounds( final int partitionIdx ) { return partitionBounds[partitionIdx]; } 
@VisibleForTesting PartitionBounds[] getAllPartitionBounds() { return partitionBounds; }
    public long getMaxReadsInPartition() { return maxReadsInPartition; }
    public float getCoverage() { return coverage; }
    public float getMeanBaseQuality() { return meanBaseQuality; }

    /**
     * Estimates coverage by error-free kmers of the given size.
     * Scales total coverage by the probability that all K bases of a kmer are accurate,
     * i.e., p = (1 - 10**(-Q/10))**K with Q = meanBaseQuality.
     * Computed as exp(K * log1p(-10**(-Q/10))) -- log1p keeps the small-probability
     * term numerically stable.
     */
    public float getAccurateKmerCoverage( final int kSize ) {
        // i.e., p = (1 - 10**(-Q/10))**K
        final float probabilityOfAnAccurateKmer =
                (float)Math.exp(kSize * Math.log1p(-Math.pow(10.,meanBaseQuality/-10.)));
        return coverage * probabilityOfAnAccurateKmer;
    }

    /**
     * Median of the per-partition spans.
     * Sorts a copy of the spans; for an even count this returns the upper of the
     * two middle values (index length/2).
     */
    public int getMedianPartitionSpan() {
        final int[] spans = new int[partitionBounds.length];
        for ( int idx = 0; idx != partitionBounds.length; ++idx ) {
            spans[idx] = partitionBounds[idx].getSpan();
        }
        Arrays.sort(spans);
        return spans[partitionBounds.length/2];
    }

    // NOTE(review): exposes the internal mutable map directly (unlike getContigNameMap above).
    public Map<String, LibraryStatistics> getAllLibraryStatistics() { return libraryToFragmentStatistics; }

    /**
     * Statistics for a named library.
     * @throws GATKException if the library name is unknown
     */
    public LibraryStatistics getLibraryStatistics( final String libraryName ) {
        final LibraryStatistics stats = libraryToFragmentStatistics.get(libraryName);
        if ( stats == null ) {
            throw new GATKException("No such library: " + libraryName);
        }
        return stats;
    }

    /** Largest median fragment size across all libraries, or 0 if there are none. */
    public int getMaxMedianFragmentSize() {
        return libraryToFragmentStatistics.entrySet().stream()
                .mapToInt(entry -> entry.getValue().getMedian())
                .max()
                .orElse(0);
    }

    /**
     * Reduce-style combiner: folds {@code element}'s per-library stats into
     * {@code accumulator} (mutating it) and returns the accumulator.
     * Entries with no existing accumulator entry are inserted by reference.
     */
    private static Map<String, LibraryRawStatistics> combineMaps( final Map<String, LibraryRawStatistics> accumulator,
                                                                  final Map<String, LibraryRawStatistics> element ) {
        for ( final Map.Entry<String, LibraryRawStatistics> entry : element.entrySet() ) {
            final String libraryName = entry.getKey();
            final LibraryRawStatistics accumulatorStats = accumulator.get(libraryName);
            if ( accumulatorStats == null ) {
                accumulator.put(libraryName, entry.getValue());
            } else {
                LibraryRawStatistics.reduce(accumulatorStats, entry.getValue());
            }
        }
        return accumulator;
    }

    /** Maps each contig name to its index in the sequence dictionary. */
    public static Map<String, Integer> buildContigNameToIDMap( final SAMSequenceDictionary dictionary ) {
        final List<SAMSequenceRecord> contigs = dictionary.getSequences();
        final Map<String, Integer> contigNameToID = new HashMap<>(SVUtils.hashMapCapacity(contigs.size()));
        final int nContigs = contigs.size();
        for ( int contigID = 0; contigID < nContigs; ++contigID ) {
            contigNameToID.put(contigs.get(contigID).getSequenceName(), contigID);
        }
        return contigNameToID;
    }

    /** Inverts a name->ID map into an ID-indexed array of names (IDs assumed dense 0..size-1). */
    public static String[] buildContigIDToNameArray( final Map<String, Integer> nameToIDMap ) {
        final String[] result = new String[nameToIDMap.size()];
        for ( final Map.Entry<String, Integer> entry : nameToIDMap.entrySet() ) {
            result[entry.getValue()] = entry.getKey();
        }
        return result;
    }

    /** Maps each read-group ID in the header to its library name. */
    public static Map<String, String> buildGroupToLibMap( final SAMFileHeader header ) {
        final List<SAMReadGroupRecord> readGroups = header.getReadGroups();
        final int mapCapacity = SVUtils.hashMapCapacity(header.getReadGroups().size());
        final Map<String, String> readGroupToLibraryMap = new HashMap<>(mapCapacity);
        for ( final SAMReadGroupRecord groupRecord : readGroups ) {
            readGroupToLibraryMap.put(groupRecord.getId(), groupRecord.getLibrary());
        }
        return readGroupToLibraryMap;
    }

    /**
     * Dumps a human-readable metadata report to {@code filename}:
     * global counts, per-library statistics (including the template-size CDF as
     * cumulative counts), partition boundaries, and the contig ID->name map.
     * @throws GATKException on any I/O failure
     */
    public static void writeMetadata(final ReadMetadata readMetadata,
                                     final String filename ) {
        try ( final Writer writer =
                      new BufferedWriter(new OutputStreamWriter(BucketUtils.createFile(filename))) ) {
            writer.write("#reads:\t" + readMetadata.getNReads() + "\n");
            writer.write("#partitions:\t" + readMetadata.getNPartitions() + "\n");
            writer.write("max reads/partition:\t" + readMetadata.getMaxReadsInPartition() + "\n");
            writer.write("coverage:\t" + readMetadata.getCoverage() + "\n");
            writer.write( "meanQ:\t" + readMetadata.getMeanBaseQuality() + "\n");
            writer.write("\nLibrary Statistics\n");
            for ( final Map.Entry<String, LibraryStatistics> entry :
                    readMetadata.getAllLibraryStatistics().entrySet() ) {
                final LibraryStatistics stats = entry.getValue();
                String name = entry.getKey();
                // NO_GROUP is declared elsewhere in this class (outside this chunk).
                if ( name == null ) {
                    name = NO_GROUP;
                }
                final int median = stats.getMedian();
                writer.write(name + ":\t" + median + "-" + stats.getNegativeMAD() +
                        "+" + stats.getPositiveMAD() + "\t" + stats.getCoverage() + "\t" +
                        stats.getMeanBaseQuality() + "\t" + stats.getNReads() + "\t" +
                        stats.getReadStartFrequency() + "\n");
                final IntHistogram.CDF templateSizeCDF = stats.getCDF();
                final int cdfSize = templateSizeCDF.size();
                final long totalObservations = templateSizeCDF.getTotalObservations();
                writer.write("template size cumulative counts:");
                for(int idx = 0; idx < cdfSize; ++idx) {
                    // Convert each CDF fraction back to an (approximate) cumulative count.
                    final long cumulativeCounts = Math.round(templateSizeCDF.getFraction(idx) * totalObservations);
                    writer.write("\t" + cumulativeCounts);
                }
                writer.write("\n");
            }
            final PartitionBounds[] partitionBounds = readMetadata.partitionBounds;
            writer.write("\nPartition Boundaries\n");
            for ( int idx = 0; idx != partitionBounds.length; ++idx ) {
                final PartitionBounds bounds = partitionBounds[idx];
                writer.write(idx + "\t" + bounds.firstContigID + "\t" + bounds.getFirstStart() + "\t" +
                        bounds.getLastContigID() + "\t" + bounds.getLastEnd() + "\n");
            }
            writer.write("contigs map:\n");
            // Inner try gives a more specific error message for failures in the contig-map section.
            try {
                for (int i = 0; i < readMetadata.contigIDToName.length; ++i) {
                    writer.write(i + ":" + readMetadata.contigIDToName[i] + "\n");
                }
            } catch ( final IOException ex ) {
                throw new GATKException("Can't write metadata contig entry", ex);
            }
        } catch ( final IOException ioe ) {
            throw new GATKException("Can't write metadata file.", ioe);
        }
    }

    /** Kryo serializer for ReadMetadata; delegates to serialize() and the Kryo constructor. */
    public static final class Serializer extends com.esotericsoftware.kryo.Serializer<ReadMetadata> {
        @Override
        public void write( final Kryo kryo, final Output output, final ReadMetadata readMetadata ) {
            readMetadata.serialize(kryo, output);
        }

        @Override
        public ReadMetadata read( final Kryo kryo, final Input input, final Class<ReadMetadata> klass ) {
            return new ReadMetadata(kryo, input);
        }
    }

    /**
     * Raw, mutable per-library accumulators: a fragment-size histogram plus
     * read/base/quality totals. Folded across partitions via reduce().
     */
    @DefaultSerializer(LibraryRawStatistics.Serializer.class)
    public static final class LibraryRawStatistics {
        private final IntHistogram fragmentSizes;  // observed |template length| values
        private long nReads;
        private long nBases;
        private long totalBaseQuality;

        public LibraryRawStatistics( final int maxTrackedValue ) {
            fragmentSizes = new IntHistogram(maxTrackedValue);
            // totalBaseQuality relies on its default 0 here.
            nReads = nBases = 0;
        }

        private LibraryRawStatistics( final Kryo kryo, final Input input ) {
            fragmentSizes = new IntHistogram.Serializer().read(kryo, input, IntHistogram.class);
            nReads = input.readLong();
            nBases = input.readLong();
            totalBaseQuality = input.readLong();
        }

        // Write order must match the Kryo constructor's read order above.
        private void serialize( final Kryo kryo, final Output output ) {
            new IntHistogram.Serializer().write(kryo, output, fragmentSizes);
            output.writeLong(nReads);
            output.writeLong(nBases);
            output.writeLong(totalBaseQuality);
        }

        /**
         * Accumulates one read; the fragment-size histogram only records
         * |templateLength| when the caller deems the template length testable.
         */
        public void addRead( final int readLength, final int summedQuals, final int templateLength,
                             final boolean isTemplateLengthTestable ) {
            nReads += 1;
            nBases += readLength;
            totalBaseQuality += summedQuals;
            if ( isTemplateLengthTestable ) {
                fragmentSizes.addObservation(Math.abs(templateLength));
            }
        }

        public long getNReads() { return nReads; }
        public long getNBases() { return nBases; }
        public long getTotalBaseQuality() { return totalBaseQuality; }

        /** Freezes the raw totals into an immutable LibraryStatistics. */
        public LibraryStatistics createLibraryStatistics( final long nRefBases ) {
            return new LibraryStatistics(fragmentSizes.getCDF(), nBases, nReads, totalBaseQuality, nRefBases);
        }

        // assumes that the BAM partitioning has no overlap --
        // we'll see each primary line exactly once across all partitions
        public static LibraryRawStatistics reduce( final LibraryRawStatistics stats1,
                                                   final LibraryRawStatistics stats2 ) {
            stats1.fragmentSizes.addObservations(stats2.fragmentSizes);
            stats1.nBases += stats2.nBases;
            stats1.nReads += stats2.nReads;
            stats1.totalBaseQuality += stats2.totalBaseQuality;
            return stats1;
        }

        public static final class Serializer
                extends com.esotericsoftware.kryo.Serializer<LibraryRawStatistics> {
            @Override
            public void write( final Kryo kryo, final Output output,
                               final LibraryRawStatistics libraryRawStatistics ) {
                libraryRawStatistics.serialize(kryo, output);
            }

            @Override
            public LibraryRawStatistics read( final Kryo kryo, final Input input,
                                              final Class<LibraryRawStatistics> klass ) {
                return new LibraryRawStatistics(kryo, input);
            }
        }
    }

    /**
     * Per-partition summary built from one pass over a partition's reads:
     * per-library raw statistics plus the partition's first/last mapped
     * positions and the total reference span covered.
     */
    @DefaultSerializer(PartitionStatistics.Serializer.class)
    public static final class PartitionStatistics {
        private final Map<String, LibraryRawStatistics> libraryNameToStatisticsMap;
        private final String firstContig;   // null when the partition has no mapped primary reads
        private final int firstLocation;    // -1 when the partition has no mapped primary reads
        private final String lastContig;
        private final int lastLocation;
        private final int span;

        public PartitionStatistics( final Iterator<GATKRead> unfilteredReadItr,
                                    final SVReadFilter filter,
                                    final int maxTrackedFragmentLength,
                                    final Map<String, String> readGroupToLibraryMap ) {
            final Iterator<GATKRead> mappedReadItr =
                    filter.applyFilter(unfilteredReadItr, SVReadFilter::isMappedPrimary);
            libraryNameToStatisticsMap = new HashMap<>();
            if ( !mappedReadItr.hasNext() ) {
                // Empty partition: sentinel values, zero span.
                firstContig = lastContig = null;
                firstLocation = lastLocation = -1;
                span = 0;
                return;
            }
            GATKRead mappedRead = mappedReadItr.next();
            firstContig = mappedRead.getContig();
            firstLocation = mappedRead.getStart();
            String currentContig = firstContig;
            // Span bookkeeping: currentSpan accumulates (segment end - segment start) for each
            // contig segment; it starts at -firstLocation and the final segment's end is added
            // after the loop, so span = sum over contigs of (last end+1 - first start).
            int currentSpan = -firstLocation;
            while ( true ) {
                final String libraryName = readGroupToLibraryMap.get(mappedRead.getReadGroup());
                final boolean isTestable = filter.isTemplateLenTestable(mappedRead);
                int summedQuals = 0;
                for ( final byte qual : mappedRead.getBaseQualities() ) {
                    summedQuals += qual;
                }
                libraryNameToStatisticsMap
                        .computeIfAbsent(libraryName, key -> new LibraryRawStatistics(maxTrackedFragmentLength))
                        .addRead(CigarUtils.countAlignedBases(mappedRead.getCigar()),
                                summedQuals, mappedRead.getFragmentLength(), isTestable);
                if ( !mappedReadItr.hasNext() ) break;
                final int endPos = mappedRead.getEnd() + 1;
                mappedRead = mappedReadItr.next();
                if ( !mappedRead.getContig().equals(currentContig) ) {
                    // Close out the previous contig's segment and open a new one.
                    currentSpan += endPos;
                    currentContig = mappedRead.getContig();
                    currentSpan -= mappedRead.getStart();
                }
            }
            lastContig = mappedRead.getContig();
            lastLocation = mappedRead.getEnd() + 1;
            span = currentSpan + lastLocation;
        }

        private PartitionStatistics( final Kryo kryo, final Input input ) {
            final LibraryRawStatistics.Serializer rawStatsSerializer =
                    new LibraryRawStatistics.Serializer();
            int nEntries = input.readInt();
            libraryNameToStatisticsMap = new HashMap<>(SVUtils.hashMapCapacity(nEntries));
            while ( nEntries-- > 0 ) {
                final String libName = input.readString();
                final LibraryRawStatistics rawStats =
                        rawStatsSerializer.read(kryo, input, LibraryRawStatistics.class);
                libraryNameToStatisticsMap.put(libName, rawStats);
            }
            firstContig = input.readString();
            firstLocation = input.readInt();
            lastContig = input.readString();
            lastLocation = input.readInt();
            span = input.readInt();
        }

        /** Total mapped-primary reads across all libraries in this partition. */
        public long getNReads() {
            return libraryNameToStatisticsMap.values().stream().mapToLong(LibraryRawStatistics::getNReads).sum();
        }

        public Map<String, LibraryRawStatistics> getLibraryNameToStatisticsMap() {
            return libraryNameToStatisticsMap;
        }

        public String getFirstContig() { return firstContig; }
        public int getFirstLocation() { return firstLocation; }
        public String getLastContig() { return lastContig; }
        public int getLastLocation() { return lastLocation; }
        public int getSpan() { return span; }

        // Write order must match the Kryo constructor's read order above.
        private void serialize( final Kryo kryo, final Output output ) {
            final LibraryRawStatistics.Serializer rawStatsSerializer =
                    new LibraryRawStatistics.Serializer();
            output.writeInt(libraryNameToStatisticsMap.size());
            for ( final Map.Entry<String, LibraryRawStatistics> entry :
                    libraryNameToStatisticsMap.entrySet() ) {
                output.writeString(entry.getKey());
                rawStatsSerializer.write(kryo, output, entry.getValue());
            }
            output.writeString(firstContig);
            output.writeInt(firstLocation);
            output.writeString(lastContig);
            output.writeInt(lastLocation);
            output.writeInt(span);
        }

        public static final class Serializer
                extends com.esotericsoftware.kryo.Serializer<PartitionStatistics> {
            @Override
            public void write( final Kryo kryo, final Output output,
                               final PartitionStatistics partitionStatistics ) {
                partitionStatistics.serialize(kryo, output);
            }

            @Override
            public PartitionStatistics read( final Kryo kryo, final Input input,
                                             final Class<PartitionStatistics> klass ) {
                return new PartitionStatistics(kryo, input);
            }
        }
    }

    /** A class to track the genomic location of the start of the first and last mapped reads in a partition. */
    @DefaultSerializer(PartitionBounds.Serializer.class)
    public static final class PartitionBounds {
        private final int firstContigID;
        private final int firstStart;
        private final int lastContigID;
        private final int lastEnd;
        private final int span;
        // Sentinel contig ID for partitions with no mapped reads.
        public final static int UNMAPPED = Integer.MAX_VALUE;

        public PartitionBounds( final int firstContigID, final int firstStart,
                                final int lastContigID, final int lastEnd, final int span ) {
            this.firstContigID = firstContigID;
            this.firstStart = firstStart;
            this.lastContigID = lastContigID;
            this.lastEnd = lastEnd;
            this.span = span;
        }

        private PartitionBounds( final Kryo kryo, final Input input ) {
            this.firstContigID = input.readInt();
            this.firstStart = input.readInt();
            this.lastContigID = input.readInt();
            this.lastEnd = input.readInt();
            this.span = input.readInt();
        }

        // Write order must match the Kryo constructor's read order above.
        private void serialize( final Kryo kryo, final Output output ) {
            output.writeInt(firstContigID);
            output.writeInt(firstStart);
            output.writeInt(lastContigID);
            output.writeInt(lastEnd);
            output.writeInt(span);
        }

        public int getFirstContigID() { return firstContigID; }
        public int getFirstStart() { return firstStart; }
        public int getLastContigID() { return lastContigID; }
        public int getLastEnd() { return lastEnd; }
        public int getSpan() { return span; }

        public static final class Serializer
                extends com.esotericsoftware.kryo.Serializer<PartitionBounds> {
            @Override
            public void write( final Kryo kryo, final Output output, final PartitionBounds partitionBounds ) {
                partitionBounds.serialize(kryo, output);
            }

            @Override
            public PartitionBounds read( final Kryo kryo, final Input input,
                                         final Class<PartitionBounds> klass ) {
                return new PartitionBounds(kryo, input);
            }
        }
    }
}
/* * Copyright (C) 2015 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.example.android.sunshine.app; import android.annotation.TargetApi; import android.app.Activity; import android.content.Intent; import android.content.SharedPreferences; import android.content.res.TypedArray; import android.database.Cursor; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.preference.PreferenceManager; import android.support.design.widget.AppBarLayout; import android.support.v4.app.Fragment; import android.support.v4.app.LoaderManager; import android.support.v4.content.CursorLoader; import android.support.v4.content.Loader; import android.support.v4.view.ViewCompat; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.util.AttributeSet; import android.util.Log; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.view.ViewTreeObserver; import android.widget.AbsListView; import android.widget.TextView; import com.example.android.sunshine.app.data.WeatherContract; import com.example.android.sunshine.app.sync.SunshineSyncAdapter; /** * Encapsulates fetching the forecast and displaying it as a {@link android.support.v7.widget.RecyclerView} layout. 
*/
public class ForecastFragment extends Fragment
        implements LoaderManager.LoaderCallbacks<Cursor>,
        SharedPreferences.OnSharedPreferenceChangeListener {
    public static final String LOG_TAG = ForecastFragment.class.getSimpleName();

    private ForecastAdapter mForecastAdapter;
    private RecyclerView mRecyclerView;
    // Adapter position of the currently selected item; restored after rotation.
    private int mPosition = RecyclerView.NO_POSITION;
    private boolean mUseTodayLayout, mAutoSelectView;
    private int mChoiceMode;          // read from XML attrs in onInflate
    private boolean mHoldForTransition;  // postpone shared-element transition until data is loaded

    private static final String SELECTED_KEY = "selected_position";
    private static final int FORECAST_LOADER = 0;

    // For the forecast view we're showing only a small subset of the stored data.
    // Specify the columns we need.
    private static final String[] FORECAST_COLUMNS = {
            // In this case the id needs to be fully qualified with a table name, since
            // the content provider joins the location & weather tables in the background
            // (both have an _id column)
            // On the one hand, that's annoying. On the other, you can search the weather table
            // using the location set by the user, which is only in the Location table.
            // So the convenience is worth it.
            WeatherContract.WeatherEntry.TABLE_NAME + "." + WeatherContract.WeatherEntry._ID,
            WeatherContract.WeatherEntry.COLUMN_DATE,
            WeatherContract.WeatherEntry.COLUMN_SHORT_DESC,
            WeatherContract.WeatherEntry.COLUMN_MAX_TEMP,
            WeatherContract.WeatherEntry.COLUMN_MIN_TEMP,
            WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING,
            WeatherContract.WeatherEntry.COLUMN_WEATHER_ID,
            WeatherContract.LocationEntry.COLUMN_COORD_LAT,
            WeatherContract.LocationEntry.COLUMN_COORD_LONG
    };

    // These indices are tied to FORECAST_COLUMNS.  If FORECAST_COLUMNS changes, these
    // must change.
    static final int COL_WEATHER_ID = 0;
    static final int COL_WEATHER_DATE = 1;
    static final int COL_WEATHER_DESC = 2;
    static final int COL_WEATHER_MAX_TEMP = 3;
    static final int COL_WEATHER_MIN_TEMP = 4;
    static final int COL_LOCATION_SETTING = 5;
    static final int COL_WEATHER_CONDITION_ID = 6;
    static final int COL_COORD_LAT = 7;
    static final int COL_COORD_LONG = 8;

    /**
     * A callback interface that all activities containing this fragment must
     * implement. This mechanism allows activities to be notified of item
     * selections.
     */
    public interface Callback {
        /**
         * DetailFragmentCallback for when an item has been selected.
         */
        public void onItemSelected(Uri dateUri, ForecastAdapter.ForecastAdapterViewHolder vh);
    }

    public ForecastFragment() {
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Add this line in order for this fragment to handle menu events.
        setHasOptionsMenu(true);
    }

    @Override
    public void onResume() {
        // Listen for preference changes (e.g. location status) while visible.
        SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(getActivity());
        sp.registerOnSharedPreferenceChangeListener(this);
        super.onResume();
    }

    @Override
    public void onPause() {
        SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(getActivity());
        sp.unregisterOnSharedPreferenceChangeListener(this);
        super.onPause();
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        inflater.inflate(R.menu.forecastfragment, menu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
//        if (id == R.id.action_refresh) {
//            updateWeather();
//            return true;
//        }
        if (id == R.id.action_map) {
            openPreferredLocationInMap();
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /** Reads choice mode, auto-select and shared-element flags from the fragment's XML attributes. */
    @Override
    public void onInflate(Activity activity, AttributeSet attrs, Bundle savedInstanceState) {
        super.onInflate(activity, attrs, savedInstanceState);
        TypedArray a = activity.obtainStyledAttributes(attrs, R.styleable.ForecastFragment,
                0, 0);
        mChoiceMode = a.getInt(R.styleable.ForecastFragment_android_choiceMode, AbsListView.CHOICE_MODE_NONE);
        mAutoSelectView = a.getBoolean(R.styleable.ForecastFragment_autoSelectView, false);
        mHoldForTransition = a.getBoolean(R.styleable.ForecastFragment_sharedElementTransitions, false);
        a.recycle();
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View rootView = inflater.inflate(R.layout.fragment_main, container, false);

        // Get a reference to the RecyclerView, and attach this adapter to it.
        mRecyclerView = (RecyclerView) rootView.findViewById(R.id.recyclerview_forecast);

        // Set the layout manager
        mRecyclerView.setLayoutManager(new LinearLayoutManager(getActivity()));
        View emptyView = rootView.findViewById(R.id.recyclerview_forecast_empty);

        // use this setting to improve performance if you know that changes
        // in content do not change the layout size of the RecyclerView
        mRecyclerView.setHasFixedSize(true);

        // The ForecastAdapter will take data from a source and
        // use it to populate the RecyclerView it's attached to.
        mForecastAdapter = new ForecastAdapter(getActivity(), new ForecastAdapter.ForecastAdapterOnClickHandler() {
            @Override
            public void onClick(Long date, ForecastAdapter.ForecastAdapterViewHolder vh) {
                String locationSetting = Utility.getPreferredLocation(getActivity());
                // Hand the selected day's URI (and the view holder, for shared elements) to the host activity.
                ((Callback) getActivity())
                        .onItemSelected(WeatherContract.WeatherEntry.buildWeatherLocationWithDate(
                                locationSetting, date),
                                vh
                        );
                mPosition = vh.getAdapterPosition();
            }
        }, emptyView, mChoiceMode);

        // specify an adapter (see also next example)
        mRecyclerView.setAdapter(mForecastAdapter);

        final View parallaxView = rootView.findViewById(R.id.parallax_bar);
        if (null != parallaxView) {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
                // Move the parallax bar at half the scroll speed, clamped to [-height, 0].
                mRecyclerView.addOnScrollListener(new RecyclerView.OnScrollListener() {
                    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
                    @Override
                    public void onScrolled(RecyclerView recyclerView, int dx, int dy) {
                        super.onScrolled(recyclerView, dx, dy);
                        int max = parallaxView.getHeight();
                        if (dy > 0) {
                            parallaxView.setTranslationY(Math.max(-max, parallaxView.getTranslationY() - dy / 2));
                        } else {
                            parallaxView.setTranslationY(Math.min(0, parallaxView.getTranslationY() - dy / 2));
                        }
                    }
                });
            }
        }

        final AppBarLayout appbarView = (AppBarLayout) rootView.findViewById(R.id.appbar);
        if (null != appbarView) {
            // Flat app bar while at the top of the list; elevated once scrolled.
            ViewCompat.setElevation(appbarView, 0);
            mRecyclerView.addOnScrollListener(new RecyclerView.OnScrollListener() {
                @TargetApi(Build.VERSION_CODES.LOLLIPOP)
                @Override
                public void onScrolled(RecyclerView recyclerView, int dx, int dy) {
                    if (0 == mRecyclerView.computeVerticalScrollOffset()) {
                        appbarView.setElevation(0);
                    } else {
                        appbarView.setElevation(appbarView.getTargetElevation());
                    }
                }
            });
        }

        // If there's instance state, mine it for useful information.
        // The end-goal here is that the user never knows that turning their device sideways
        // does crazy lifecycle related things.  It should feel like some stuff stretched out,
        // or magically appeared to take advantage of room, but data or place in the app was never
        // actually *lost*.
        if (savedInstanceState != null) {
            if (savedInstanceState.containsKey(SELECTED_KEY)) {
                // The Recycler View probably hasn't even been populated yet.  Actually perform the
                // swapout in onLoadFinished.
                mPosition = savedInstanceState.getInt(SELECTED_KEY);
            }
            mForecastAdapter.onRestoreInstanceState(savedInstanceState);
        }

        mForecastAdapter.setUseTodayLayout(mUseTodayLayout);

        return rootView;
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        // We hold for transition here just in-case the activity
        // needs to be re-created. In a standard return transition,
        // this doesn't actually make a difference.
        if ( mHoldForTransition ) {
            getActivity().supportPostponeEnterTransition();
        }
        getLoaderManager().initLoader(FORECAST_LOADER, null, this);
        super.onActivityCreated(savedInstanceState);
    }

    // since we read the location when we create the loader, all we need to do is restart things
    void onLocationChanged() {
        getLoaderManager().restartLoader(FORECAST_LOADER, null, this);
    }

    /** Launches a maps app at the first row's coordinates via an ACTION_VIEW geo: intent. */
    private void openPreferredLocationInMap() {
        // Using the URI scheme for showing a location found on a map.  This super-handy
        // intent can is detailed in the "Common Intents" page of Android's developer site:
        // http://developer.android.com/guide/components/intents-common.html#Maps
        if (null != mForecastAdapter) {
            Cursor c = mForecastAdapter.getCursor();
            if (null != c) {
                c.moveToPosition(0);
                String posLat = c.getString(COL_COORD_LAT);
                String posLong = c.getString(COL_COORD_LONG);
                Uri geoLocation = Uri.parse("geo:" + posLat + "," + posLong);

                Intent intent = new Intent(Intent.ACTION_VIEW);
                intent.setData(geoLocation);

                if (intent.resolveActivity(getActivity().getPackageManager()) != null) {
                    startActivity(intent);
                } else {
                    Log.d(LOG_TAG, "Couldn't call " + geoLocation.toString() + ", no receiving apps installed!");
                }
            }
        }
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        // When tablets rotate, the currently selected list item needs to be saved.
        // When no item is selected, mPosition will be set to RecyclerView.NO_POSITION,
        // so check for that before storing.
        if (mPosition != RecyclerView.NO_POSITION) {
            outState.putInt(SELECTED_KEY, mPosition);
        }
        mForecastAdapter.onSaveInstanceState(outState);
        super.onSaveInstanceState(outState);
    }

    @Override
    public Loader<Cursor> onCreateLoader(int i, Bundle bundle) {
        // This is called when a new Loader needs to be created.  This
        // fragment only uses one loader, so we don't care about checking the id.

        // To only show current and future dates, filter the query to return weather only for
        // dates after or including today.

        // Sort order:  Ascending, by date.
        String sortOrder = WeatherContract.WeatherEntry.COLUMN_DATE + " ASC";

        String locationSetting = Utility.getPreferredLocation(getActivity());
        Uri weatherForLocationUri = WeatherContract.WeatherEntry.buildWeatherLocationWithStartDate(
                locationSetting, System.currentTimeMillis());

        return new CursorLoader(getActivity(),
                weatherForLocationUri,
                FORECAST_COLUMNS,
                null,
                null,
                sortOrder);
    }

    @Override
    public void onLoadFinished(Loader<Cursor> loader, Cursor data) {
        mForecastAdapter.swapCursor(data);
        if (mPosition != RecyclerView.NO_POSITION) {
            // If we don't need to restart the loader, and there's a desired position to restore
            // to, do so now.
            mRecyclerView.smoothScrollToPosition(mPosition);
        }
        updateEmptyView();
        if ( data.getCount() == 0 ) {
            // Nothing to animate to; release the postponed enter transition immediately.
            getActivity().supportStartPostponedEnterTransition();
        } else {
            mRecyclerView.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() {
                @Override
                public boolean onPreDraw() {
                    // Since we know we're going to get items, we keep the listener around until
                    // we see Children.
                    if (mRecyclerView.getChildCount() > 0) {
                        mRecyclerView.getViewTreeObserver().removeOnPreDrawListener(this);
                        int itemPosition = mForecastAdapter.getSelectedItemPosition();
                        if ( RecyclerView.NO_POSITION == itemPosition ) itemPosition = 0;
                        RecyclerView.ViewHolder vh = mRecyclerView.findViewHolderForAdapterPosition(itemPosition);
                        if ( null != vh && mAutoSelectView ) {
                            mForecastAdapter.selectView( vh );
                        }
                        if ( mHoldForTransition ) {
                            getActivity().supportStartPostponedEnterTransition();
                        }
                        return true;
                    }
                    return false;
                }
            });
        }
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        if (null != mRecyclerView) {
            mRecyclerView.clearOnScrollListeners();
        }
    }

    @Override
    public void onLoaderReset(Loader<Cursor> loader) {
        mForecastAdapter.swapCursor(null);
    }

    public void setUseTodayLayout(boolean useTodayLayout) {
        mUseTodayLayout = useTodayLayout;
        if (mForecastAdapter != null) {
            mForecastAdapter.setUseTodayLayout(mUseTodayLayout);
        }
    }

    /*
        Updates the empty list view with contextually relevant information that the user can
        use to determine why they aren't seeing weather.
     */
    private void updateEmptyView() {
        if ( mForecastAdapter.getItemCount() == 0 ) {
            TextView tv = (TextView) getView().findViewById(R.id.recyclerview_forecast_empty);
            if ( null != tv ) {
                // if cursor is empty, why? do we have an invalid location
                int message = R.string.empty_forecast_list;
                @SunshineSyncAdapter.LocationStatus int location = Utility.getLocationStatus(getActivity());
                switch (location) {
                    case SunshineSyncAdapter.LOCATION_STATUS_SERVER_DOWN:
                        message = R.string.empty_forecast_list_server_down;
                        break;
                    case SunshineSyncAdapter.LOCATION_STATUS_SERVER_INVALID:
                        message = R.string.empty_forecast_list_server_error;
                        break;
                    case SunshineSyncAdapter.LOCATION_STATUS_INVALID:
                        message = R.string.empty_forecast_list_invalid_location;
                        break;
                    default:
                        if (!Utility.isNetworkAvailable(getActivity())) {
                            message = R.string.empty_forecast_list_no_network;
                        }
                }
                tv.setText(message);
            }
        }
    }

    @Override
    public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
        // Refresh the empty-view explanation whenever the location status preference changes.
        if (key.equals(getString(R.string.pref_location_status_key))) {
            updateEmptyView();
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cocoon.portal.transformation; import org.apache.cocoon.portal.event.impl.CopletLinkEvent; import org.apache.cocoon.portal.om.CopletInstance; import org.apache.cocoon.xml.AttributesImpl; import org.apache.cocoon.xml.XMLUtils; import org.xml.sax.Attributes; import org.xml.sax.ContentHandler; import org.xml.sax.SAXException; /** * This transformer is used to replace links (URIs) from elements * like &lt;a href="URI"&gt; or &lt;form action="URI"&gt; with portal * event uris. Therefore the transformer searches for &lt;eventlink&gt; * elements replaces the URI form the attribute which is specified within * an attribute called "attribute" and renames the element as specified * within an attribute called "element". 
*
 * Example:<br><br>
 *
 * <pre>
 * &lt;root xmlns:ev="http://apache.org/cocoon/portal/eventlink/1.0"&gt;
 *   &lt;ev:eventlink href="http://eventlinkexample" element="a" attribute="href"&gt;linktext&lt;/ev:eventlink&gt;
 * &lt;/root&gt;<br></pre>
 *
 * will be replaced with something like:<br><br>
 *
 * <pre>
 * &lt;root&gt;
 *   &lt;a href="portal?cocoon-portal-event=8"&gt;linktext&lt;/a&gt;
 * &lt;/root&gt;<br></pre>
 *
 * The transformer will create two CopletLinkEvents and insert corresponding links
 * to them to the XML instead of "http://eventlinkexample". If such a link is pressed
 * the corresponding CopletLinkEvent is sent to the Subscribers to be handled.<br>
 * Please see also the documentation of superclass AbstractCopletTransformer for how
 * the coplet instance data are acquired.
 *
 * @version $Id$
 */
public class NewEventLinkTransformer extends AbstractCopletTransformer {

    /**
     * The namespace URI to listen for.
     */
    public static final String NAMESPACE_URI =
        "http://apache.org/cocoon/portal/eventlink/1.0";

    /**
     * The XML element name to listen for.
     */
    public static final String EVENT_ELEM = "eventlink";

    /**
     * An attribute's name of EVENT_ELEMENT: names the attribute holding the link URI.
     */
    public static final String ATTRIBUTE_ATTR = "attribute";

    /**
     * An attribute's name of EVENT_ELEMENT: names the element to emit in place of eventlink.
     */
    public static final String ELEMENT_ATTR = "element";

    /**
     * Creates the transformer, registering its namespace with the superclass
     * and asking it to strip our namespace prefixes from the output.
     * @see java.lang.Object#Object()
     */
    public NewEventLinkTransformer() {
        this.defaultNamespaceURI = NAMESPACE_URI;
        this.removeOurNamespacePrefixes = true;
    }

    /**
     * Replaces an &lt;eventlink&gt; element with the element it names, rewriting the
     * named attribute's URI into a portal event link.  Form elements get special
     * treatment: query parameters are stripped from GET actions, and the
     * "cocoon-portal-action"/"cocoon-portal-event" request parameters are re-added
     * as hidden inputs (see sendHiddenFields).
     *
     * @throws SAXException when the eventlink element does not contain the necessary attributes
     *                      "element" and "attribute", retrieving the LinkURI from the LinkService fails,
     *                      or an unknown element within the namespace is encountered.
     * @see org.apache.cocoon.transformation.AbstractSAXTransformer#startTransformingElement(String, String, String, Attributes)
     */
    public void startTransformingElement(String uri,
                                         String name,
                                         String raw,
                                         Attributes attributes)
    throws SAXException {
        if (!EVENT_ELEM.equals(name)) {
            throw new SAXException("Unknown element encountered: " + name);
        }

        String attributeName = attributes.getValue(ATTRIBUTE_ATTR);
        String elementName = attributes.getValue(ELEMENT_ATTR);

        if (attributeName == null) {
            throw new SAXException(
                "Element " + EVENT_ELEM + " must have an attribute " + ATTRIBUTE_ATTR + ".");
        }

        if (elementName == null) {
            throw new SAXException(
                "Element " + EVENT_ELEM + " must have an attribute " + ELEMENT_ATTR + ".");
        }

        // remove ATTRIBUTE_ATTR, ELEMENT_ATTR and "coplet" from attributes
        AttributesImpl newAttributes = this.getMutableAttributes(attributes);
        newAttributes.removeAttribute(ELEMENT_ATTR);
        newAttributes.removeAttribute(ATTRIBUTE_ATTR);
        newAttributes.removeAttribute("coplet");

        int index = newAttributes.getIndex(attributeName);
        String link = newAttributes.getValue(index);

        boolean formSpecialTreatment = false;
        if ("form".equals(elementName)) {
            //cut all query parameters from actions with method get, as these will be normaly ignored!
            formSpecialTreatment = true;
            if ("GET".equalsIgnoreCase(newAttributes.getValue("method"))
                && link.indexOf('?') > 0) {
                link = link.substring(0, link.indexOf('?'));
            }
        }

        String portalAction = null;
        String portalEvent = null;

        // if attribute found that contains a link
        if (link != null) {
            CopletInstance cid = this.getCopletInstanceData(attributes.getValue("coplet"));
            // create event link
            CopletLinkEvent event = new CopletLinkEvent(cid, link);
            String eventLink = this.portalService.getLinkService().getLinkURI(event);

            //form elements need hidden inputs to change request parameters
            if (formSpecialTreatment) {
                // Extract the cocoon-portal-action parameter value, if present.
                int pos = eventLink.indexOf("cocoon-portal-action=");
                if ( pos != -1 ) {
                    int begin = pos + "cocoon-portal-action=".length();
                    int end = eventLink.indexOf('&', begin);
                    if (end == -1) {
                        end = eventLink.length();
                    }

                    portalAction = eventLink.substring(begin, end);
                }

                // Extract the cocoon-portal-event parameter value, if present.
                pos = eventLink.indexOf("cocoon-portal-event=");
                if ( pos != -1 ) {
                    int begin = pos + "cocoon-portal-event=".length();
                    int end = eventLink.indexOf('&', begin);
                    if (end == -1) {
                        end = eventLink.length();
                    }
                    portalEvent = eventLink.substring(begin, end);
                }

                // Drop the whole query string from the form action; the extracted
                // parameters are re-added as hidden inputs below.
                pos = eventLink.indexOf('?');
                if ( pos != -1 ) {
                    eventLink = eventLink.substring(0, eventLink.indexOf('?'));
                }
            }

            // insert event link
            newAttributes.setValue(index, eventLink);
        }

        // Remember the emitted element name so endTransformingElement can close it.
        this.stack.push(elementName);
        XMLUtils.createElement(contentHandler, elementName, newAttributes);

        // generate hidden inputs to add request parameters to the form action
        if (formSpecialTreatment) {
            sendHiddenFields(contentHandler, portalAction, portalEvent);
        }
    }

    /**
     * With forms the uri in the action attribute cannot be enhanced with request parameters.
     * Instead hidden input fields must be inserted into the SAX stream to add request parameters.
     * This method sends two hidden inputs adding the "cocoon-portal-action" parameter and
     * the "cocoon-portal-event" parameter.  Null or blank values are skipped.
     *
     * @param handler the content handler receiving the SAX events
     * @param portalAction value of the "cocoon-portal-action" parameter
     * @param portalEvent value of the "cocoon-portal-event" parameter
     * @throws SAXException if sending the SAX events failed
     */
    private void sendHiddenFields(ContentHandler handler,
                                  String portalAction,
                                  String portalEvent)
    throws SAXException {
        if ( portalAction != null && portalAction.trim().length() > 0 ) {
            final AttributesImpl attributes = new AttributesImpl();
            attributes.addCDATAAttribute("type", "hidden");
            attributes.addCDATAAttribute("name", "cocoon-portal-action");
            attributes.addCDATAAttribute("value", portalAction);
            XMLUtils.createElement(handler, "input", attributes);
        }

        if ( portalEvent != null && portalEvent.trim().length() > 0 ) {
            final AttributesImpl attributes = new AttributesImpl();
            attributes.addCDATAAttribute("type", "hidden");
            attributes.addCDATAAttribute("name", "cocoon-portal-event");
            attributes.addCDATAAttribute("value", portalEvent);
            XMLUtils.createElement(handler, "input", attributes);
        }
    }

    /**
     * Closes the element whose name was pushed by startTransformingElement.
     * @see org.apache.cocoon.transformation.AbstractSAXTransformer#endTransformingElement(String, String, String)
     */
    public void endTransformingElement(String uri, String name, String raw)
    throws SAXException {
        XMLUtils.createElement(contentHandler, (String) this.stack.pop());
    }
}
/**
 * NOTE: This class is auto generated by the swagger code generator program (2.4.16).
 * https://github.com/swagger-api/swagger-codegen
 * Do not edit the class manually.
 */
package com.sequenceiq.mock.swagger.v31.api;

import java.io.IOException;
import java.util.Optional;

import javax.servlet.http.HttpServletRequest;
import javax.validation.Valid;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.sequenceiq.mock.swagger.model.ApiUser2;
import com.sequenceiq.mock.swagger.model.ApiUser2List;
import com.sequenceiq.mock.swagger.model.ApiUserSessionList;

import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.ApiResponses;
import io.swagger.annotations.Authorization;

// Mock of the Cloudera Manager v31 UsersResource API. Every default method either
// returns a canned example payload (always with HTTP status 501 NOT_IMPLEMENTED)
// when an ObjectMapper and a request with a matching Accept header are available,
// or a bare 501 otherwise. Real behavior is supplied by implementing classes.
@javax.annotation.Generated(value = "io.swagger.codegen.languages.SpringCodegen", date = "2020-11-16T20:16:58.188+01:00")
@Api(value = "UsersResource", description = "the UsersResource API")
@RequestMapping(value = "/{mockUuid}/api/v31")
public interface UsersResourceApi {

    // Shared logger used by the generated default-method bodies below.
    Logger log = LoggerFactory.getLogger(UsersResourceApi.class);

    // Hook point: implementors may supply a Jackson mapper; the default supplies
    // none, which disables example-body generation in the methods below.
    default Optional<ObjectMapper> getObjectMapper() {
        return Optional.empty();
    }

    // Hook point: implementors may supply the current servlet request.
    default Optional<HttpServletRequest> getRequest() {
        return Optional.empty();
    }

    // Accept header of the current request, if a request is available.
    default Optional<String> getAcceptHeader() {
        return getRequest().map(r -> r.getHeader("Accept"));
    }

    // POST /users — mock user creation; returns an example ApiUser2List with 501.
    @ApiOperation(value = "Creates a list of users.", nickname = "createUsers2", notes = "Creates a list of users. <p> When creating new users, the <i>password</i> property of each user should be their plain text password. The returned user information will not contain any password information. <p/>", response = ApiUser2List.class, authorizations = { @Authorization(value = "basic") }, tags={ "UsersResource", })
    @ApiResponses(value = { @ApiResponse(code = 201, message = "Success", response = ApiUser2List.class) })
    @RequestMapping(value = "/users",
        produces = { "application/json" },
        consumes = { "application/json" },
        method = RequestMethod.POST)
    default ResponseEntity<ApiUser2List> createUsers2(@ApiParam(value = "The unique id of CB cluster (works in CB test framework only)",required=true) @PathVariable("mockUuid") String mockUuid,@ApiParam(value = "List of users to create." ) @Valid @RequestBody ApiUser2List body) {
        if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
            if (getAcceptHeader().get().contains("application/json")) {
                try {
                    // Canned example body; status is still 501 as this is a mock.
                    return new ResponseEntity<>(getObjectMapper().get().readValue("{ \"items\" : [ { \"name\" : \"...\", \"password\" : \"...\", \"authRoles\" : [ { \"displayName\" : \"...\", \"name\" : \"...\", \"uuid\" : \"...\" }, { \"displayName\" : \"...\", \"name\" : \"...\", \"uuid\" : \"...\" } ], \"pwHash\" : \"...\", \"pwSalt\" : 12345, \"pwLogin\" : true }, { \"name\" : \"...\", \"password\" : \"...\", \"authRoles\" : [ { \"displayName\" : \"...\", \"name\" : \"...\", \"uuid\" : \"...\" }, { \"displayName\" : \"...\", \"name\" : \"...\", \"uuid\" : \"...\" } ], \"pwHash\" : \"...\", \"pwSalt\" : 12345, \"pwLogin\" : true } ]}", ApiUser2List.class), HttpStatus.NOT_IMPLEMENTED);
                } catch (IOException e) {
                    log.error("Couldn't serialize response for content type application/json", e);
                    return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
                }
            }
        } else {
            log.warn("ObjectMapper or HttpServletRequest not configured in default UsersResourceApi interface so no example is generated");
        }
        return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
    }

    // DELETE /users/{userName} — mock user deletion; example ApiUser2 with 501.
    @ApiOperation(value = "Deletes a user from the system.", nickname = "deleteUser2", notes = "Deletes a user from the system. <p/>", response = ApiUser2.class, authorizations = { @Authorization(value = "basic") }, tags={ "UsersResource", })
    @ApiResponses(value = { @ApiResponse(code = 204, message = "Success", response = ApiUser2.class) })
    @RequestMapping(value = "/users/{userName}",
        produces = { "application/json" },
        method = RequestMethod.DELETE)
    default ResponseEntity<ApiUser2> deleteUser2(@ApiParam(value = "The unique id of CB cluster (works in CB test framework only)",required=true) @PathVariable("mockUuid") String mockUuid,@ApiParam(value = "The name of the user to delete.",required=true) @PathVariable("userName") String userName) {
        if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
            if (getAcceptHeader().get().contains("application/json")) {
                try {
                    return new ResponseEntity<>(getObjectMapper().get().readValue("{ \"name\" : \"...\", \"password\" : \"...\", \"authRoles\" : [ { \"displayName\" : \"...\", \"name\" : \"...\", \"uuid\" : \"...\" }, { \"displayName\" : \"...\", \"name\" : \"...\", \"uuid\" : \"...\" } ], \"pwHash\" : \"...\", \"pwSalt\" : 12345, \"pwLogin\" : true}", ApiUser2.class), HttpStatus.NOT_IMPLEMENTED);
                } catch (IOException e) {
                    log.error("Couldn't serialize response for content type application/json", e);
                    return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
                }
            }
        } else {
            log.warn("ObjectMapper or HttpServletRequest not configured in default UsersResourceApi interface so no example is generated");
        }
        return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
    }

    // POST /users/expireSessions/{userName} — no body; always answers 501 in the mock.
    @ApiOperation(value = "Expires the sessions associated with interactive authenticated user in Cloudera Manager.", nickname = "expireSessions", notes = "Expires the sessions associated with interactive authenticated user in Cloudera Manager. This can be used by Full Admin/User Admin users only. <p> Note that these sessions are only associated with a user who log into the web interface. Sessions of an API user will not be affected.", authorizations = { @Authorization(value = "basic") }, tags={ "UsersResource", })
    @ApiResponses(value = { @ApiResponse(code = 201, message = "Success") })
    @RequestMapping(value = "/users/expireSessions/{userName}",
        method = RequestMethod.POST)
    default ResponseEntity<Void> expireSessions(@ApiParam(value = "The unique id of CB cluster (works in CB test framework only)",required=true) @PathVariable("mockUuid") String mockUuid,@ApiParam(value = "",required=true) @PathVariable("userName") String userName) {
        if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
            // No example body is generated for a Void response.
        } else {
            log.warn("ObjectMapper or HttpServletRequest not configured in default UsersResourceApi interface so no example is generated");
        }
        return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
    }

    // GET /users/sessions — example ApiUserSessionList with 501.
    @ApiOperation(value = "Return a list of the sessions associated with interactive authenticated users in Cloudera Manager.", nickname = "getSessions", notes = "Return a list of the sessions associated with interactive authenticated users in Cloudera Manager. <p> Note that these sessions are only associated with users who log into the web interface. API users will not appear.", response = ApiUserSessionList.class, authorizations = { @Authorization(value = "basic") }, tags={ "UsersResource", })
    @ApiResponses(value = { @ApiResponse(code = 200, message = "Success", response = ApiUserSessionList.class) })
    @RequestMapping(value = "/users/sessions",
        produces = { "application/json" },
        method = RequestMethod.GET)
    default ResponseEntity<ApiUserSessionList> getSessions(@ApiParam(value = "The unique id of CB cluster (works in CB test framework only)",required=true) @PathVariable("mockUuid") String mockUuid) {
        if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
            if (getAcceptHeader().get().contains("application/json")) {
                try {
                    return new ResponseEntity<>(getObjectMapper().get().readValue("{ \"items\" : [ { \"name\" : \"...\", \"remoteAddr\" : \"...\", \"lastRequest\" : \"...\" }, { \"name\" : \"...\", \"remoteAddr\" : \"...\", \"lastRequest\" : \"...\" } ]}", ApiUserSessionList.class), HttpStatus.NOT_IMPLEMENTED);
                } catch (IOException e) {
                    log.error("Couldn't serialize response for content type application/json", e);
                    return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
                }
            }
        } else {
            log.warn("ObjectMapper or HttpServletRequest not configured in default UsersResourceApi interface so no example is generated");
        }
        return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
    }

    // GET /users/{userName} — example ApiUser2 with 501.
    @ApiOperation(value = "Returns detailed information about a user.", nickname = "readUser2", notes = "Returns detailed information about a user.", response = ApiUser2.class, authorizations = { @Authorization(value = "basic") }, tags={ "UsersResource", })
    @ApiResponses(value = { @ApiResponse(code = 200, message = "Success", response = ApiUser2.class) })
    @RequestMapping(value = "/users/{userName}",
        produces = { "application/json" },
        method = RequestMethod.GET)
    default ResponseEntity<ApiUser2> readUser2(@ApiParam(value = "The unique id of CB cluster (works in CB test framework only)",required=true) @PathVariable("mockUuid") String mockUuid,@ApiParam(value = "The user to read.",required=true) @PathVariable("userName") String userName) {
        if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
            if (getAcceptHeader().get().contains("application/json")) {
                try {
                    return new ResponseEntity<>(getObjectMapper().get().readValue("{ \"name\" : \"...\", \"password\" : \"...\", \"authRoles\" : [ { \"displayName\" : \"...\", \"name\" : \"...\", \"uuid\" : \"...\" }, { \"displayName\" : \"...\", \"name\" : \"...\", \"uuid\" : \"...\" } ], \"pwHash\" : \"...\", \"pwSalt\" : 12345, \"pwLogin\" : true}", ApiUser2.class), HttpStatus.NOT_IMPLEMENTED);
                } catch (IOException e) {
                    log.error("Couldn't serialize response for content type application/json", e);
                    return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
                }
            }
        } else {
            log.warn("ObjectMapper or HttpServletRequest not configured in default UsersResourceApi interface so no example is generated");
        }
        return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
    }

    // GET /users — example ApiUser2List with 501; optional "view" query parameter.
    @ApiOperation(value = "Returns a list of the user names configured in the system.", nickname = "readUsers2", notes = "Returns a list of the user names configured in the system.", response = ApiUser2List.class, authorizations = { @Authorization(value = "basic") }, tags={ "UsersResource", })
    @ApiResponses(value = { @ApiResponse(code = 200, message = "Success", response = ApiUser2List.class) })
    @RequestMapping(value = "/users",
        produces = { "application/json" },
        method = RequestMethod.GET)
    default ResponseEntity<ApiUser2List> readUsers2(@ApiParam(value = "The unique id of CB cluster (works in CB test framework only)",required=true) @PathVariable("mockUuid") String mockUuid,@ApiParam(value = "", allowableValues = "EXPORT, EXPORT_REDACTED, FULL, FULL_WITH_HEALTH_CHECK_EXPLANATION, SUMMARY", defaultValue = "summary") @Valid @RequestParam(value = "view", required = false, defaultValue="summary") String view) {
        if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
            if (getAcceptHeader().get().contains("application/json")) {
                try {
                    return new ResponseEntity<>(getObjectMapper().get().readValue("{ \"items\" : [ { \"name\" : \"...\", \"password\" : \"...\", \"authRoles\" : [ { \"displayName\" : \"...\", \"name\" : \"...\", \"uuid\" : \"...\" }, { \"displayName\" : \"...\", \"name\" : \"...\", \"uuid\" : \"...\" } ], \"pwHash\" : \"...\", \"pwSalt\" : 12345, \"pwLogin\" : true }, { \"name\" : \"...\", \"password\" : \"...\", \"authRoles\" : [ { \"displayName\" : \"...\", \"name\" : \"...\", \"uuid\" : \"...\" }, { \"displayName\" : \"...\", \"name\" : \"...\", \"uuid\" : \"...\" } ], \"pwHash\" : \"...\", \"pwSalt\" : 12345, \"pwLogin\" : true } ]}", ApiUser2List.class), HttpStatus.NOT_IMPLEMENTED);
                } catch (IOException e) {
                    log.error("Couldn't serialize response for content type application/json", e);
                    return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
                }
            }
        } else {
            log.warn("ObjectMapper or HttpServletRequest not configured in default UsersResourceApi interface so no example is generated");
        }
        return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
    }

    // PUT /users/{userName} — mock user update; example ApiUser2 with 501.
    @ApiOperation(value = "Updates the given user's information.", nickname = "updateUser2", notes = "Updates the given user's information. Note that the user's name cannot be changed.", response = ApiUser2.class, authorizations = { @Authorization(value = "basic") }, tags={ "UsersResource", })
    @ApiResponses(value = { @ApiResponse(code = 204, message = "Success", response = ApiUser2.class) })
    @RequestMapping(value = "/users/{userName}",
        produces = { "application/json" },
        consumes = { "application/json" },
        method = RequestMethod.PUT)
    default ResponseEntity<ApiUser2> updateUser2(@ApiParam(value = "The unique id of CB cluster (works in CB test framework only)",required=true) @PathVariable("mockUuid") String mockUuid,@ApiParam(value = "User name being updated.",required=true) @PathVariable("userName") String userName,@ApiParam(value = "The user information." ) @Valid @RequestBody ApiUser2 body) {
        if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
            if (getAcceptHeader().get().contains("application/json")) {
                try {
                    return new ResponseEntity<>(getObjectMapper().get().readValue("{ \"name\" : \"...\", \"password\" : \"...\", \"authRoles\" : [ { \"displayName\" : \"...\", \"name\" : \"...\", \"uuid\" : \"...\" }, { \"displayName\" : \"...\", \"name\" : \"...\", \"uuid\" : \"...\" } ], \"pwHash\" : \"...\", \"pwSalt\" : 12345, \"pwLogin\" : true}", ApiUser2.class), HttpStatus.NOT_IMPLEMENTED);
                } catch (IOException e) {
                    log.error("Couldn't serialize response for content type application/json", e);
                    return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
                }
            }
        } else {
            log.warn("ObjectMapper or HttpServletRequest not configured in default UsersResourceApi interface so no example is generated");
        }
        return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
    }

}
package org.bouncycastle.crypto.test; import org.bouncycastle.crypto.BlockCipher; import org.bouncycastle.crypto.CipherParameters; import org.bouncycastle.crypto.DataLengthException; import org.bouncycastle.crypto.InvalidCipherTextException; import org.bouncycastle.crypto.engines.AESEngine; import org.bouncycastle.crypto.engines.AESFastEngine; import org.bouncycastle.crypto.engines.AESLightEngine; import org.bouncycastle.crypto.engines.BlowfishEngine; import org.bouncycastle.crypto.engines.CAST5Engine; import org.bouncycastle.crypto.engines.CAST6Engine; import org.bouncycastle.crypto.engines.DESEngine; import org.bouncycastle.crypto.engines.DESedeEngine; import org.bouncycastle.crypto.engines.NoekeonEngine; import org.bouncycastle.crypto.engines.RC6Engine; import org.bouncycastle.crypto.engines.SEEDEngine; import org.bouncycastle.crypto.engines.SerpentEngine; import org.bouncycastle.crypto.engines.TEAEngine; import org.bouncycastle.crypto.engines.TwofishEngine; import org.bouncycastle.crypto.engines.XTEAEngine; import org.bouncycastle.crypto.modes.CBCBlockCipher; import org.bouncycastle.crypto.modes.CFBBlockCipher; import org.bouncycastle.crypto.modes.GOFBBlockCipher; import org.bouncycastle.crypto.modes.OFBBlockCipher; import org.bouncycastle.crypto.modes.OpenPGPCFBBlockCipher; import org.bouncycastle.crypto.modes.PGPCFBBlockCipher; import org.bouncycastle.crypto.modes.SICBlockCipher; import org.bouncycastle.crypto.params.KeyParameter; import org.bouncycastle.crypto.params.ParametersWithIV; import org.bouncycastle.util.Arrays; import org.bouncycastle.util.encoders.Hex; import org.bouncycastle.util.test.SimpleTest; /** * Test whether block ciphers implement reset contract on init, encrypt/decrypt and reset. 
 */
public class BlockCipherResetTest
    extends SimpleTest
{
    @Override
    public String getName()
    {
        return "Block Cipher Reset";
    }

    // Runs the reset-contract check against each raw engine and each primitive mode.
    // Raw engines are stateless between blocks, so crypt itself must reset them;
    // mode wrappers carry chaining state, so only init/reset are required to reset.
    @Override
    public void performTest()
        throws Exception
    {
        // 128 bit block ciphers
        testReset("AESFastEngine", new AESFastEngine(), new AESFastEngine(), new KeyParameter(new byte[16]));
        testReset("AESEngine", new AESEngine(), new AESEngine(), new KeyParameter(new byte[16]));
        testReset("AESLightEngine", new AESLightEngine(), new AESLightEngine(), new KeyParameter(new byte[16]));
        testReset("Twofish", new TwofishEngine(), new TwofishEngine(), new KeyParameter(new byte[16]));
        testReset("NoekeonEngine", new NoekeonEngine(), new NoekeonEngine(), new KeyParameter(new byte[16]));
        testReset("SerpentEngine", new SerpentEngine(), new SerpentEngine(), new KeyParameter(new byte[16]));
        testReset("SEEDEngine", new SEEDEngine(), new SEEDEngine(), new KeyParameter(new byte[16]));
        testReset("CAST6Engine", new CAST6Engine(), new CAST6Engine(), new KeyParameter(new byte[16]));
        testReset("RC6Engine", new RC6Engine(), new RC6Engine(), new KeyParameter(new byte[16]));

        // 64 bit block ciphers
        testReset("DESEngine", new DESEngine(), new DESEngine(), new KeyParameter(new byte[8]));
        testReset("BlowfishEngine", new BlowfishEngine(), new BlowfishEngine(), new KeyParameter(new byte[8]));
        testReset("CAST5Engine", new CAST5Engine(), new CAST5Engine(), new KeyParameter(new byte[8]));
        testReset("DESedeEngine", new DESedeEngine(), new DESedeEngine(), new KeyParameter(new byte[24]));
        testReset("TEAEngine", new TEAEngine(), new TEAEngine(), new KeyParameter(new byte[16]));
        testReset("XTEAEngine", new XTEAEngine(), new XTEAEngine(), new KeyParameter(new byte[16]));

        // primitive block cipher modes (don't reset on processBlock)
        // NOTE(review): the "AES/GCTR" label is misleading — the GOFB mode below is
        // built on DESEngine, not AES; the label only affects failure messages.
        testModeReset("AES/CBC", new CBCBlockCipher(new AESEngine()), new CBCBlockCipher(new AESEngine()),
            new ParametersWithIV(new KeyParameter(new byte[16]), new byte[16]));
        testModeReset("AES/SIC", new SICBlockCipher(new AESEngine()), new SICBlockCipher(new AESEngine()),
            new ParametersWithIV(new KeyParameter(new byte[16]), new byte[16]));
        testModeReset("AES/CFB", new CFBBlockCipher(new AESEngine(), 128), new CFBBlockCipher(new AESEngine(), 128),
            new ParametersWithIV(new KeyParameter(new byte[16]), new byte[16]));
        testModeReset("AES/OFB", new OFBBlockCipher(new AESEngine(), 128), new OFBBlockCipher(new AESEngine(), 128),
            new ParametersWithIV(new KeyParameter(new byte[16]), new byte[16]));
        testModeReset("AES/GCTR", new GOFBBlockCipher(new DESEngine()), new GOFBBlockCipher(new DESEngine()),
            new ParametersWithIV(new KeyParameter(new byte[8]), new byte[8]));
        testModeReset("AES/OpenPGPCFB", new OpenPGPCFBBlockCipher(new AESEngine()), new OpenPGPCFBBlockCipher(
            new AESEngine()), new KeyParameter(new byte[16]));
        testModeReset("AES/PGPCFB", new PGPCFBBlockCipher(new AESEngine(), false), new PGPCFBBlockCipher(
            new AESEngine(), false), new KeyParameter(new byte[16]));

        // PGPCFB with IV is broken (it's also not a PRP, so probably shouldn't be a BlockCipher)
        // testModeReset("AES/PGPCFBwithIV", new PGPCFBBlockCipher(new AESEngine(), true), new
        // PGPCFBBlockCipher(
        // new AESEngine(), true), new ParametersWithIV(new KeyParameter(new byte[16]), new
        // byte[16]));
        // testModeReset("AES/PGPCFBwithIV_NoIV", new PGPCFBBlockCipher(new AESEngine(), true), new
        // PGPCFBBlockCipher(
        // new AESEngine(), true), new KeyParameter(new byte[16]));
    }

    /**
     * Variant for mode wrappers: skips the "processBlock resets the cipher" check,
     * since chaining modes legitimately keep state between blocks.
     */
    private void testModeReset(String test, BlockCipher cipher1, BlockCipher cipher2, CipherParameters params)
        throws InvalidCipherTextException
    {
        testReset(test, false, cipher1, cipher2, params);
    }

    /**
     * Variant for raw engines: also checks that a single encrypt/decrypt call
     * leaves the cipher ready to produce the same answer again.
     */
    private void testReset(String test, BlockCipher cipher1, BlockCipher cipher2, CipherParameters params)
        throws InvalidCipherTextException
    {
        testReset(test, true, cipher1, cipher2, params);
    }

    /**
     * Establishes a baseline ciphertext with cipher1, then verifies reset behavior
     * in both directions: encryption on cipher1 and decryption on a fresh cipher2.
     */
    private void testReset(String test,
                           boolean testCryptReset,
                           BlockCipher cipher1,
                           BlockCipher cipher2,
                           CipherParameters params)
        throws InvalidCipherTextException
    {
        cipher1.init(true, params);

        byte[] plaintext = new byte[cipher1.getBlockSize()];
        // PGPCFBwithIV output carries the IV plus 2 extra bytes ahead of the data block.
        byte[] ciphertext = new byte[cipher1.getAlgorithmName().contains("PGPCFBwithIV")
            ? 2 * cipher1.getBlockSize() + 2
            : cipher1.getBlockSize()];

        // Establish baseline answer
        crypt(cipher1, true, plaintext, ciphertext);

        // Test encryption resets
        checkReset(test, testCryptReset, cipher1, params, true, plaintext, ciphertext);

        // Test decryption resets with fresh instance
        cipher2.init(false, params);
        checkReset(test, testCryptReset, cipher2, params, false, ciphertext, plaintext);
    }

    /**
     * Verifies that after deliberately disturbing the cipher state with a stray
     * processBlock call, both init(...) and reset() restore it so that the same
     * input maps to the same baseline output again.
     */
    private void checkReset(String test,
                            boolean testCryptReset,
                            BlockCipher cipher,
                            CipherParameters params,
                            boolean encrypt,
                            byte[] pretext,
                            byte[] posttext)
        throws InvalidCipherTextException
    {
        // Do initial run
        byte[] output = new byte[posttext.length];
        crypt(cipher, encrypt, pretext, output);

        // Check encrypt resets cipher
        if (testCryptReset)
        {
            crypt(cipher, encrypt, pretext, output);
            if (!Arrays.areEqual(output, posttext))
            {
                fail(test + (encrypt ? " encrypt" : " decrypt") + " did not reset cipher.");
            }
        }

        // Check init resets data
        cipher.processBlock(pretext, 0, output, 0);
        cipher.init(encrypt, params);
        try
        {
            crypt(cipher, encrypt, pretext, output);
        }
        catch (DataLengthException e)
        {
            fail(test + " init did not reset data.");
        }
        if (!Arrays.areEqual(output, posttext))
        {
            fail(test + " init did not reset data.", new String(Hex.encode(posttext)), new String(Hex.encode(output)));
        }

        // Check reset resets data
        cipher.processBlock(pretext, 0, output, 0);
        cipher.reset();
        try
        {
            crypt(cipher, encrypt, pretext, output);
        }
        catch (DataLengthException e)
        {
            fail(test + " reset did not reset data.");
        }
        if (!Arrays.areEqual(output, posttext))
        {
            fail(test + " reset did not reset data.");
        }
    }

    /**
     * Runs one block through the cipher; for PGPCFBwithIV decryption a second
     * block is processed to consume the leading IV material.
     */
    private static void crypt(BlockCipher cipher1, boolean encrypt, byte[] plaintext, byte[] output)
        throws InvalidCipherTextException
    {
        cipher1.processBlock(plaintext, 0, output, 0);
        if (cipher1.getAlgorithmName().contains("PGPCFBwithIV") && !encrypt)
        {
            // Process past IV in first block
            cipher1.processBlock(plaintext, cipher1.getBlockSize(), output, 0);
        }
    }

    public static void main(String[] args)
    {
        runTest(new BlockCipherResetTest());
    }
}
/**
 * For license details see associated LICENSE.txt file.
 */
package uk.ac.ed.bio.SynthSys.SBMLDataTools;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.ArrayList;
import java.util.List;

import javax.xml.stream.XMLStreamException;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.math3.analysis.interpolation.LinearInterpolator;
import org.apache.commons.math3.analysis.interpolation.SplineInterpolator;
import org.sbml.jsbml.Model;
import org.sbml.jsbml.SBMLDocument;
import org.sbml.jsbml.SBMLReader;
import org.sbml.jsbml.SBMLWriter;

import com.ctc.wstx.exc.WstxUnexpectedCharException;

import com.opencsv.CSVReader;

/**
 * Command line program to add external time course data to an SBML model.
 *
 * @author Ally Hume
 */
public class SBMLAddTimeCourseData {

    // Strings for the various command line options
    private static final String OPTION_CSV_IN        = "csvIn";
    private static final String OPTION_CSV_OUT       = "csvOut";
    private static final String OPTION_SBML_IN       = "sbmlIn";
    private static final String OPTION_SBML_LEVEL    = "sbmlLevel";
    private static final String OPTION_SBML_VERSION  = "sbmlVersion";
    private static final String OPTION_SBML_OUT      = "sbmlOut";
    private static final String OPTION_HELP          = "help";
    private static final String OPTION_CSV_SEPARATOR = "csvSeparator";
    private static final String OPTION_INTERPOLATOR  = "interpolator";

    // Name shown in usage/help output
    private static final String PROGRAM_NAME = "SBMLAddTimeCourseData";

    // Defaults for any SBML files we create
    private static final int    DEFAULT_SBML_LEVEL    = 3;
    private static final int    DEFAULT_SBML_VERSION  = 1;
    // Number of interpolated output points between successive input time points
    private static final int    DEFAULT_NUM_INTERVALS = 10;
    private static final String DEFAULT_INTERPOLATOR  = "linear";

    /**
     * Main command line call.
     *
     * @param args command line arguments
     *
     * @throws IOException if an unexpected IO error occurs. Most common errors are reported nicer
     *                     than throwing an exception.
     */
    public static void main(String[] args) throws IOException {

        Options options = getCommandLineOptions();

        try {
            CommandLineParser parser = new DefaultParser();
            CommandLine commandLine = parser.parse(options, args);

            // Handle help option
            if (commandLine.hasOption(OPTION_HELP)) {
                HelpFormatter formatter = new HelpFormatter();
                formatter.printHelp( PROGRAM_NAME, options );
                return;
            }

            // Everything should be in the options so if there are any any left then we have an
            // error
            if (commandLine.getArgs().length != 0) {
                String error = "Usage error: unexpected arguments:";
                for (String s : commandLine.getArgList()) {
                    error = error + " " + s;
                }
                throw new ParseException(error);
            }

            // One of sbmlOut or csvOut is required
            if (!commandLine.hasOption(OPTION_SBML_OUT) && !commandLine.hasOption(OPTION_CSV_OUT)) {
                throw new ParseException("One of sbmlOut or csvOut arguments is required.");
            }

            // Get the CSV in file (falls back to stdin when no file is given)
            Reader csvInReader;
            if (commandLine.hasOption(OPTION_CSV_IN)) {
                String fileName = commandLine.getOptionValue(OPTION_CSV_IN);
                csvInReader = new BufferedReader(new FileReader(fileName));
            }
            else {
                // Read from stdin
                csvInReader = new BufferedReader(new InputStreamReader(System.in));
            }

            // Get SBML in reader
            SBMLDocument doc;
            if (commandLine.hasOption(OPTION_SBML_IN)) {
                File file= new File(commandLine.getOptionValue(OPTION_SBML_IN));
                doc = SBMLReader.read(file);
            }
            else {
                // Create an empty SBML model
                int level = getIntegerOption(commandLine, OPTION_SBML_LEVEL, DEFAULT_SBML_LEVEL);
                int version = getIntegerOption(commandLine, OPTION_SBML_VERSION, DEFAULT_SBML_VERSION);
                doc = new SBMLDocument(level, version);
                doc.createModel("model");
            }

            // Get SBML out file
            File sbmlOutFile = null;
            if (commandLine.hasOption(OPTION_SBML_OUT)) {
                sbmlOutFile = new File(commandLine.getOptionValue(OPTION_SBML_OUT));
            }

            // CSV file out
            BufferedWriter csvOutWriter = null;
            if (commandLine.hasOption(OPTION_CSV_OUT)) {
                File csvFileOut = new File(commandLine.getOptionValue(OPTION_CSV_OUT));
                csvOutWriter = new BufferedWriter(new FileWriter(csvFileOut));
            }

            // Interpolator
            String interpolatorName = DEFAULT_INTERPOLATOR;
            Interpolator interpolator = null;
            if (commandLine.hasOption(OPTION_INTERPOLATOR)) {
                interpolatorName = commandLine.getOptionValue(OPTION_INTERPOLATOR);
            }

            // Map interpolator to appropriate class instance
            if (interpolatorName.equalsIgnoreCase("cubic")) {
                interpolator = new PolynomialInterpolator(new SplineInterpolator());
            }
            else if (interpolatorName.equalsIgnoreCase("linear")) {
                interpolator = new PolynomialInterpolator(new LinearInterpolator());
            }
            else {
                throw new ParseException("Unknown interpolator: " + interpolatorName);
            }

            // Do the work
            process(csvInReader, doc.getModel(), csvOutWriter, getSeparator(commandLine), interpolator);

            // NOTE(review): the readers/writers are not closed when process(...)
            // throws; harmless for a short-lived CLI process but worth confirming.
            csvInReader.close();
            if (csvOutWriter != null) csvOutWriter.close();

            // Write the SBML file out
            if (commandLine.hasOption(OPTION_SBML_OUT)) {
                SBMLWriter.write(doc, sbmlOutFile, "SBMLAddTimeCourseData", "1.0");
            }
        }
        catch( ParseException e) {
            // Usage errors: report and show help rather than a stack trace
            System.err.println("Error: " + e.getLocalizedMessage());
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp( PROGRAM_NAME, options );
        }
        catch( FileNotFoundException e) {
            System.err.println("Error: File not found: " + e.getLocalizedMessage());
        }
        catch( WstxUnexpectedCharException e) {
            System.err.println("Error reading SBML file: " + e.getLocalizedMessage());
        }
        catch( XMLStreamException e) {
            System.err.println("Error reading SBML file: " + e.getLocalizedMessage());
        }
        catch( IllegalArgumentException e ) {
            // Thrown by validateCsvData for malformed input CSV
            System.err.println("Error: " + e.getLocalizedMessage());
        }
    }

    /**
     * Reads the CSV data from the given reader, validates it and writes it into the SBML model
     * as a parameter with an assignment rule.
     *
     * @param reader       csv data reader
     * @param model        SBML model
     * @param csvWriter    csv writer to receive the fitted data, or null
     * @param separator    csv data separator
     * @param interpolator interpolator
     *
     * @throws IOException if an unexpected IO error occurs.
     */
    public static void process(
            Reader reader, Model model, BufferedWriter csvWriter, char separator,
            Interpolator interpolator) throws IOException {

        // Read CSV
        CSVReader csvReader = new CSVReader(reader, separator);
        List<String[]> csvData = csvReader.readAll();
        csvReader.close();

        // Rejects malformed input (fewer than 3 data rows, ragged rows, etc.)
        validateCsvData(csvData);

        // Get the number of columns
        int numCols = csvData.get(0).length;

        List<Double> fittedTimes = null;
        List<List<Double>> fittedValues = new ArrayList<List<Double>>();

        // If we are to output CSV data then calculate the times we output:
        // DEFAULT_NUM_INTERVALS points per input interval, plus the final time.
        if (csvWriter != null) {
            fittedTimes = new ArrayList<Double>();
            for (int row=1; row<csvData.size()-1; ++row) {
                double t1 = Double.parseDouble((csvData.get(row)[0]));
                double t2 = Double.parseDouble((csvData.get(row+1)[0]));
                double interval = (t2-t1)/(double) DEFAULT_NUM_INTERVALS;
                for (int i=0; i<DEFAULT_NUM_INTERVALS; ++i) {
                    fittedTimes.add(t1+interval*i);
                }
            }
            // Add the last time
            fittedTimes.add(Double.parseDouble((csvData.get(csvData.size()-1)[0])));
        }

        // Assume time is column 0, process each other column in turn
        for (int col=1; col<numCols; ++col) {

            // Assume row 0 is a header
            String paramName = csvData.get(0)[col].trim();

            // Collect the times and values for this column
            List<Double> times = new ArrayList<Double>();
            List<Double> values = new ArrayList<Double>();

            for (int row=1; row<csvData.size(); ++row) {
                String[] rowData = csvData.get(row);
                times.add(Double.parseDouble(rowData[0]));
                values.add(Double.parseDouble(rowData[col]));
            }

            // fittedValuesForThisColumn stays null when no CSV output is wanted;
            // otherwise addParameter fills it at the fittedTimes points.
            List<Double> fittedValuesForThisColumn = null;
            if (fittedTimes != null) {
                fittedValuesForThisColumn = new ArrayList<Double>();
            }
            fittedValues.add(fittedValuesForThisColumn);

            // Add the data to the SBML model
            SBMLTimeCourseDataHelper.addParameter(
                model, paramName, times, values, fittedTimes, fittedValuesForThisColumn,
                interpolator);
        }

        // Now we can write the CSV data
        if (csvWriter != null) {

            // Write the header
            for (int col=0; col<numCols; ++col) {
                if (col != 0) csvWriter.write(separator);
                csvWriter.write(csvData.get(0)[col]);
            }
            csvWriter.newLine();

            // Write the data
            for (int row=0; row<fittedTimes.size(); ++row) {
                // Time
                csvWriter.write(Double.toString(fittedTimes.get(row)));
                for (List<Double> fittedValuesForColumn : fittedValues) {
                    csvWriter.write(separator);
                    csvWriter.write(Double.toString(fittedValuesForColumn.get(row)));
                }
                csvWriter.newLine();
            }
        }
    }

    /**
     * Gets the command line options. This includes details to get their arguments if necessary
     * and descriptions required to display help message.
     *
     * @return command line options
     */
    private static Options getCommandLineOptions() {

        Options options = new Options();
        Option option;

        // help
        option = Option.builder(OPTION_HELP).build();
        option.setDescription("Displays this help message");
        options.addOption(option);

        // csvIn
        option = Option.builder(OPTION_CSV_IN).hasArg(true).argName("file").build();
        option.setDescription(
            "csv time course data file. Optional: if not specified stdin will be used.");
        options.addOption(option);

        // csvOut
        option = Option.builder(OPTION_CSV_OUT).hasArg(true).argName("file").build();
        option.setDescription(
            "csv file to write fitted data. Optional.");
        options.addOption(option);

        // sbmlIn
        option = Option.builder(OPTION_SBML_IN).hasArg(true).argName("file").build();
        option.setDescription(
            "Input SBML file. Optional: if not specified an empty model will be used.");
        options.addOption(option);

        // sbmlLevel
        option = Option.builder(OPTION_SBML_LEVEL).hasArg(true).argName("level").build();
        option.setDescription(
            "SBML level of SBML model if no SBML input file is specified. Optional. " +
            "Default is " + DEFAULT_SBML_LEVEL);
        options.addOption(option);

        // sbmlVersion
        option = Option.builder(OPTION_SBML_VERSION).hasArg(true).argName("version").build();
        option.setDescription( "SBML version of SBML model if no SBML input file is specified. Optional. 
" + "Default is " + DEFAULT_SBML_VERSION); options.addOption(option); // sbmlOut option = Option.builder(OPTION_SBML_OUT).hasArg(true).argName("file").build(); option.setDescription( "Output SBML file. Optional."); options.addOption(option); // csvSeparator option = Option.builder(OPTION_CSV_SEPARATOR).hasArg(true).argName("separator").build(); option.setDescription( "Single character separator used between fields of CSV file " + "(or TAB can be used for a tab character). " + "Optional. Default is comma ','."); options.addOption(option); // interpolator option = Option.builder(OPTION_INTERPOLATOR).hasArg(true).argName("interpolator").build(); option.setDescription( "Interpolator to use: one of 'linear' or 'cubic' " + "Optional. Default is linear."); options.addOption(option); return options; } /** * Validates the input CSV data to ensure it has the properties required. The data is expected * to have a header row and at least three data rows. The first column is time values and * must be in ascending order. There must be at least one other data column. * * @param csvData CSV data * * @throws IllegalArgumentException if the data is invalid. */ private static void validateCsvData(List<String[]> csvData) { // Must have header row and at least 3 data rows if (csvData.size() < 4 ) { throw new IllegalArgumentException( "Input CSV data must have header row and at least 3 data rows"); } int numColumns = csvData.get(0).length; if (numColumns < 2 ) { throw new IllegalArgumentException( "Input CSV data must have time column and at least one data column"); } double lastTime = getCSVDataValue(csvData, 1, 0); for (int row=1; row < csvData.size(); row++) { // Ensure consistent number of columns if ( csvData.get(row).length != numColumns) { throw new IllegalArgumentException( "Input CSV data must have same number of columns in each row. 
" + "row " + (row+1) + " has " + csvData.get(row).length + " columns, expected it to have " + numColumns); } // Ensure all data is double for ( int col = 0; col < numColumns; ++col) { getCSVDataValue(csvData, row, col); } // Ensure time is always ascending double time; if (row > 1 ) { time = Double.parseDouble(csvData.get(row)[0]); if (time <= lastTime) throw new IllegalArgumentException( "Input CSV data must be sorted with ascending time. The time in " + "row " + (row+1) + " (" + time + ") is before time in row " + row + " (" + lastTime + ")."); lastTime = time; } } } /** * Gets the numeric value in the specified row and column of the CSV data * * @param csvData CSV data * @param row row index (zero based) * @param col col index (zero based) * * @return the numeric value in the specified position * * @throws IllegalArgumentException if CSV value is non-numeric. The exception contains a user * friendly error message than can be displayed to the user. */ private static double getCSVDataValue(List<String[]> csvData, int row, int col) { try { return Double.parseDouble(csvData.get(row)[col]); } catch(NumberFormatException e) { throw new IllegalArgumentException( "Input CSV data in row " + (row+1) + ", column " + (col+1) + " is not a numerical value: " + csvData.get(row)[col]); } } /** * Gets the value of an integer command line option. * * @param commandLine parsed command line * @param optionName name of the option * @param defaultValue default value if the option is not specified * * @return the option value, or the default value if it is not specified * * @throws IllegalArgumentException if the specified option value is not an integer. The * error message is user friendly and can be displayed to * the user. 
*/ private static int getIntegerOption( CommandLine commandLine, String optionName, int defaultValue) { if (commandLine.hasOption(optionName)) { try { return Integer.parseInt(commandLine.getOptionValue(optionName)); } catch(NumberFormatException e) { throw new IllegalArgumentException( "Command line option " + optionName + " must be an integer", e); } } return defaultValue; } /** * Gets the CSV separator character from the command line. * * @param commandLine parsed command line * * @return the specified CSV separator character, or the default is the option is not specified. * * @throws IllegalArgumentException if the option value is invalid. The exception has a user- * friendly message than can be displayed to the user. */ private static char getSeparator(CommandLine commandLine) { if (commandLine.hasOption(OPTION_CSV_SEPARATOR)) { String separator = commandLine.getOptionValue(OPTION_CSV_SEPARATOR); if (separator.toUpperCase().equals("TAB")) return '\t'; if (separator.length() != 1) { throw new IllegalArgumentException( "csvSeparator must be a single character (or TAB)"); } return separator.charAt(0); } return ','; } }
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package com.web.mavenproject6.forms; import javax.validation.constraints.NotNull; import javax.validation.constraints.Size; import org.hibernate.validator.constraints.Email; import org.hibernate.validator.constraints.Length; public class UserForm { @NotNull @Size(min = 1, max = 50) private String login; @NotNull @Size(min = 5, max = 20) private String password; @NotNull @Size(min = 5, max = 20) private String confirmPassword; @NotNull @Size(min = 5, max = 50) @Email private String email; @Length(max = 25) private String fname; @Length(max = 25) private String sname; @Length(max = 25) private String tname; @Length(max = 25) private String phone; @Length(max = 10) private String pasportSeria; @Length(max = 10) private String pasportNumber; @Length(max = 25) private String country; @Length(max = 25) private String region; @Length(max = 25) private String city; @Length(max = 55) private String localAddress; private int age; @Length(max = 50) private String university; @Length(max = 250) private String fclt; private int course; @Length(max = 20) private String studyForm; private boolean sex; public String getLogin() { return login; } public void setLogin(String login) { this.login = login; } public String getPassword() { return password; } public void setPassword(String password) { this.password = password; } public String getConfirmPassword() { return confirmPassword; } public void setConfirmPassword(String confirmPassword) { this.confirmPassword = confirmPassword; } public String getEmail() { return email; } public void setEmail(String email) { this.email = email; } public String getFname() { return fname; } public void setFname(String fname) { this.fname = fname; } public String getSname() { return sname; } public void setSname(String sname) { this.sname = sname; } public String getTname() 
{ return tname; } public void setTname(String tname) { this.tname = tname; } public String getPhone() { return phone; } public void setPhone(String phone) { this.phone = phone; } public String getPasportSeria() { return pasportSeria; } public void setPasportSeria(String pasportSeria) { this.pasportSeria = pasportSeria; } public String getPasportNumber() { return pasportNumber; } public void setPasportNumber(String pasportNumber) { this.pasportNumber = pasportNumber; } public String getCountry() { return country; } public void setCountry(String country) { this.country = country; } public String getRegion() { return region; } public void setRegion(String region) { this.region = region; } public String getCity() { return city; } public void setCity(String city) { this.city = city; } public String getLocalAddress() { return localAddress; } public void setLocalAddress(String localAddress) { this.localAddress = localAddress; } public int getAge() { return age; } public void setAge(int age) { this.age = age; } public String getUniversity() { return university; } public void setUniversity(String university) { this.university = university; } public String getFclt() { return fclt; } public void setFclt(String fclt) { this.fclt = fclt; } public int getCourse() { return course; } public void setCourse(int course) { this.course = course; } public String getStudyForm() { return studyForm; } public void setStudyForm(String studyForm) { this.studyForm = studyForm; } public boolean isSex() { return sex; } public void setSex(boolean sex) { this.sex = sex; } public UserForm(String login, String password, String confirmPassword, String email, String fname, String sname, String tname, String phone, String pasportSeria, String pasportNumber, String country, String region, String city, String localAddress, int age, String university, String fclt, int course, String studyForm, boolean sex) { this.login = login; this.password = password; this.confirmPassword = confirmPassword; this.email = 
email; this.fname = fname; this.sname = sname; this.tname = tname; this.phone = phone; this.pasportSeria = pasportSeria; this.pasportNumber = pasportNumber; this.country = country; this.region = region; this.city = city; this.localAddress = localAddress; this.age = age; this.university = university; this.fclt = fclt; this.course = course; this.studyForm = studyForm; this.sex = sex; } public UserForm() { this.login = ""; this.password = ""; this.confirmPassword = ""; this.email = ""; this.fname = ""; this.sname = ""; this.tname = ""; this.phone = ""; this.pasportSeria = ""; this.pasportNumber = ""; this.country = ""; this.region = ""; this.city = ""; this.localAddress = ""; this.age = 0; this.university = ""; this.fclt = ""; this.course = 0; this.studyForm = ""; this.sex = false; } @Override public String toString() { StringBuilder buf = new StringBuilder(); buf .append("login:").append(login) .append(" password:").append(password) .append(" confirmPassword:").append(confirmPassword) .append(" email:").append(email) .append(" fname:").append(fname) .append(" sname:").append(sname) .append(" tname:").append(tname) .append(" phone:").append(phone) .append(" pasportSeria:").append(pasportSeria) .append(" pasportNumber:").append(pasportNumber) .append(" country:").append(country) .append(" region:").append(region) .append(" city:").append(city) .append(" localAddress:").append(localAddress) .append(" age:").append(age) .append(" university:").append(university) .append(" fclt:").append(fclt) .append(" course:").append(course) .append(" studyForm:").append(studyForm) .append(" sex:").append(sex); return buf.toString(); } }